lang: stringclasses (3 values)
file_path: stringlengths (5 to 150)
repo_name: stringlengths (6 to 110)
commit: stringlengths (40 to 40)
file_code: stringlengths (1.52k to 18.9k)
prefix: stringlengths (82 to 16.5k)
suffix: stringlengths (0 to 15.1k)
middle: stringlengths (121 to 8.18k)
strategy: stringclasses (8 values)
context_items: listlengths (0 to 100)
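These fields describe fill-in-the-middle (FIM) code-completion records: file_code is the full source file at the given commit, prefix and suffix are the code before and after the span to be predicted, middle is the target span, strategy names how the span was selected, and context_items is a ranked list of related snippets from the same repository. The sketch below shows one way such a record could be consumed with the Hugging Face `datasets` library; the dataset path, the FIM sentinel tokens, and the loading code are illustrative assumptions, not part of the dataset itself.

```python
# Sketch of consuming one FIM record; the dataset path is a placeholder,
# replace it with the actual repository id.
from datasets import load_dataset

ds = load_dataset("org/fim-dataset", split="train")  # hypothetical path
row = ds[0]

# Retrieved cross-file context, ordered by rank (rank 0 = most relevant).
context = "\n".join(
    item["content"] for item in sorted(row["context_items"], key=lambda i: i["rank"])
)

# One common FIM prompt layout; the sentinel tokens are illustrative and depend
# on the model being trained, they are not defined by this dataset.
prompt = f"{context}<fim_prefix>{row['prefix']}<fim_suffix>{row['suffix']}<fim_middle>"
target = row["middle"]

print(row["lang"], row["repo_name"], row["file_path"], row["strategy"])
```

One full example record follows.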
lang: Rust
file_path: contracts/tests/sim/test_storage.rs
repo_name: alexkeating/moloch
commit: bf11f8e03de63a000706bb1c711cec16410dbe09
file_code:
use crate::utils::init_moloch; use near_sdk_sim::{call, to_yocto}; use std::convert::TryInto; #[test] fn simulate_storage_deposit_exact() { let (_, moloch, _fdai, _alice, bob, _deposit_amount) = init_moloch(); let start_amount = bob.account().unwrap().amount; let min_deposit = to_yocto("7"); println!("Here {:?} 23", start_amount); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(false) ), min_deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( start_amount - end_amount >= to_yocto("7"), "Did not take all of the registration" ); } #[test] fn simulate_storage_deposit_transfer_back() { let (_, moloch, _fdai, _alice, bob, _deposit_amount) = init_moloch(); let start_amount = &bob.account().unwrap().amount; let deposit = 828593677552200000000; println!("Here {:?} 23", start_amount); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( (start_amount - end_amount) < deposit, "Not receieve correct excess amount" ); assert!( (start_amount - end_amount) > 1000000, "Not receieve correct excess amount" ); } #[test] fn simulate_storage_deposit_already_registered() { let (_, moloch, _, _, bob, _) = init_moloch(); let deposit = to_yocto("9"); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(false) ), deposit, near_sdk_sim::DEFAULT_GAS ); let start_amount = &bob.account().unwrap().amount; call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( (start_amount - end_amount) < 800000000000000000000, "Not receieve correct excess amount" ); } #[test] fn simulate_storage_deposit_below_min_amount() { let (_, moloch, _, alice, _, _) = init_moloch(); let deposit = 0; let res = call!( alice, moloch.storage_deposit( Some(alice.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); assert!( format!("{:?}", res.status()) .contains("The attached deposit is less than the minimum storage balance bounds"), "Corrrect error was not raised" ) } #[test] fn simulate_storage_withdraw_and_unregister() { let (_, moloch, _, alice, _, _) = init_moloch(); let deposit = to_yocto("10"); let start_amount = &alice.account().unwrap().amount; call!( alice, moloch.storage_deposit( Some(alice.account_id.to_string().try_into().unwrap()), Some(false) ), deposit, near_sdk_sim::DEFAULT_GAS ); let res = call!( alice, moloch.storage_withdraw(Some(to_yocto("9").into())), 1, near_sdk_sim::DEFAULT_GAS ); let end_amount = &alice.account().unwrap().amount; println!("Resp {:?}", res); println!("Diff {}", (start_amount - end_amount)); println!("yocto {}", to_yocto("1.1")); assert!((start_amount - end_amount) < to_yocto("1.1")); let res = call!( alice, moloch.storage_unregister(Some(true)), 1, near_sdk_sim::DEFAULT_GAS ); let end_amount = &alice.account().unwrap().amount; println!("Resp {:?}", res); println!("Diff {}", (start_amount - end_amount)); println!("yocto {}", 6000000000000000000000u128); assert!((start_amount - end_amount) < 6000000000000000000000); }
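prefix: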
use crate::utils::init_moloch; use near_sdk_sim::{call, to_yocto}; use std::convert::TryInto; #[test] fn simulate_storage_deposit_exact() { let (_, moloch, _fdai, _alice, bob, _deposit_amount) = init_moloch(); let start_amount = bob.account().unwrap().amount; let min_deposit = to_yocto("7"); println!("Here {:?} 23", start_amount); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(false) ), min_deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( start_amount - end_amount >= to_yocto("7"), "Did not take all of the registration" ); } #[test]
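suffix: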
#[test] fn simulate_storage_deposit_already_registered() { let (_, moloch, _, _, bob, _) = init_moloch(); let deposit = to_yocto("9"); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(false) ), deposit, near_sdk_sim::DEFAULT_GAS ); let start_amount = &bob.account().unwrap().amount; call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( (start_amount - end_amount) < 800000000000000000000, "Not receieve correct excess amount" ); } #[test] fn simulate_storage_deposit_below_min_amount() { let (_, moloch, _, alice, _, _) = init_moloch(); let deposit = 0; let res = call!( alice, moloch.storage_deposit( Some(alice.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); assert!( format!("{:?}", res.status()) .contains("The attached deposit is less than the minimum storage balance bounds"), "Corrrect error was not raised" ) } #[test] fn simulate_storage_withdraw_and_unregister() { let (_, moloch, _, alice, _, _) = init_moloch(); let deposit = to_yocto("10"); let start_amount = &alice.account().unwrap().amount; call!( alice, moloch.storage_deposit( Some(alice.account_id.to_string().try_into().unwrap()), Some(false) ), deposit, near_sdk_sim::DEFAULT_GAS ); let res = call!( alice, moloch.storage_withdraw(Some(to_yocto("9").into())), 1, near_sdk_sim::DEFAULT_GAS ); let end_amount = &alice.account().unwrap().amount; println!("Resp {:?}", res); println!("Diff {}", (start_amount - end_amount)); println!("yocto {}", to_yocto("1.1")); assert!((start_amount - end_amount) < to_yocto("1.1")); let res = call!( alice, moloch.storage_unregister(Some(true)), 1, near_sdk_sim::DEFAULT_GAS ); let end_amount = &alice.account().unwrap().amount; println!("Resp {:?}", res); println!("Diff {}", (start_amount - end_amount)); println!("yocto {}", 6000000000000000000000u128); assert!((start_amount - end_amount) < 6000000000000000000000); }
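middle: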
fn simulate_storage_deposit_transfer_back() { let (_, moloch, _fdai, _alice, bob, _deposit_amount) = init_moloch(); let start_amount = &bob.account().unwrap().amount; let deposit = 828593677552200000000; println!("Here {:?} 23", start_amount); call!( bob, moloch.storage_deposit( Some(bob.account_id.to_string().try_into().unwrap()), Some(true) ), deposit, near_sdk_sim::DEFAULT_GAS ); let end_amount = bob.account().unwrap().amount; assert!( (start_amount - end_amount) < deposit, "Not receieve correct excess amount" ); assert!( (start_amount - end_amount) > 1000000, "Not receieve correct excess amount" ); }
strategy: function_block-full_function
context_items:
[ { "content": "#[test]\n\nfn simulate_submit_proposal() {\n\n let (_root, moloch, fdai, alice, bob, deposit_amount) = init_moloch();\n\n register_user_moloch(&alice, &moloch);\n\n register_user_moloch(&bob, &moloch);\n\n call!(\n\n bob,\n\n fdai.ft_transfer_call(\n\n moloch.user_account.valid_account_id(),\n\n deposit_amount.into(),\n\n Some(\"Deposit some tokens into moloch\".to_string()),\n\n \"\".to_string()\n\n ),\n\n 1,\n\n near_sdk_sim::DEFAULT_GAS\n\n );\n\n\n\n call!(\n\n alice,\n\n fdai.ft_transfer_call(\n\n moloch.user_account.valid_account_id(),\n", "file_path": "contracts/tests/sim/test_moloch.rs", "rank": 0, "score": 77647.83981562394 }, { "content": "pub fn init_moloch() -> (\n\n UserAccount,\n\n ContractAccount<MolochContract>,\n\n ContractAccount<FdaiContract>,\n\n UserAccount,\n\n UserAccount,\n\n u128,\n\n) {\n\n let root = init_simulator(None);\n\n\n\n let fdai = deploy!(\n\n contract: FdaiContract,\n\n contract_id: FDAI_ID,\n\n bytes: &FDAI_WASM_BYTES,\n\n signer_account: root,\n\n init_method: new_default_meta(root.valid_account_id(), to_yocto(\"900\").into())\n\n );\n\n\n\n let alice = root.create_user(\"alice\".to_string(), to_yocto(\"100\"));\n\n let bob = root.create_user(\"bob\".to_string(), to_yocto(\"100\"));\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 1, "score": 75034.09490286911 }, { "content": "const transferCallFdai = async (\n\n masterAccount,\n\n accountId,\n\n amount,\n\n ftAccountId\n\n) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"ft_transfer_call\",\n\n args: {\n\n receiver_id: accountId,\n\n amount: amount,\n\n memo: \"transfer\",\n\n msg: \"\"\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n", "file_path": "test/moloch.test.js", "rank": 2, "score": 72776.58123318733 }, { "content": "pub fn register_user_moloch(\n\n user: &near_sdk_sim::UserAccount,\n\n moloch: &near_sdk_sim::ContractAccount<MolochContract>,\n\n) {\n\n call!(\n\n user,\n\n moloch.storage_deposit(None, Some(false)),\n\n to_yocto(\"12\"),\n\n near_sdk_sim::DEFAULT_GAS\n\n );\n\n}\n\n\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 4, "score": 66590.65974168445 }, { "content": "const registerMoloch = async (masterAccount, accountId, molochAccountId) => {\n\n console.log(masterAccount);\n\n console.log(accountId);\n\n console.log(molochAccountId);\n\n console.log(nearAPI.utils.format.parseNearAmount(\".1\"));\n\n await masterAccount.functionCall({\n\n contractId: molochAccountId,\n\n methodName: \"storage_deposit\",\n\n args: {\n\n account_id: accountId,\n\n registration_only: false\n\n },\n\n attachedDeposit: nearAPI.utils.format.parseNearAmount(\"1\")\n\n });\n", "file_path": "test/moloch.test.js", "rank": 9, "score": 53950.374417271996 }, { "content": "const delay = ms => {\n\n return new Promise(resolve => setTimeout(resolve, ms));\n", "file_path": "test/moloch.test.js", "rank": 10, "score": 52646.786448991705 }, { "content": "const utils = require(\"./utils\");\n", "file_path": "test/moloch.test.js", "rank": 11, "score": 52646.786448991705 }, { "content": "const BN = require(\"bn.js\");\n", "file_path": "test/moloch.test.js", "rank": 12, "score": 52646.786448991705 }, { "content": "const transferFdai = async (masterAccount, accountId, amount, ftAccountId) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"ft_transfer\",\n\n args: {\n\n receiver_id: accountId,\n\n amount: amount,\n\n memo: \"transfer\",\n\n msg: \"\"\n\n },\n\n attachedDeposit: \"1\",\n\n 
gas: 300000000000000\n\n });\n", "file_path": "test/moloch.test.js", "rank": 13, "score": 51121.33350771128 }, { "content": "const nearAPI = require(\"near-api-js\");\n", "file_path": "test/moloch.test.js", "rank": 14, "score": 51121.33350771128 }, { "content": "const registerFdai = async (masterAccount, accountId, ftAccountId) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"storage_deposit\",\n\n args: {\n\n account_id: accountId\n\n },\n\n attachedDeposit: nearAPI.utils.format.parseNearAmount(\".01\")\n\n });\n", "file_path": "test/moloch.test.js", "rank": 15, "score": 51121.33350771128 }, { "content": "const balanceOfFdai = async (masterAccount, accountId, ftAccountId) => {\n\n return await masterAccount.viewFunction(ftAccountId, \"ft_balance_of\", {\n\n account_id: accountId\n\n });\n", "file_path": "test/moloch.test.js", "rank": 16, "score": 51121.33350771128 }, { "content": "use near_sdk::json_types::U128;\n\nuse near_sdk_sim::{call, to_yocto, view};\n\n\n\nuse crate::utils::{init_moloch, register_user_moloch};\n\n\n\n#[test]\n", "file_path": "contracts/tests/sim/test_moloch.rs", "rank": 17, "score": 50816.642955901494 }, { "content": " to_yocto(\"2\").into(),\n\n Some(\"Deposit some tokens into moloch\".to_string()),\n\n \"\".to_string()\n\n ),\n\n 1,\n\n near_sdk_sim::DEFAULT_GAS\n\n );\n\n\n\n call!(\n\n bob,\n\n moloch.submit_proposal(\n\n alice.valid_account_id().to_string(),\n\n to_yocto(\"2\").into(),\n\n 15.into(),\n\n \"A random proposal\".to_string()\n\n ),\n\n 1,\n\n near_sdk_sim::DEFAULT_GAS\n\n );\n\n let moloch_balance: U128 = view!(fdai.ft_balance_of(moloch.valid_account_id())).unwrap_json();\n\n assert_eq!(to_yocto(\"4\"), moloch_balance.0);\n\n}\n", "file_path": "contracts/tests/sim/test_moloch.rs", "rank": 18, "score": 50814.12032709068 }, { "content": "const getEscrowBalance = async (masterAccount, accountId, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_escrow_user_balance\",\n\n { account_id: accountId }\n\n );\n", "file_path": "test/moloch.test.js", "rank": 19, "score": 49681.79197427562 }, { "content": "const getCurentPeriod = async (masterAccount, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_current_period\",\n\n {}\n\n );\n", "file_path": "test/moloch.test.js", "rank": 20, "score": 49681.79197427562 }, { "content": "const getBankBalance = async (masterAccount, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_bank_balance\",\n\n {}\n\n );\n", "file_path": "test/moloch.test.js", "rank": 21, "score": 49681.79197427562 }, { "content": "const nearAPI = require(\"near-api-js\");\n\nconst BN = require(\"bn.js\");\n\nconst utils = require(\"./utils\");\n\nconst { getOrCreateAccount, config } = utils;\n\nrequire(\"dotenv\").config();\n\n\n\nconst registerFdai = async (masterAccount, accountId, ftAccountId) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"storage_deposit\",\n\n args: {\n\n account_id: accountId\n\n },\n\n attachedDeposit: nearAPI.utils.format.parseNearAmount(\".01\")\n\n });\n\n};\n\n\n\nconst registerMoloch = async (masterAccount, accountId, molochAccountId) => {\n\n console.log(masterAccount);\n\n console.log(accountId);\n\n console.log(molochAccountId);\n\n console.log(nearAPI.utils.format.parseNearAmount(\".1\"));\n\n await masterAccount.functionCall({\n\n contractId: molochAccountId,\n\n methodName: 
\"storage_deposit\",\n\n args: {\n\n account_id: accountId,\n\n registration_only: false\n\n },\n\n attachedDeposit: nearAPI.utils.format.parseNearAmount(\"1\")\n\n });\n\n};\n\n\n\nconst transferFdai = async (masterAccount, accountId, amount, ftAccountId) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"ft_transfer\",\n\n args: {\n\n receiver_id: accountId,\n\n amount: amount,\n\n memo: \"transfer\",\n\n msg: \"\"\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n\n};\n\n\n\nconst transferCallFdai = async (\n\n masterAccount,\n\n accountId,\n\n amount,\n\n ftAccountId\n\n) => {\n\n await masterAccount.functionCall({\n\n contractId: ftAccountId,\n\n methodName: \"ft_transfer_call\",\n\n args: {\n\n receiver_id: accountId,\n\n amount: amount,\n\n memo: \"transfer\",\n\n msg: \"\"\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n\n};\n\n\n\nconst balanceOfFdai = async (masterAccount, accountId, ftAccountId) => {\n\n return await masterAccount.viewFunction(ftAccountId, \"ft_balance_of\", {\n\n account_id: accountId\n\n });\n\n};\n\n\n\nconst getEscrowBalance = async (masterAccount, accountId, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_escrow_user_balance\",\n\n { account_id: accountId }\n\n );\n\n};\n\n\n\nconst getBankBalance = async (masterAccount, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_bank_balance\",\n\n {}\n\n );\n\n};\n\n\n\nconst getCurentPeriod = async (masterAccount, molochAccountId) => {\n\n return await masterAccount.viewFunction(\n\n molochAccountId,\n\n \"get_current_period\",\n\n {}\n\n );\n\n};\n\n\n\nconst delay = ms => {\n\n return new Promise(resolve => setTimeout(resolve, ms));\n\n};\n\n\n\ndescribe(\"Moloch test\", () => {\n\n let alice;\n\n let aliceId;\n\n let bobId;\n\n let bob;\n\n let contractAccount;\n\n let proposalPeriod;\n\n\n\n const ftAccountId = `${process.env.FDAI_ACCOUNT_ID}.mrkeating.testnet`;\n\n const masterContractId = process.env.MASTER_ACCOUNT_ID;\n\n let contractAccountId = `${process.env.MOLOCH_ACCOUNT_ID}.${masterContractId}`;\n\n const now = Date.now();\n\n\n\n beforeAll(async () => {\n\n /// some users\n\n aliceId = \"alice-\" + now + \".\" + masterContractId;\n\n alice = await utils.getOrCreateAccount(aliceId);\n\n console.log(\"\\n\\n Alice accountId:\", aliceId, \"\\n\\n\");\n\n\n\n bobId = \"bob-\" + now + \".\" + masterContractId;\n\n bob = await getOrCreateAccount(bobId);\n\n console.log(\"\\n\\n Bob accountId:\", bobId, \"\\n\\n\");\n\n\n\n contractAccount = await getOrCreateAccount(contractAccountId);\n\n console.log(\"\\n\\n contract accountId:\", contractAccountId, \"\\n\\n\");\n\n\n\n ftAccount = await getOrCreateAccount(ftAccountId);\n\n console.log(\"\\n\\n contract accountId:\", ftAccountId, \"\\n\\n\");\n\n\n\n masterAccount = await getOrCreateAccount(masterContractId);\n\n console.log(\"\\n\\n master accountId:\", masterContractId, \"\\n\\n\");\n\n\n\n contract = new nearAPI.Contract(\n\n contractAccount,\n\n config.contractName,\n\n config.contractMethods\n\n );\n\n ftContract = new nearAPI.Contract(ftAccount, \"mrkeating.testnet\", {\n\n changeMethods: [\"ft_transfer\", \"ft_transfer_call\"],\n\n viewMethods: [\"ft_balance_of\"]\n\n });\n\n\n\n // register\n\n await registerFdai(masterAccount, masterContractId, ftAccountId);\n\n await registerFdai(masterAccount, aliceId, ftAccountId);\n\n await registerFdai(masterAccount, 
contractAccountId, ftAccountId);\n\n await registerFdai(masterAccount, bobId, ftAccountId);\n\n\n\n await registerMoloch(masterAccount, masterContractId, contractAccountId);\n\n await registerMoloch(masterAccount, aliceId, contractAccountId);\n\n await registerMoloch(masterAccount, bobId, contractAccountId);\n\n\n\n await transferFdai(masterAccount, aliceId, \"1000\", ftAccountId);\n\n await transferCallFdai(alice, contractAccountId, \"100\", ftAccountId);\n\n await transferCallFdai(\n\n masterAccount,\n\n contractAccountId,\n\n \"1000\",\n\n ftAccountId\n\n );\n\n }, 120000);\n\n\n\n // Create a proposal for bob\n\n test(\"Create a proposal for the first member\", async () => {\n\n await masterAccount.functionCall({\n\n contractId: contractAccountId,\n\n methodName: \"submit_proposal\",\n\n args: {\n\n applicant: aliceId,\n\n token_tribute: \"10\",\n\n shares_requested: \"10\",\n\n details: \"Let's add a second member\"\n\n },\n\n gas: 300000000000000\n\n });\n\n\n\n proposalPeriod = await getCurentPeriod(masterAccount, contractAccountId);\n\n\n\n // Check balances are correct\n\n // moloch balance 1100\n\n const molochBalance = await balanceOfFdai(\n\n masterAccount,\n\n contractAccountId,\n\n ftAccountId\n\n );\n\n expect(molochBalance).toEqual(\"1100\");\n\n // alice balance 900\n\n const aliceBalance = await balanceOfFdai(\n\n masterAccount,\n\n aliceId,\n\n ftAccountId\n\n );\n\n\n\n expect(aliceBalance).toEqual(\"900\");\n\n // Make sure the correct ammount is in escrow\n\n // Check queue length\n\n const bankBalance = await getBankBalance(masterAccount, contractAccountId);\n\n expect(bankBalance).toEqual(\"0\");\n\n\n\n const escrowBalance = await getEscrowBalance(\n\n masterAccount,\n\n aliceId,\n\n contractAccountId\n\n );\n\n expect(escrowBalance).toEqual(\"90\");\n\n\n\n const escrowBalanceMaster = await getEscrowBalance(\n\n masterAccount,\n\n masterContractId,\n\n contractAccountId\n\n );\n\n expect(escrowBalanceMaster).toEqual(\"990\");\n\n });\n\n\n\n test(\"Vote yes on member proposal\", async () => {\n\n // Wait one period\n\n await delay(1000 * 10);\n\n await masterAccount.functionCall({\n\n contractId: contractAccountId,\n\n methodName: \"submit_vote\",\n\n args: {\n\n proposal_index: \"0\", // change to 0\n\n uint_vote: 1\n\n }\n\n });\n\n\n\n let vote = await masterAccount.viewFunction(\n\n contractAccountId,\n\n \"get_member_proposal_vote\",\n\n {\n\n proposal_index: \"0\",\n\n member_id: masterContractId\n\n }\n\n );\n\n console.log(\"Vote\");\n\n console.log(vote);\n\n console.log(proposalPeriod);\n\n expect(vote).toEqual(\"Yes\");\n\n });\n\n\n\n test(\"Process passed proposal\", async () => {\n\n process_period = parseInt(proposalPeriod) + 3;\n\n console.log(process_period);\n\n current_period = await getCurentPeriod(masterAccount, contractAccountId);\n\n console.log(current_period);\n\n let periodsLeft = process_period - parseInt(current_period);\n\n if (periodsLeft >= 0) {\n\n await delay(10000 * (periodsLeft + 1));\n\n }\n\n await bob.functionCall({\n\n contractId: contractAccountId,\n\n methodName: \"process_proposal\",\n\n args: {\n\n proposal_index: \"0\"\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n\n\n\n const bankBalance = await getBankBalance(masterAccount, contractAccountId);\n\n expect(bankBalance).toEqual(\"10\");\n\n\n\n const aliceBalance = await balanceOfFdai(\n\n masterAccount,\n\n aliceId,\n\n ftAccountId\n\n );\n\n\n\n expect(aliceBalance).toEqual(\"900\");\n\n\n\n // Pay processing reward\n\n const bobBalance = await 
balanceOfFdai(masterAccount, bobId, ftAccountId);\n\n\n\n expect(bobBalance).toEqual(\"1\");\n\n\n\n // Return proposal deposit\n\n const escrowBalanceMaster = await getEscrowBalance(\n\n masterAccount,\n\n masterContractId,\n\n contractAccountId\n\n );\n\n expect(escrowBalanceMaster).toEqual(\"999\");\n\n\n\n const molochBalance = await balanceOfFdai(\n\n masterAccount,\n\n contractAccountId,\n\n ftAccountId\n\n );\n\n expect(molochBalance).toEqual(\"1099\");\n\n }, 120000);\n\n\n\n // rage quit\n\n test(\"Rage quit\", async () => {\n\n await alice.functionCall({\n\n contractId: contractAccountId,\n\n methodName: \"rage_quit\",\n\n args: {\n\n shares_to_burn: \"5\" // 5 of 11 total\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n\n\n\n // check the correct amount is withdrawn and sent alice\n\n const bankBalance = await getBankBalance(masterAccount, contractAccountId);\n\n // TODO: Double check this rounding is okay\n\n expect(bankBalance).toEqual(\"6\");\n\n });\n\n\n\n // withdraw\n\n\ttest(\"Escrow withdraw\", async () => {\n\n const start = await getEscrowBalance(masterAccount, aliceId, contractAccountId);\n\n\t\tconsole.log(start)\n\n await alice.functionCall({\n\n contractId: contractAccountId,\n\n methodName: \"escrow_withdraw\",\n\n args: {\n\n account_id: aliceId,\n\n amount: \"20\",\n\n },\n\n attachedDeposit: \"1\",\n\n gas: 300000000000000\n\n });\n\n\n\n const end = await getEscrowBalance(masterAccount, aliceId, contractAccountId);\n\n expect(start - end).toEqual(20);\n\n\t})\n\n\n\n});\n", "file_path": "test/moloch.test.js", "rank": 22, "score": 43550.08939809592 }, { "content": "// Register the given `user` with FT contract\n\npub fn register_user(user: &near_sdk_sim::UserAccount) {\n\n user.call(\n\n FDAI_ID.parse().unwrap(),\n\n \"storage_deposit\",\n\n &json!({\n\n \"account_id\": user.account_id()\n\n })\n\n .to_string()\n\n .into_bytes(),\n\n near_sdk_sim::DEFAULT_GAS / 2,\n\n near_sdk::env::storage_byte_cost() * 125, // attached deposit\n\n )\n\n .assert_success();\n\n}\n\n\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 23, "score": 41829.732105360396 }, { "content": "const fs = require(\"fs\");\n", "file_path": "test/utils.js", "rank": 24, "score": 40364.500755703884 }, { "content": "const BN = require(\"bn.js\");\n", "file_path": "test/utils.js", "rank": 25, "score": 40364.500755703884 }, { "content": "const credentials = JSON.parse(serializedCreds);\n", "file_path": "test/utils.js", "rank": 26, "score": 40364.500755703884 }, { "content": "const near = new Near({\n\n networkId: config.networkId,\n\n nodeUrl: config.nodeUrl,\n\n deps: { keyStore }\n", "file_path": "test/utils.js", "rank": 27, "score": 40364.500755703884 }, { "content": "const config = {\n\n explorerUrl: \"https://explorer.testnet.near.org\",\n\n networkId: \"testnet\",\n\n nodeUrl: \"https://rpc.testnet.near.org\",\n\n // walletUrl: 'http://localhost:1234',\n\n walletUrl: \"https://wallet.testnet.near.org\",\n\n helperUrl: \"https://helper.testnet.near.org\",\n\n GAS: \"200000000000000\",\n\n DEFAULT_NEW_ACCOUNT_AMOUNT: \".5\",\n\n DEFAULT_NEW_CONTRACT_AMOUNT: \"5\",\n\n contractName: \"mrkeating.testnet\",\n\n contractMethods: {\n\n changeMethods: [\n\n \"submit_proposal\",\n\n \"send_applicant_tribute\",\n\n \"submit_vote\",\n\n \"process_proposal\",\n\n \"rage_quit\",\n\n \"abort\",\n\n \"update_delegate_key\"\n\n ],\n\n viewMethods: [\n\n \"get_current_period\",\n\n \"get_member_proposal_vote\",\n\n \"has_voting_expired\"\n\n ]\n\n }\n", "file_path": 
"test/utils.js", "rank": 28, "score": 40364.500755703884 }, { "content": "const nearAPI = require(\"near-api-js\");\n", "file_path": "test/utils.js", "rank": 29, "score": 38598.36818768151 }, { "content": "const serializedCreds = fs\n\n .readFileSync(\n\n `${process.env.HOME}/.near-credentials/${config.networkId}/${config.contractName}.json`\n\n )\n", "file_path": "test/utils.js", "rank": 30, "score": 38598.36818768151 }, { "content": "const keyStore = new InMemoryKeyStore();\n", "file_path": "test/utils.js", "rank": 31, "score": 38598.36818768151 }, { "content": "const fs = require(\"fs\");\n\nconst BN = require(\"bn.js\");\n\nconst nearAPI = require(\"near-api-js\");\n\nconst {\n\n keyStores: { InMemoryKeyStore },\n\n Near,\n\n Account,\n\n Contract,\n\n KeyPair,\n\n utils: {\n\n format: { parseNearAmount }\n\n }\n\n} = nearAPI;\n\n\n\nconst config = {\n\n explorerUrl: \"https://explorer.testnet.near.org\",\n\n networkId: \"testnet\",\n\n nodeUrl: \"https://rpc.testnet.near.org\",\n\n // walletUrl: 'http://localhost:1234',\n\n walletUrl: \"https://wallet.testnet.near.org\",\n\n helperUrl: \"https://helper.testnet.near.org\",\n\n GAS: \"200000000000000\",\n\n DEFAULT_NEW_ACCOUNT_AMOUNT: \".5\",\n\n DEFAULT_NEW_CONTRACT_AMOUNT: \"5\",\n\n contractName: \"mrkeating.testnet\",\n\n contractMethods: {\n\n changeMethods: [\n\n \"submit_proposal\",\n\n \"send_applicant_tribute\",\n\n \"submit_vote\",\n\n \"process_proposal\",\n\n \"rage_quit\",\n\n \"abort\",\n\n \"update_delegate_key\"\n\n ],\n\n viewMethods: [\n\n \"get_current_period\",\n\n \"get_member_proposal_vote\",\n\n \"has_voting_expired\"\n\n ]\n\n }\n\n};\n\n\n\nconsole.log(\n\n \"Loading Credentials:\\n\",\n\n `${process.env.HOME}/.near-credentials/${config.networkId}/${config.contractName}.json`\n\n);\n\n\n\nconst serializedCreds = fs\n\n .readFileSync(\n\n `${process.env.HOME}/.near-credentials/${config.networkId}/${config.contractName}.json`\n\n )\n\n .toString();\n\nconst credentials = JSON.parse(serializedCreds);\n\nconst keyStore = new InMemoryKeyStore();\n\nkeyStore.setKey(\n\n config.networkId,\n\n config.contractName,\n\n KeyPair.fromString(credentials.private_key)\n\n);\n\n\n\nconst near = new Near({\n\n networkId: config.networkId,\n\n nodeUrl: config.nodeUrl,\n\n deps: { keyStore }\n\n});\n\nconst { connection } = near;\n\n\n\nfunction generateUniqueSubAccount() {\n\n return `t${Date.now()}.${config.contractName}`;\n\n}\n\n\n\nasync function createAccount(\n\n accountId,\n\n fundingAmount = config.DEFAULT_NEW_ACCOUNT_AMOUNT,\n\n secret\n\n) {\n\n const contractAccount = new Account(connection, config.contractName);\n\n const newKeyPair = secret\n\n ? 
KeyPair.fromString(secret)\n\n : KeyPair.fromRandom(\"ed25519\");\n\n await contractAccount.createAccount(\n\n accountId,\n\n newKeyPair.publicKey,\n\n new BN(parseNearAmount(fundingAmount))\n\n );\n\n keyStore.setKey(config.networkId, accountId, newKeyPair);\n\n return new nearAPI.Account(connection, accountId);\n\n}\n\n\n\nasync function getOrCreateAccount(\n\n accountId,\n\n fundingAmount = config.DEFAULT_NEW_ACCOUNT_AMOUNT\n\n) {\n\n accountId = accountId || generateUniqueSubAccount();\n\n const account = new nearAPI.Account(connection, accountId);\n\n try {\n\n await account.state();\n\n return account;\n\n } catch (e) {\n\n if (!/does not exist/.test(e.toString())) {\n\n throw e;\n\n }\n\n }\n\n return await createAccount(accountId, fundingAmount);\n\n}\n\n\n\nmodule.exports = {\n\n getOrCreateAccount,\n\n connection,\n\n config\n\n};\n", "file_path": "test/utils.js", "rank": 34, "score": 30572.2966086204 }, { "content": " deposit_amount.into(),\n\n 2u128.into(),\n\n 1u128.into()\n\n )\n\n );\n\n\n\n register_user(&moloch.user_account);\n\n call!(\n\n root,\n\n fdai.ft_transfer(alice.valid_account_id(), to_yocto(\"100\").into(), None),\n\n deposit = 1\n\n )\n\n .assert_success();\n\n\n\n call!(\n\n root,\n\n fdai.ft_transfer(bob.valid_account_id(), to_yocto(\"100\").into(), None),\n\n deposit = 1\n\n )\n\n .assert_success();\n\n\n\n (root, moloch, fdai, alice, bob, deposit_amount)\n\n}\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 35, "score": 27612.633454391744 }, { "content": "use moloch::MolochContract;\n\nuse near_sdk::serde_json::json;\n\nuse near_sdk_sim::{call, deploy, init_simulator, to_yocto, ContractAccount, UserAccount};\n\nuse test_fungible_token::ContractContract as FdaiContract;\n\n\n\nnear_sdk_sim::lazy_static_include::lazy_static_include_bytes! 
{\n\n MOLOCH_WASM_BYTES => \"res/moloch.wasm\",\n\n FDAI_WASM_BYTES => \"res/test_fungible_token.wasm\"\n\n}\n\n\n\nconst MOLOCH_ID: &str = \"moloch\";\n\nconst FDAI_ID: &str = \"fdai\";\n\n\n\n// Register the given `user` with FT contract\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 36, "score": 27612.12811596222 }, { "content": "mod test_moloch;\n\nmod test_storage;\n\nmod utils;\n", "file_path": "contracts/tests/sim/main.rs", "rank": 37, "score": 27606.485087044286 }, { "content": " register_user(&alice);\n\n register_user(&bob);\n\n register_user(&root);\n\n let deposit_amount = to_yocto(\"2\");\n\n\n\n println!(\"Account Id\");\n\n println!(\"{:?}\", fdai.user_account.account_id.to_string());\n\n let moloch = deploy!(\n\n contract: MolochContract,\n\n contract_id: MOLOCH_ID,\n\n bytes: &MOLOCH_WASM_BYTES,\n\n signer_account: root,\n\n init_method: new(\n\n bob.valid_account_id().to_string(),\n\n fdai.user_account.account_id.to_string(),\n\n // nanoseconds\n\n 10u64.pow(9).into(),\n\n 3u64.into(),\n\n 1u64.into(),\n\n 2u64.into(),\n", "file_path": "contracts/tests/sim/utils.rs", "rank": 38, "score": 27606.18370101171 }, { "content": " fn ft_metadata(&self) -> FungibleTokenMetadata {\n\n self.metadata.get().unwrap()\n\n }\n\n}\n\n\n\n#[cfg(all(test, not(target_arch = \"wasm32\")))]\n\nmod tests {\n\n use near_sdk::test_utils::{accounts, VMContextBuilder};\n\n use near_sdk::MockedBlockchain;\n\n use near_sdk::{testing_env, Balance};\n\n\n\n use super::*;\n\n\n\n const TOTAL_SUPPLY: Balance = 1_000_000_000_000_000;\n\n\n\n fn get_context(predecessor_account_id: ValidAccountId) -> VMContextBuilder {\n\n let mut builder = VMContextBuilder::new();\n\n builder\n\n .current_account_id(accounts(0))\n\n .signer_account_id(predecessor_account_id.clone())\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 39, "score": 25080.943343953037 }, { "content": " .predecessor_account_id(predecessor_account_id);\n\n builder\n\n }\n\n\n\n #[test]\n\n fn test_new() {\n\n let mut context = get_context(accounts(1));\n\n testing_env!(context.build());\n\n let contract = Contract::new_default_meta(accounts(1).into(), TOTAL_SUPPLY.into());\n\n testing_env!(context.is_view(true).build());\n\n assert_eq!(contract.ft_total_supply().0, TOTAL_SUPPLY);\n\n assert_eq!(contract.ft_balance_of(accounts(1)).0, TOTAL_SUPPLY);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"The contract is not initialized\")]\n\n fn test_default() {\n\n let context = get_context(accounts(1));\n\n testing_env!(context.build());\n\n let _contract = Contract::default();\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 40, "score": 25080.367011236525 }, { "content": "/*!\n\nhttps://github.com/near/near-sdk-rs/blob/master/examples/fungible-token/ft/src/lib.rs\n\nFungible Token implementation with JSON serialization.\n\nNOTES:\n\n - The maximum balance value is limited by U128 (2**128 - 1).\n\n - JSON calls should pass U128 as a base-10 string. E.g. \"100\".\n\n - The contract optimizes the inner trie structure by hashing account IDs. It will prevent some\n\n abuse of deep tries. Shouldn't be an issue, once NEAR clients implement full hashing of keys.\n\n - The contract tracks the change in storage before and after the call. 
If the storage increases,\n\n the contract requires the caller of the contract to attach enough deposit to the function call\n\n to cover the storage cost.\n\n This is done to prevent a denial of service attack on the contract by taking all available storage.\n\n If the storage decreases, the contract will issue a refund for the cost of the released storage.\n\n The unused tokens from the attached deposit are also refunded, so it's safe to\n\n attach more deposit than required.\n\n - To prevent the deployed contract from being modified or deleted, it should not have any access\n\n keys on its account.\n\n*/\n\nuse near_contract_standards::fungible_token::metadata::{\n\n FungibleTokenMetadata, FungibleTokenMetadataProvider, FT_METADATA_SPEC,\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 41, "score": 25079.94799916614 }, { "content": " }\n\n\n\n #[test]\n\n fn test_transfer() {\n\n let mut context = get_context(accounts(2));\n\n testing_env!(context.build());\n\n let mut contract = Contract::new_default_meta(accounts(2).into(), TOTAL_SUPPLY.into());\n\n testing_env!(context\n\n .storage_usage(env::storage_usage())\n\n .attached_deposit(contract.storage_balance_bounds().min.into())\n\n .predecessor_account_id(accounts(1))\n\n .build());\n\n // Paying for account registration, aka storage deposit\n\n contract.storage_deposit(None, None);\n\n\n\n testing_env!(context\n\n .storage_usage(env::storage_usage())\n\n .attached_deposit(1)\n\n .predecessor_account_id(accounts(2))\n\n .build());\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 42, "score": 25079.5669014057 }, { "content": " let transfer_amount = TOTAL_SUPPLY / 3;\n\n contract.ft_transfer(accounts(1), transfer_amount.into(), None);\n\n\n\n testing_env!(context\n\n .storage_usage(env::storage_usage())\n\n .account_balance(env::account_balance())\n\n .is_view(true)\n\n .attached_deposit(0)\n\n .build());\n\n assert_eq!(\n\n contract.ft_balance_of(accounts(2)).0,\n\n (TOTAL_SUPPLY - transfer_amount)\n\n );\n\n assert_eq!(contract.ft_balance_of(accounts(1)).0, transfer_amount);\n\n }\n\n}\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 43, "score": 25079.08940027398 }, { "content": "};\n\nuse near_contract_standards::fungible_token::FungibleToken;\n\nuse near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};\n\nuse near_sdk::collections::LazyOption;\n\nuse near_sdk::json_types::{ValidAccountId, U128};\n\nuse near_sdk::{\n\n env, log, near_bindgen, AccountId, Balance, BorshStorageKey, PanicOnDefault, PromiseOrValue,\n\n};\n\n\n\nnear_sdk::setup_alloc!();\n\n\n\n#[near_bindgen]\n\n#[derive(BorshDeserialize, BorshSerialize, PanicOnDefault)]\n\npub struct Contract {\n\n token: FungibleToken,\n\n metadata: LazyOption<FungibleTokenMetadata>,\n\n}\n\n\n\nconst DATA_IMAGE_SVG_NEAR_ICON: &str = \"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E\";\n\n\n\n#[derive(BorshSerialize, BorshStorageKey)]\n", 
"file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 44, "score": 25077.166745719358 }, { "content": " reference_hash: None,\n\n decimals: 24,\n\n },\n\n )\n\n }\n\n\n\n /// Initializes the contract with the given total supply owned by the given `owner_id` with\n\n /// the given fungible token metadata.\n\n #[init]\n\n pub fn new(\n\n owner_id: ValidAccountId,\n\n total_supply: U128,\n\n metadata: FungibleTokenMetadata,\n\n ) -> Self {\n\n assert!(!env::state_exists(), \"Already initialized\");\n\n metadata.assert_valid();\n\n let mut this = Self {\n\n token: FungibleToken::new(StorageKey::FungibleToken),\n\n metadata: LazyOption::new(StorageKey::Metadata, Some(&metadata)),\n\n };\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 45, "score": 25076.624851012577 }, { "content": " this.token.internal_register_account(owner_id.as_ref());\n\n this.token\n\n .internal_deposit(owner_id.as_ref(), total_supply.into());\n\n this\n\n }\n\n\n\n fn on_account_closed(&mut self, account_id: AccountId, balance: Balance) {\n\n log!(\"Closed @{} with {}\", account_id, balance);\n\n }\n\n\n\n fn on_tokens_burned(&mut self, account_id: AccountId, amount: Balance) {\n\n log!(\"Account @{} burned {}\", account_id, amount);\n\n }\n\n}\n\n\n\nnear_contract_standards::impl_fungible_token_core!(Contract, token, on_tokens_burned);\n\nnear_contract_standards::impl_fungible_token_storage!(Contract, token, on_account_closed);\n\n\n\n#[near_bindgen]\n\nimpl FungibleTokenMetadataProvider for Contract {\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 46, "score": 25073.683050972046 }, { "content": "#[derive(BorshSerialize, BorshStorageKey)]\n\nenum StorageKey {\n\n FungibleToken,\n\n Metadata,\n\n}\n\n\n\n#[near_bindgen]\n\nimpl Contract {\n\n /// Initializes the contract with the given total supply owned by the given `owner_id` with\n\n /// default metadata (for example purposes only).\n\n #[init]\n\n pub fn new_default_meta(owner_id: ValidAccountId, total_supply: U128) -> Self {\n\n Self::new(\n\n owner_id,\n\n total_supply,\n\n FungibleTokenMetadata {\n\n spec: FT_METADATA_SPEC.to_string(),\n\n name: \"Example NEAR fungible token\".to_string(),\n\n symbol: \"EXAMPLE\".to_string(),\n\n icon: Some(DATA_IMAGE_SVG_NEAR_ICON.to_string()),\n\n reference: None,\n", "file_path": "contracts/test-fungible-token/src/lib.rs", "rank": 47, "score": 22971.48592061685 }, { "content": " moloch\n\n }\n\n }\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::mocks::{\n\n alice, bob, get_context, get_context_builder, robert, storage_deposit, MockMember,\n\n MockMoloch, MockProposal,\n\n };\n\n use near_sdk::{testing_env, MockedBlockchain};\n\n use std::convert::TryInto;\n\n\n\n /// Tests for submit propposal\n\n #[test]\n\n fn submit_proposal() {\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 48, "score": 16844.38043102363 }, { "content": "\n\n #[test]\n\n fn get_member_proposal_vote_yes() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().delegate_key(bob()).build();\n\n let proposal = MockProposal::new().yes_vote(&member).build();\n\n let contract = MockMoloch::new().add_proposal(proposal).build();\n\n let vote = contract.get_member_proposal_vote(bob(), 0.into());\n\n assert_eq!(vote, Vote::Yes, \"Bob did not vote yes\")\n\n }\n\n\n\n #[test]\n\n fn get_member_proposal_vote_null() {\n\n let context = get_context(false);\n\n 
testing_env!(context);\n\n let member = MockMember::new().build();\n\n let proposal = MockProposal::new().yes_vote(&member).build();\n\n let contract = MockMoloch::new().add_proposal(proposal).build();\n\n let vote = contract.get_member_proposal_vote(bob(), 0.into());\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 49, "score": 16842.084108298 }, { "content": " fn only_member(&self) {\n\n let member = match self.members.get(&env::predecessor_account_id()) {\n\n Some(member) => member,\n\n None => Member::default(),\n\n };\n\n assert!(member != Member::default(), \"Account is not a member\");\n\n }\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n#[cfg(test)]\n\npub mod mocks {\n\n use super::*;\n\n use near_sdk::test_utils::VMContextBuilder;\n\n use near_sdk::VMContext;\n\n use std::convert::TryInto;\n\n\n\n pub fn get_context(is_view: bool) -> VMContext {\n\n VMContextBuilder::new()\n\n .signer_account_id(bob().try_into().unwrap())\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 50, "score": 16842.064344948878 }, { "content": " testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n contract.get_member_proposal_vote(bob(), 0.into());\n\n }\n\n #[test]\n\n fn get_user_escrow_balance() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.escrow.deposit(robert(), 10);\n\n let balance = contract.get_escrow_user_balance(robert());\n\n assert_eq!(u128::from(balance), 10, \"Guild escrow balance is incorrect\");\n\n }\n\n\n\n #[test]\n\n fn get_bank_balance() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.bank.deposit(10);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 51, "score": 16842.012552497144 }, { "content": " );\n\n assert_eq!(\n\n contract.total_shares, 23,\n\n \"Total shares has not been updated correctly\"\n\n );\n\n }\n\n\n\n // Test passed proposal New member, assert member saved,\n\n #[test]\n\n fn process_proposal_passed_new_member() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().shares(1).delegate_key(bob()).build();\n\n let proposal = MockProposal::new()\n\n .applicant(alice())\n\n .yes_vote(&member)\n\n .shares_requested(15)\n\n .build();\n\n\n\n let mut contract = MockMoloch::new()\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 52, "score": 16841.91984361847 }, { "content": " testing_env!(context);\n\n let mut contract = MockMoloch::new()\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.update_delegate_key(\"soda\".to_string());\n\n let old_key = contract.members_by_delegate_key.get(&bob()).unwrap();\n\n assert_eq!(\n\n old_key,\n\n \"\".to_string(),\n\n \"Old has not been updated to an empty string\"\n\n );\n\n let new_key = contract\n\n .members_by_delegate_key\n\n .get(&\"soda\".to_string())\n\n .unwrap();\n\n assert_eq!(new_key, bob().to_string(), \"New key has been created\");\n\n let member = contract.members.get(&bob()).unwrap();\n\n assert_eq!(\n\n member.delegate_key,\n\n \"soda\".to_string(),\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 53, "score": 16841.90252328613 }, { "content": " testing_env!(context);\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n\n\n let mut context_builder = 
get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp((contract.summoning_time + contract.period_duration).into())\n\n .build();\n\n\n\n testing_env!(context);\n\n contract.submit_vote(0.into(), 2);\n\n\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.yes_votes, 0);\n\n assert_eq!(proposal.no_votes, 1);\n\n let member = contract.members.get(&bob()).unwrap();\n\n assert_eq!(member.highest_index_yes_vote, 0);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 54, "score": 16841.699535705 }, { "content": " .add_escrow_deposit(bob(), 10)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.escrow_withdraw(bob(), 5.into());\n\n let bob_balance = contract.get_escrow_user_balance(bob());\n\n assert_eq!(u128::from(bob_balance), 5, \"Bob's balance is incorrect\");\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Predecessor account id does not equal withdrawl account id\"#)]\n\n fn escrow_withdraw_different_account() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_member(member)\n\n .add_escrow_deposit(robert(), 10)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.escrow_withdraw(robert(), 5.into());\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 55, "score": 16841.52459298937 }, { "content": " }\n\n\n\n #[test]\n\n fn process_proposal_failed() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().build();\n\n let proposal = MockProposal::new().no_vote(&member).build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(member)\n\n .add_escrow_deposit(bob(), 400)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.processed, false, \"Proposal has been processed\");\n\n assert_eq!(\n\n contract.total_shares_requested, proposal.shares_requested,\n\n \"Total shares requested has not been set correctly\",\n\n );\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 56, "score": 16841.449679172652 }, { "content": " \"Member does not have the correct number not shares\"\n\n );\n\n assert_eq!(contract.total_shares, 11, \"Total shares is not correct\");\n\n let bob_balance = contract.get_escrow_user_balance(bob());\n\n assert_eq!(u128::from(bob_balance), 490, \"Bob's balance is incorrect\");\n\n let robert_balance = contract.get_escrow_user_balance(robert());\n\n assert_eq!(\n\n u128::from(robert_balance),\n\n 12,\n\n \"Robert's balance is incorrect\"\n\n );\n\n let bank_balance = contract.get_bank_balance();\n\n assert_eq!(u128::from(bank_balance), 0, \"Bank balance is incorrect\");\n\n }\n\n\n\n // Test failed proposal aborted\n\n #[test]\n\n fn process_proposal_failed_aborted() {\n\n let context = get_context(false);\n\n testing_env!(context);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 57, "score": 16841.29058032339 }, { "content": " );\n\n let bob_balance = contract.get_escrow_user_balance(bob());\n\n assert_eq!(u128::from(bob_balance), 490, \"Bob's balance is incorrect\");\n\n let bank_balance = contract.get_bank_balance();\n\n assert_eq!(u128::from(bank_balance), 12, \"Bank balance is incorrect\");\n\n }\n\n\n\n // Test passed proposal existing member, Assert shares are added\n\n #[test]\n\n fn 
process_proposal_passed_existing_member() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().shares(1).delegate_key(bob()).build();\n\n let proposal = MockProposal::new()\n\n .applicant(alice())\n\n .yes_vote(&member)\n\n .shares_requested(15)\n\n .build();\n\n\n\n let existing_member = MockMember::new().shares(7).delegate_key(alice()).build();\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 58, "score": 16841.008348394964 }, { "content": " )\n\n .build();\n\n testing_env!(context);\n\n contract.process_proposal(0.into());\n\n\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.processed, true, \"Proposal has not been processed\");\n\n assert_eq!(\n\n contract.total_shares_requested, 0,\n\n \"Number of requested shares has not been subtracted\",\n\n );\n\n let member = contract.members.get(&robert()).unwrap();\n\n assert_eq!(\n\n member.shares, 10,\n\n \"Member does not have the correct number not shares\"\n\n );\n\n assert_eq!(contract.total_shares, 11, \"Total shares is not correct\");\n\n\n\n let bob_balance = contract.get_escrow_user_balance(bob());\n\n assert_eq!(u128::from(bob_balance), 490, \"Bob's balance is incorrect\");\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 59, "score": 16840.460348180437 }, { "content": " let contract = MockMoloch::new()\n\n .add_proposal(proposal_one)\n\n .add_proposal(proposal_two)\n\n .build();\n\n let period = contract.get_proposal_queue_length();\n\n assert_eq!(u64::from(period), 2)\n\n }\n\n\n\n #[test]\n\n fn can_rage_quit() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().processed(true).build();\n\n let contract = MockMoloch::new().add_proposal(proposal).build();\n\n let can = contract.can_rage_quit(0.into());\n\n assert_eq!(can, true)\n\n }\n\n\n\n #[test]\n\n fn can_rage_quit_false() {\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 60, "score": 16840.13064480388 }, { "content": " assert_eq!(vote, Vote::Null, \"Bob has not voted yes yet\")\n\n }\n\n\n\n // Member does not exist\n\n #[test]\n\n #[should_panic(expected = r#\"Member does not exist\"#)]\n\n fn get_member_proposal_vote_member_does_not_exist() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().build();\n\n let proposal = MockProposal::new().yes_vote(&member).build();\n\n let contract = MockMoloch::new().add_proposal(proposal).build();\n\n contract.get_member_proposal_vote(robert(), 0.into());\n\n }\n\n\n\n // Proposal does not exist\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal does not exist\"#)]\n\n fn get_member_proposal_vote_proposal_does_not_exist() {\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 61, "score": 16840.034169466526 }, { "content": " testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n let expired = contract.has_voting_period_expired(0.into());\n\n assert_eq!(expired, false, \"The voting period has expired\")\n\n }\n\n\n\n #[test]\n\n fn has_voting_period_expired_yes() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .block_timestamp(\n\n contract.summoning_time + contract.period_duration * contract.voting_period_length\n\n )\n\n .build());\n\n let expired = contract.has_voting_period_expired(0.into());\n\n 
assert_eq!(expired, true, \"The voting period has not expired\")\n\n }\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 62, "score": 16840.032677567633 }, { "content": " let balance = contract.get_bank_balance();\n\n assert_eq!(u128::from(balance), 10, \"Guild bank balance is incorrect\");\n\n }\n\n\n\n #[test]\n\n fn only_delegate() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n contract.only_delegate()\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Account is not a delegate\"#)]\n\n fn only_delegate_not() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n\n\n let mut context = get_context_builder(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 63, "score": 16839.973273891857 }, { "content": "\n\n #[test]\n\n #[should_panic(expected = r#\"Account is not a delegate\"#)]\n\n fn submit_proposal_not_delegate() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().summoner(robert()).build();\n\n contract.submit_proposal(robert(), 10.into(), 10.into(), \"\".to_string());\n\n }\n\n\n\n // Single vote\n\n #[test]\n\n fn submit_vote_yes() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 64, "score": 16839.729254993326 }, { "content": " let mut context_builder = get_context_builder(false);\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let block_time = contract.summoning_time + contract.period_duration;\n\n let context = context_builder.block_timestamp(block_time.into()).build();\n\n testing_env!(context);\n\n\n\n contract.submit_vote(0.into(), 2);\n\n contract.submit_vote(0.into(), 1);\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.no_votes, 1)\n\n }\n\n\n\n // proposal has been aborted\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal has been aborted\"#)]\n\n pub fn submit_vote_proposal_aborted() {\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 65, "score": 16839.70092237345 }, { "content": " }\n\n\n\n // Getter\n\n #[test]\n\n fn get_current_period() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n let period = contract.get_current_period();\n\n assert_eq!(u64::from(period), 0, \"Current period is not 0\")\n\n }\n\n\n\n #[test]\n\n fn get_current_period_after_three_periods() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .block_timestamp(contract.summoning_time + contract.period_duration * 3)\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 66, "score": 16839.682840865633 }, { "content": " #[should_panic(expected = r#\"Too many shares were requested\"#)]\n\n fn submit_proposal_shares_requested_overflow() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.submit_proposal(robert(), 10.into(), 
u128::MAX.into(), \"\".to_string());\n\n }\n\n\n\n #[test]\n\n #[should_panic(\n\n expected = r#\"Too many shares were requested: due to outstanding shares requested\"#\n\n )]\n\n fn submit_proposal_total_shares_requested_overflow() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new()\n\n .add_escrow_deposit(bob(), 100)\n\n .add_escrow_deposit(robert(), 10)\n\n .total_shares_requested(u128::MAX.saturating_sub(1).into())\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 67, "score": 16839.6681686466 }, { "content": " .build());\n\n let period = contract.get_current_period();\n\n assert_eq!(u64::from(period), 3, \"Current period is not 3\")\n\n }\n\n\n\n #[test]\n\n fn get_proposal_queue_length() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n let period = contract.get_proposal_queue_length();\n\n assert_eq!(u64::from(period), 0)\n\n }\n\n\n\n #[test]\n\n fn get_proposal_queue_length_two() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal_one = MockProposal::new().build();\n\n let proposal_two = MockProposal::new().build();\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 68, "score": 16839.53918262926 }, { "content": " aborted: false,\n\n token_tribute: 20,\n\n details: \"\".to_string(),\n\n max_total_shares_at_yes_vote: 0,\n\n votes_by_member: HashMap::new(),\n\n };\n\n assert_eq!(proposal.unwrap(), expected_proposal);\n\n assert_eq!(contract.total_shares_requested, 30);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"applicant must be a valid account id\"#)]\n\n fn submit_proposal_invalid_account() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.submit_proposal(\"\".to_string(), 10.into(), 10.into(), \"\".to_string());\n\n }\n\n\n\n #[test]\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 69, "score": 16839.48081681779 }, { "content": " let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().processed(false).build();\n\n let contract = MockMoloch::new().add_proposal(proposal).build();\n\n let can = contract.can_rage_quit(0.into());\n\n assert_eq!(can, false)\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal does not exist\"#)]\n\n fn can_rage_quit_proposal_does_not_exist() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n contract.can_rage_quit(0.into());\n\n }\n\n\n\n #[test]\n\n fn has_voting_period_expired_no() {\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 70, "score": 16839.472426735807 }, { "content": " testing_env!(context);\n\n let mut contract = MockMoloch::new()\n\n .add_escrow_deposit(robert(), 13)\n\n .add_escrow_deposit(bob(), 101)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.submit_proposal(robert(), 12.into(), 10.into(), \"\".to_string());\n\n\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .storage_usage(env::storage_usage())\n\n .attached_deposit(1)\n\n .build());\n\n\n\n let proposal = contract.proposal_queue.get(0);\n\n let expected_proposal = Proposal {\n\n proposer: bob(),\n\n applicant: robert(),\n\n shares_requested: 10,\n\n starting_period: 1,\n", "file_path": 
"contracts/moloch/src/lib.rs", "rank": 71, "score": 16839.46984013453 }, { "content": "\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp((contract.summoning_time + contract.period_duration).into())\n\n .build();\n\n testing_env!(context);\n\n contract.submit_vote(0.into(), 1);\n\n\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.yes_votes, 1);\n\n assert_eq!(proposal.no_votes, 0);\n\n assert_eq!(proposal.max_total_shares_at_yes_vote, 1);\n\n let member = contract.members.get(&bob()).unwrap();\n\n assert_eq!(member.highest_index_yes_vote, 0);\n\n }\n\n\n\n #[test]\n\n fn submit_vote_no() {\n\n // Setup\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 72, "score": 16839.470762895744 }, { "content": " yes_votes: 0,\n\n no_votes: 0,\n\n processed: false,\n\n did_pass: false,\n\n aborted: false,\n\n token_tribute: 12,\n\n details: \"\".to_string(),\n\n max_total_shares_at_yes_vote: 0,\n\n votes_by_member: HashMap::new(),\n\n };\n\n\n\n assert_eq!(proposal.unwrap(), expected_proposal);\n\n assert_eq!(contract.total_shares_requested, 10);\n\n }\n\n\n\n #[test]\n\n fn submit_proposal_multiple_proposals() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new()\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 73, "score": 16839.383659857678 }, { "content": " assert_eq!(proposal_one.aborted, false);\n\n }\n\n\n\n // Proposal does not exist\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal does not exist\"#)]\n\n fn submit_vote_no_existing_proposal() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.submit_vote(0.into(), 1)\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"uint vote must be less than 3, 1 is yes 2 is no\"#)]\n\n pub fn submit_vote_undefined_vote() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 74, "score": 16839.36563859566 }, { "content": " .build();\n\n contract.submit_proposal(robert(), 10.into(), 1.into(), \"\".to_string());\n\n }\n\n #[test]\n\n #[should_panic(expected = r#\"Too many shares were requested: greater than max shares\"#)]\n\n fn submit_proposal_too_many_shares_submitted() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new()\n\n .add_escrow_deposit(bob(), 100)\n\n .add_escrow_deposit(robert(), 10)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.submit_proposal(\n\n robert(),\n\n 10.into(),\n\n u128::MAX.saturating_sub(1).into(),\n\n \"\".to_string(),\n\n );\n\n }\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 75, "score": 16839.317642040547 }, { "content": " testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .block_timestamp(contract.summoning_time + contract.period_duration)\n\n .build());\n\n contract.abort(0.into());\n\n }\n\n\n\n // Caller is not applicant\n\n #[test]\n\n #[should_panic(expected = r#\"Calling account is not the proposal applicant\"#)]\n\n fn abort_proposal_calling_account_is_not_applicant() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = 
MockProposal::new().applicant(robert()).build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .block_timestamp(contract.summoning_time + contract.period_duration)\n\n .build());\n\n contract.abort(0.into());\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 76, "score": 16839.31014576046 }, { "content": " let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(existing_member)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp(\n\n contract.summoning_time\n\n + (contract.period_duration\n\n * (contract.voting_period_length + contract.grace_period_length + 1)),\n\n )\n\n .build();\n\n testing_env!(context);\n\n contract.process_proposal(0.into());\n\n\n\n let member = contract.members.get(&alice()).unwrap();\n\n assert_eq!(\n\n member.shares, 22,\n\n \"Member does not have the correct number not shares\"\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 77, "score": 16839.27367213415 }, { "content": " testing_env!(context);\n\n let member = MockMember::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_member(member)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.update_delegate_key(\"soda\".to_string());\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n contract.update_delegate_key(\"soda\".to_string());\n\n }\n\n #[test]\n\n fn escrow_withdraw() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().build();\n\n let mut contract = MockMoloch::new()\n\n .add_member(member)\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 78, "score": 16839.228009502614 }, { "content": " let robert_balance = contract.get_escrow_user_balance(robert());\n\n assert_eq!(\n\n u128::from(robert_balance),\n\n 12,\n\n \"Robert's balance is incorrect\"\n\n );\n\n let bank_balance = contract.get_bank_balance();\n\n assert_eq!(u128::from(bank_balance), 0, \"Bank balance is incorrect\");\n\n }\n\n\n\n // Proposall does not exist\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal does not exist\"#)]\n\n fn process_proposal_does_not_exist() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n contract.process_proposal(0.into());\n\n }\n\n\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 79, "score": 16839.120344528797 }, { "content": " // Make sure two periods pass so each proposal can\n\n // be voted on\n\n let mut context_builder = get_context_builder(false);\n\n let context_block_timestamp = context_builder\n\n .block_timestamp((contract.summoning_time + contract.period_duration * 2).into());\n\n testing_env!(context_block_timestamp.build());\n\n\n\n // Actions by bob\n\n contract.submit_vote(0.into(), 1);\n\n testing_env!(context_block_timestamp.build());\n\n contract.submit_vote(1.into(), 1);\n\n let member = contract.members.get(&bob()).unwrap();\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(\n\n member.highest_index_yes_vote, 1,\n\n \"Highest proposal index is wrong\"\n\n );\n\n assert_eq!(\n\n proposal.max_total_shares_at_yes_vote, 81,\n\n \"Max number of total shares is wrong\"\n", 
"file_path": "contracts/moloch/src/lib.rs", "rank": 80, "score": 16839.079795584392 }, { "content": " let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .block_timestamp(contract.summoning_time + contract.period_duration)\n\n .build());\n\n contract.abort(0.into());\n\n\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.aborted, true);\n\n assert_eq!(proposal.token_tribute, 0);\n\n }\n\n\n\n // Proposal does not exist\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal does not exist\"#)]\n\n fn abort_proposal_does_not_exist() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n let mut context_builder = get_context_builder(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 81, "score": 16838.886021025486 }, { "content": " let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(member)\n\n .add_escrow_deposit(bob(), 400)\n\n .add_escrow_deposit(robert(), 100)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .register_user(alice(), storage_deposit(), storage_deposit())\n\n .build();\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.processed, false, \"Proposal has been processed\");\n\n assert_eq!(\n\n contract.total_shares_requested, proposal.shares_requested,\n\n \"Total shares requested has not been set correctly\",\n\n );\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp(\n\n contract.summoning_time\n\n + (contract.period_duration\n\n * (contract.voting_period_length + contract.grace_period_length + 1)),\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 82, "score": 16838.274918958858 }, { "content": " let member = MockMember::new().build();\n\n let proposal = MockProposal::new().yes_vote(&member).aborted(true).build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(member)\n\n .add_escrow_deposit(bob(), 400)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.processed, false, \"Proposal has been processed\");\n\n assert_eq!(\n\n contract.total_shares_requested, proposal.shares_requested,\n\n \"Total shares requested has not been set correctly\",\n\n );\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp(\n\n contract.summoning_time\n\n + (contract.period_duration\n\n * (contract.voting_period_length + contract.grace_period_length + 1)),\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 83, "score": 16838.14690500005 }, { "content": " let proposal = MockProposal::new().processed(true).build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(robert_member_info)\n\n .build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n\n\n contract.rage_quit(30.into());\n\n assert_eq!(\n\n contract.total_shares, 1,\n\n \"Total shares have not properly been subtracted\"\n\n );\n\n let member = contract.members.get(&robert()).unwrap();\n\n assert_eq!(\n\n member.shares, 0,\n\n \"Shares have not been properly subtracted by a memeber\"\n\n );\n", "file_path": 
"contracts/moloch/src/lib.rs", "rank": 84, "score": 16838.09714203795 }, { "content": " testing_env!(context\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n contract.only_delegate()\n\n }\n\n\n\n #[test]\n\n fn only_member() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n contract.only_member()\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Account is not a member\"#)]\n\n fn only_member_not() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let contract = MockMoloch::new().build();\n\n\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n contract.only_member()\n\n }\n\n}\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 85, "score": 16837.757381350526 }, { "content": " assert_eq!(\n\n member.delegate_key,\n\n alice().to_string(),\n\n \"Member delegate_key is incorrect\"\n\n );\n\n assert_eq!(member.exists, true, \"Member does not exists\");\n\n assert_eq!(\n\n member.highest_index_yes_vote, 0,\n\n \"Highest index vote is incorrect\"\n\n );\n\n assert_eq!(member_delegate_key, alice().to_string());\n\n assert_eq!(\n\n contract.total_shares, 16,\n\n \"Total shares has not been updated correctly\"\n\n );\n\n }\n\n\n\n // Test passed proposal New member, delegate_key already exists\n\n #[test]\n\n fn process_proposal_passed_new_member_existing_delegate_key() {\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 86, "score": 16837.637259423154 }, { "content": " #[test]\n\n #[should_panic(expected = r#\"Proposal is not ready to be processed\"#)]\n\n fn process_proposal_not_ready_to_be_processed() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n\n contract.process_proposal(0.into());\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal has already been processed\"#)]\n\n fn process_proposal_already_processed() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().processed(true).build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .block_timestamp(\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 87, "score": 16837.301154066117 }, { "content": " }\n\n\n\n #[test]\n\n fn submit_vote_complex() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal_one = MockProposal::new().build();\n\n let proposal_two = MockProposal::new().build();\n\n let robert_member_info = MockMember::new().delegate_key(robert()).shares(30).build();\n\n let alice_member_info = MockMember::new().delegate_key(alice()).shares(50).build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal_one)\n\n .add_proposal(proposal_two)\n\n .add_member(robert_member_info)\n\n .add_member(alice_member_info)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .register_user(alice(), storage_deposit(), storage_deposit())\n\n .register_user(robert(), storage_deposit(), storage_deposit())\n\n .build();\n\n\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 88, "score": 16837.254639055962 }, { "content": " let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = 
MockMoloch::new().build();\n\n contract.update_delegate_key(\"\".to_string());\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Can't overwrite an existing members delegate_key\"#)]\n\n fn update_delegate_cannot_be_an_existing_member() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().build();\n\n let mut contract = MockMoloch::new().add_member(member).build();\n\n contract.update_delegate_key(robert().to_string());\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Can't overwrite existing delegate keys\"#)]\n\n fn update_delegate_cannot_be_an_existing_member_from_delegate_key() {\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 89, "score": 16837.194549071923 }, { "content": " .add_proposal(proposal)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp(\n\n contract.summoning_time\n\n + (contract.period_duration\n\n * (contract.voting_period_length + contract.grace_period_length + 1)),\n\n )\n\n .build();\n\n testing_env!(context);\n\n contract.process_proposal(0.into());\n\n\n\n let member = contract.members.get(&alice()).unwrap();\n\n let member_delegate_key = contract.members_by_delegate_key.get(&alice()).unwrap();\n\n assert_eq!(\n\n member.shares, 15,\n\n \"Member does not have the correct number not shares\"\n\n );\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 90, "score": 16837.186273594718 }, { "content": " let context = get_context(false);\n\n testing_env!(context);\n\n let member = MockMember::new().shares(10).delegate_key(robert()).build();\n\n let proposal = MockProposal::new()\n\n .applicant(alice())\n\n .yes_vote(&member)\n\n .shares_requested(15)\n\n .build();\n\n\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .add_member(member)\n\n .update_member_delegate_key(&alice(), &robert())\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n let mut context_builder = get_context_builder(false);\n\n let context = context_builder\n\n .block_timestamp(\n\n contract.summoning_time\n\n + (contract.period_duration\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 91, "score": 16837.124717046678 }, { "content": " let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n\n\n contract.rage_quit(20.into());\n\n }\n\n\n\n // Simple abort\n\n #[test]\n\n fn abort() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let proposal = MockProposal::new().applicant(robert()).build();\n\n let mut contract = MockMoloch::new()\n\n .add_proposal(proposal)\n\n .register_user(robert(), storage_deposit(), storage_deposit())\n\n .build();\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 92, "score": 16837.10840312222 }, { "content": " contract.rage_quit(20.into());\n\n assert_eq!(\n\n contract.total_shares, 11,\n\n \"Total shares have not properly been subtracted\"\n\n );\n\n let member = contract.members.get(&robert()).unwrap();\n\n assert_eq!(\n\n member.shares, 10,\n\n \"Shares have not been properly subtracted by a memeber\"\n\n );\n\n }\n\n\n\n // Highest proposal has not been indexed\n\n #[test]\n\n #[should_panic(\n\n expected = r#\"Can't 
rage quit until the highest index proposal member voted YES is processed\"#\n\n )]\n\n fn rage_quit_cant() {\n\n let context = get_context(false);\n\n testing_env!(context);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 93, "score": 16837.100841782252 }, { "content": " )\n\n .predecessor_account_id(alice().try_into().unwrap())\n\n .build();\n\n testing_env!(context);\n\n contract.process_proposal(0.into());\n\n\n\n let proposal = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal.processed, true, \"Proposal has not been processed\");\n\n assert_eq!(\n\n contract.total_shares_requested, 0,\n\n \"Number of requested shares has not been subtracted\",\n\n );\n\n let member = contract.members.get(&robert()).unwrap();\n\n assert_eq!(\n\n member.shares, 20,\n\n \"Member does not have the correct number not shares\"\n\n );\n\n assert_eq!(\n\n contract.total_shares, 21,\n\n \"Total shares has not been updated correctly\"\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 94, "score": 16836.80211970171 }, { "content": " .add_escrow_deposit(bob(), 200)\n\n .add_escrow_deposit(robert(), 32)\n\n .register_user(bob(), storage_deposit(), storage_deposit())\n\n .build();\n\n contract.submit_proposal(robert(), 12.into(), 10.into(), \"\".to_string());\n\n\n\n let context = get_context(false);\n\n testing_env!(context);\n\n contract.submit_proposal(robert(), 20.into(), 20.into(), \"\".to_string());\n\n\n\n let proposal = contract.proposal_queue.get(1);\n\n let expected_proposal = Proposal {\n\n proposer: bob(),\n\n applicant: robert(),\n\n shares_requested: 20,\n\n starting_period: 2,\n\n yes_votes: 0,\n\n no_votes: 0,\n\n processed: false,\n\n did_pass: false,\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 95, "score": 16836.80354672408 }, { "content": "\n\n contract.submit_vote(0.into(), 1);\n\n testing_env!(context_block_timestamp.build());\n\n contract.submit_vote(1.into(), 1);\n\n let member = contract.members.get(&alice()).unwrap();\n\n assert_eq!(\n\n member.highest_index_yes_vote, 1,\n\n \"Highest proposal index is wrong for alice\"\n\n );\n\n\n\n let proposal_zero = contract.proposal_queue.get(0).unwrap();\n\n assert_eq!(proposal_zero.yes_votes, 81);\n\n assert_eq!(proposal_zero.no_votes, 0);\n\n assert_eq!(proposal_zero.processed, false);\n\n assert_eq!(proposal_zero.aborted, false);\n\n\n\n let proposal_one = contract.proposal_queue.get(1).unwrap();\n\n assert_eq!(proposal_one.yes_votes, 51);\n\n assert_eq!(proposal_one.no_votes, 30);\n\n assert_eq!(proposal_one.processed, false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 96, "score": 16836.803378366203 }, { "content": " \"Member delegate key has not been updated\"\n\n );\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Account is not a member\"#)]\n\n fn update_delegate_key_only_member() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n let mut contract = MockMoloch::new().build();\n\n let mut context_builder = get_context_builder(false);\n\n testing_env!(context_builder\n\n .predecessor_account_id(robert().try_into().unwrap())\n\n .build());\n\n contract.update_delegate_key(\"soda\".to_string());\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = r#\"Delegate key cannot be an empty string\"#)]\n\n fn update_delegate_key_empty_string() {\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 97, "score": 16836.72308072061 }, { "content": " contract.submit_vote(0.into(), 3)\n\n }\n\n\n\n // voting has not begun\n\n #[test]\n\n #[should_panic(expected = r#\"Voting 
period has not begun\"#)]\n\n pub fn submit_vote_has_not_begun() {\n\n let context = get_context(false);\n\n testing_env!(context);\n\n\n\n let proposal = MockProposal::new().build();\n\n let mut contract = MockMoloch::new().add_proposal(proposal).build();\n\n contract.submit_vote(0.into(), 2)\n\n }\n\n\n\n // voting has expired\n\n #[test]\n\n #[should_panic(expected = r#\"Proposal voting period has expired\"#)]\n\n pub fn submit_vote_expired() {\n\n let context = get_context(false);\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 98, "score": 16836.605822917583 }, { "content": " \"Proposal does not exist\"\n\n );\n\n // Get the proposal\n\n let mut proposal = self.proposal_queue.get(_proposal_index).unwrap();\n\n // Check sender is the applicant\n\n assert!(\n\n env::predecessor_account_id() == proposal.applicant,\n\n \"Calling account is not the proposal applicant\"\n\n );\n\n\n\n // Check if abort window has passed\n\n let current_period = self.get_current_period();\n\n let abort_window = proposal.starting_period.saturating_add(self.abort_window);\n\n assert!(\n\n u64::from(current_period) < u64::from(abort_window),\n\n \"Abort window has passed!\"\n\n );\n\n // Check if proposal has been aborted\n\n assert!(!proposal.aborted, \"Proposal has already been aborted\");\n\n // Reset proposal params for abort\n", "file_path": "contracts/moloch/src/lib.rs", "rank": 99, "score": 16836.60095289981 } ]
Rust
src/uucore/src/lib/macros.rs
E3uka/coreutils
c7f7a222b9a2e86a68b204b417fbe23e7df01e3f
use std::sync::atomic::AtomicBool; pub static UTILITY_IS_SECOND_ARG: AtomicBool = AtomicBool::new(false); #[macro_export] macro_rules! show( ($err:expr) => ({ let e = $err; $crate::error::set_exit_code(e.code()); eprintln!("{}: {}", $crate::util_name(), e); }) ); #[macro_export] macro_rules! show_if_err( ($res:expr) => ({ if let Err(e) = $res { show!(e); } }) ); #[macro_export] macro_rules! show_error( ($($args:tt)+) => ({ eprint!("{}: ", $crate::util_name()); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_error_custom_description ( ($err:expr,$($args:tt)+) => ({ eprint!("{}: {}: ", $crate::util_name(), $err); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_warning( ($($args:tt)+) => ({ eprint!("{}: warning: ", $crate::util_name()); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_usage_error( ($($args:tt)+) => ({ eprint!("{}: ", $crate::util_name()); eprintln!($($args)+); eprintln!("Try '{} --help' for more information.", $crate::execution_phrase()); }) ); #[macro_export] macro_rules! exit( ($exit_code:expr) => ({ ::std::process::exit($exit_code) }) ); #[macro_export] macro_rules! crash( ($exit_code:expr, $($args:tt)+) => ({ $crate::show_error!($($args)+); $crate::exit!($exit_code) }) ); #[macro_export] macro_rules! crash_if_err( ($exit_code:expr, $exp:expr) => ( match $exp { Ok(m) => m, Err(f) => $crate::crash!($exit_code, "{}", f), } ) ); #[macro_export] macro_rules! safe_unwrap( ($exp:expr) => ( match $exp { Ok(m) => m, Err(f) => $crate::crash!(1, "{}", f.to_string()) } ) ); #[macro_export] macro_rules! return_if_err( ($exit_code:expr, $exp:expr) => ( match $exp { Ok(m) => m, Err(f) => { $crate::show_error!("{}", f); return $exit_code; } } ) ); #[macro_export] macro_rules! safe_writeln( ($fd:expr, $($args:tt)+) => ( match writeln!($fd, $($args)+) { Ok(_) => {} Err(f) => panic!("{}", f) } ) ); #[macro_export] macro_rules! snippet_list_join_oxford_comma { ($conjunction:expr, $valOne:expr, $valTwo:expr) => ( format!("{}, {} {}", $valOne, $conjunction, $valTwo) ); ($conjunction:expr, $valOne:expr, $valTwo:expr $(, $remaining_values:expr)*) => ( format!("{}, {}", $valOne, $crate::snippet_list_join_oxford_comma!($conjunction, $valTwo $(, $remaining_values)*)) ); } #[macro_export] macro_rules! snippet_list_join { ($conjunction:expr, $valOne:expr, $valTwo:expr) => ( format!("{} {} {}", $valOne, $conjunction, $valTwo) ); ($conjunction:expr, $valOne:expr, $valTwo:expr $(, $remaining_values:expr)*) => ( format!("{}, {}", $valOne, $crate::snippet_list_join_oxford_comma!($conjunction, $valTwo $(, $remaining_values)*)) ); } #[macro_export] macro_rules! msg_invalid_input { ($reason: expr) => { format!("invalid input: {}", $reason) }; } #[macro_export] macro_rules! msg_invalid_opt_use { ($about:expr, $flag:expr) => { $crate::msg_invalid_input!(format!("The '{}' option {}", $flag, $about)) }; ($about:expr, $long_flag:expr, $short_flag:expr) => { $crate::msg_invalid_input!(format!( "The '{}' ('{}') option {}", $long_flag, $short_flag, $about )) }; } #[macro_export] macro_rules! msg_opt_only_usable_if { ($clause:expr, $flag:expr) => { $crate::msg_invalid_opt_use!(format!("only usable if {}", $clause), $flag) }; ($clause:expr, $long_flag:expr, $short_flag:expr) => { $crate::msg_invalid_opt_use!( format!("only usable if {}", $clause), $long_flag, $short_flag ) }; } #[macro_export] macro_rules! 
msg_opt_invalid_should_be { ($expects:expr, $received:expr, $flag:expr) => { $crate::msg_invalid_opt_use!( format!("expects {}, but was provided {}", $expects, $received), $flag ) }; ($expects:expr, $received:expr, $long_flag:expr, $short_flag:expr) => { $crate::msg_invalid_opt_use!( format!("expects {}, but was provided {}", $expects, $received), $long_flag, $short_flag ) }; } #[macro_export] macro_rules! msg_expects_one_of { ($valOne:expr $(, $remaining_values:expr)*) => ( $crate::msg_invalid_input!(format!("expects one of {}", $crate::snippet_list_join!("or", $valOne $(, $remaining_values)*))) ); } #[macro_export] macro_rules! msg_expects_no_more_than_one_of { ($valOne:expr $(, $remaining_values:expr)*) => ( $crate::msg_invalid_input!(format!("expects no more than one of {}", $crate::snippet_list_join!("or", $valOne $(, $remaining_values)*))) ; ); }
use std::sync::atomic::AtomicBool; pub static UTILITY_IS_SECOND_ARG: AtomicBool = AtomicBool::new(false); #[macro_export] macro_rules! show( ($err:expr) => ({ let e = $err; $crate::error::set_exit_code(e.code()); eprintln!("{}: {}", $crate::util_name(), e); }) ); #[macro_export] macro_rules! show_if_err( ($res:expr) => ({ if let Err(e) = $res { show!(e); } }) ); #[macro_export] macro_rules! show_error( ($($args:tt)+) => ({ eprint!("{}: ", $crate::util_name()); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_error_custom_description ( ($err:expr,$($args:tt)+) => ({ eprint!("{}: {}: ", $crate::util_name(), $err); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_warning( ($($args:tt)+) => ({ eprint!("{}: warning: ", $crate::util_name()); eprintln!($($args)+); }) ); #[macro_export] macro_rules! show_usage_error( ($($args:tt)+) => ({ eprint!("{}: ", $crate::util_name()); eprintln!($($args)+); eprintln!("Try '{} --help' for more information.", $crate::execution_phrase()); }) ); #[macro_export] macro_rules! exit( ($exit_code:expr) => ({ ::std::process::exit($exit_code) }) ); #[macro_export] macro_rules! crash( ($exit_code:expr, $($args:tt)+) => ({ $crate::show_error!($($args)+); $crate::exit!($exit_code) }) ); #[macro_export] macro_rules! crash_if_err( ($exit_code:expr, $exp:expr) => ( match $exp { Ok(m) => m, Err(f) => $crate::crash!($exit_code, "{}", f), } ) ); #[macro_export] macro_rules! safe_unwrap( ($exp:expr) => ( match $exp { Ok(m) => m, Err(f) => $crate::crash!(1, "{}", f.to_string()) } ) ); #[macro_export] macro_rules! return_if_err( ($exit_code:expr, $exp:expr) => ( match $exp { Ok
expr, $short_flag:expr) => { $crate::msg_invalid_opt_use!( format!("only usable if {}", $clause), $long_flag, $short_flag ) }; } #[macro_export] macro_rules! msg_opt_invalid_should_be { ($expects:expr, $received:expr, $flag:expr) => { $crate::msg_invalid_opt_use!( format!("expects {}, but was provided {}", $expects, $received), $flag ) }; ($expects:expr, $received:expr, $long_flag:expr, $short_flag:expr) => { $crate::msg_invalid_opt_use!( format!("expects {}, but was provided {}", $expects, $received), $long_flag, $short_flag ) }; } #[macro_export] macro_rules! msg_expects_one_of { ($valOne:expr $(, $remaining_values:expr)*) => ( $crate::msg_invalid_input!(format!("expects one of {}", $crate::snippet_list_join!("or", $valOne $(, $remaining_values)*))) ); } #[macro_export] macro_rules! msg_expects_no_more_than_one_of { ($valOne:expr $(, $remaining_values:expr)*) => ( $crate::msg_invalid_input!(format!("expects no more than one of {}", $crate::snippet_list_join!("or", $valOne $(, $remaining_values)*))) ; ); }
(m) => m, Err(f) => { $crate::show_error!("{}", f); return $exit_code; } } ) ); #[macro_export] macro_rules! safe_writeln( ($fd:expr, $($args:tt)+) => ( match writeln!($fd, $($args)+) { Ok(_) => {} Err(f) => panic!("{}", f) } ) ); #[macro_export] macro_rules! snippet_list_join_oxford_comma { ($conjunction:expr, $valOne:expr, $valTwo:expr) => ( format!("{}, {} {}", $valOne, $conjunction, $valTwo) ); ($conjunction:expr, $valOne:expr, $valTwo:expr $(, $remaining_values:expr)*) => ( format!("{}, {}", $valOne, $crate::snippet_list_join_oxford_comma!($conjunction, $valTwo $(, $remaining_values)*)) ); } #[macro_export] macro_rules! snippet_list_join { ($conjunction:expr, $valOne:expr, $valTwo:expr) => ( format!("{} {} {}", $valOne, $conjunction, $valTwo) ); ($conjunction:expr, $valOne:expr, $valTwo:expr $(, $remaining_values:expr)*) => ( format!("{}, {}", $valOne, $crate::snippet_list_join_oxford_comma!($conjunction, $valTwo $(, $remaining_values)*)) ); } #[macro_export] macro_rules! msg_invalid_input { ($reason: expr) => { format!("invalid input: {}", $reason) }; } #[macro_export] macro_rules! msg_invalid_opt_use { ($about:expr, $flag:expr) => { $crate::msg_invalid_input!(format!("The '{}' option {}", $flag, $about)) }; ($about:expr, $long_flag:expr, $short_flag:expr) => { $crate::msg_invalid_input!(format!( "The '{}' ('{}') option {}", $long_flag, $short_flag, $about )) }; } #[macro_export] macro_rules! msg_opt_only_usable_if { ($clause:expr, $flag:expr) => { $crate::msg_invalid_opt_use!(format!("only usable if {}", $clause), $flag) }; ($clause:expr, $long_flag:
random
[ { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .about(\"A file perusal filter for CRT viewing.\")\n\n .version(crate_version!())\n\n .arg(\n\n Arg::with_name(options::SILENT)\n\n .short(\"d\")\n\n .long(options::SILENT)\n\n .help(\"Display help instead of ringing bell\"),\n\n )\n\n // The commented arguments below are unimplemented:\n\n /*\n\n .arg(\n\n Arg::with_name(options::LOGICAL)\n\n .short(\"f\")\n\n .long(options::LOGICAL)\n\n .help(\"Count logical rather than screen lines\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::NO_PAUSE)\n", "file_path": "src/uu/more/src/more.rs", "rank": 0, "score": 211698.63024461188 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::ALL)\n\n .long(options::ALL)\n\n .short(\"a\")\n\n .help(\"same as -b -d --login -p -r -t -T -u\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::BOOT)\n\n .long(options::BOOT)\n\n .short(\"b\")\n\n .help(\"time of last system boot\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::DEAD)\n\n .long(options::DEAD)\n\n .short(\"d\")\n", "file_path": "src/uu/who/src/who.rs", "rank": 1, "score": 211698.63024461188 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .arg(Arg::with_name(options::NUMBER).multiple(true))\n\n}\n", "file_path": "src/uu/factor/src/cli.rs", "rank": 2, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .usage(USAGE)\n\n .arg(\n\n Arg::with_name(options::BYTES)\n\n .short(\"c\")\n\n .long(options::BYTES)\n\n .takes_value(true)\n\n .allow_hyphen_values(true)\n\n .overrides_with_all(&[options::BYTES, options::LINES])\n\n .help(\"Number of bytes to print\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::FOLLOW)\n\n .short(\"f\")\n\n .long(options::FOLLOW)\n\n .help(\"Print the file as it grows\"),\n\n )\n", "file_path": "src/uu/tail/src/tail.rs", "rank": 3, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .usage(USAGE)\n\n .after_help(AFTER_HELP)\n\n .setting(AppSettings::AllowExternalSubcommands)\n\n .arg(Arg::with_name(\"ignore-environment\")\n\n .short(\"i\")\n\n .long(\"ignore-environment\")\n\n .help(\"start with an empty environment\"))\n\n .arg(Arg::with_name(\"chdir\")\n\n .short(\"C\") // GNU env compatibility\n\n .long(\"chdir\")\n\n .takes_value(true)\n\n .number_of_values(1)\n\n .value_name(\"DIR\")\n\n .help(\"change working directory to DIR\"))\n\n .arg(Arg::with_name(\"null\")\n", "file_path": "src/uu/env/src/env.rs", "rank": 4, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .usage(SYNTAX)\n\n}\n\n\n", "file_path": "src/uu/hostid/src/hostid.rs", "rank": 5, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .about(ABOUT)\n\n .version(crate_version!())\n\n .arg(\n\n Arg::with_name(options::ZERO)\n\n .long(options::ZERO)\n\n .short(\"z\")\n\n .help(\"separate output with NUL rather than newline\"),\n\n )\n\n 
.arg(Arg::with_name(options::DIR).hidden(true).multiple(true))\n\n}\n", "file_path": "src/uu/dirname/src/dirname.rs", "rank": 6, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .setting(AppSettings::AllowLeadingHyphen)\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_SEPARATOR)\n\n .short(\"s\")\n\n .long(\"separator\")\n\n .help(\"Separator character (defaults to \\\\n)\")\n\n .takes_value(true)\n\n .number_of_values(1),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_TERMINATOR)\n\n .short(\"t\")\n\n .long(\"terminator\")\n\n .help(\"Terminator character (defaults to \\\\n)\")\n\n .takes_value(true)\n\n .number_of_values(1),\n", "file_path": "src/uu/seq/src/seq.rs", "rank": 7, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n let mut app = base_common::base_app(ABOUT);\n\n for encoding in ENCODINGS {\n\n app = app.arg(Arg::with_name(encoding.0).long(encoding.0));\n\n }\n\n app\n\n}\n\n\n", "file_path": "src/uu/basenc/src/basenc.rs", "rank": 8, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .template(TEMPLATE)\n\n .usage(USAGE)\n\n .arg(\n\n Arg::with_name(options::ECHO)\n\n .short(\"e\")\n\n .long(options::ECHO)\n\n .takes_value(true)\n\n .value_name(\"ARG\")\n\n .help(\"treat each ARG as an input line\")\n\n .multiple(true)\n\n .use_delimiter(false)\n\n .min_values(0)\n\n .conflicts_with(options::INPUT_RANGE),\n\n )\n\n .arg(\n\n Arg::with_name(options::INPUT_RANGE)\n", "file_path": "src/uu/shuf/src/shuf.rs", "rank": 9, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(VERSION)\n\n .about(ABOUT)\n\n .after_help(DESCRIPTION)\n\n .arg(\n\n Arg::with_name(options::COMPUTE)\n\n .short(\"c\")\n\n .long(options::COMPUTE)\n\n .takes_value(false)\n\n .help(\"Compute process transition context before modifying.\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::USER)\n\n .short(\"u\")\n\n .long(options::USER)\n\n .takes_value(true)\n\n .value_name(\"USER\")\n\n .help(\"Set user USER in the target security context.\"),\n\n )\n", "file_path": "src/uu/runcon/src/runcon.rs", "rank": 10, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::FILE)\n\n .default_value(\"-\")\n\n .hidden(true),\n\n )\n\n}\n\n\n", "file_path": "src/uu/tsort/src/tsort.rs", "rank": 11, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_LOGICAL)\n\n .short(\"L\")\n\n .long(OPT_LOGICAL)\n\n .help(\"use PWD from environment, even if it contains symlinks\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_PHYSICAL)\n\n .short(\"P\")\n\n .long(OPT_PHYSICAL)\n\n .overrides_with(OPT_LOGICAL)\n\n .help(\"avoid all symlinks\"),\n\n )\n\n}\n", "file_path": "src/uu/pwd/src/pwd.rs", "rank": 12, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n\n\n .arg(\n\n Arg::with_name(OPT_FORCE)\n\n .short(\"f\")\n\n .long(OPT_FORCE)\n\n .multiple(true)\n\n 
.help(\"ignore nonexistent files and arguments, never prompt\")\n\n )\n\n .arg(\n\n Arg::with_name(OPT_PROMPT)\n\n .short(\"i\")\n\n .long(\"prompt before every removal\")\n\n )\n\n .arg(\n\n Arg::with_name(OPT_PROMPT_MORE)\n\n .short(\"I\")\n", "file_path": "src/uu/rm/src/rm.rs", "rank": 13, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_NULL)\n\n .short(\"0\")\n\n .long(OPT_NULL)\n\n .help(\"end each output line with 0 byte rather than newline\"),\n\n )\n\n .arg(\n\n Arg::with_name(ARG_VARIABLES)\n\n .multiple(true)\n\n .takes_value(true)\n\n .min_values(1),\n\n )\n\n}\n", "file_path": "src/uu/printenv/src/printenv.rs", "rank": 14, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::COLUMN_1)\n\n .short(options::COLUMN_1)\n\n .help(\"suppress column 1 (lines unique to FILE1)\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::COLUMN_2)\n\n .short(options::COLUMN_2)\n\n .help(\"suppress column 2 (lines unique to FILE2)\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::COLUMN_3)\n\n .short(options::COLUMN_3)\n\n .help(\"suppress column 3 (lines that appear in both files)\"),\n\n )\n", "file_path": "src/uu/comm/src/comm.rs", "rank": 15, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .usage(SYNTAX)\n\n .arg(\n\n Arg::with_name(options::NEWROOT)\n\n .hidden(true)\n\n .required(true)\n\n .index(1),\n\n )\n\n .arg(\n\n Arg::with_name(options::USER)\n\n .short(\"u\")\n\n .long(options::USER)\n\n .help(\"User (ID or name) to switch before running the program\")\n\n .value_name(\"USER\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::GROUP)\n", "file_path": "src/uu/chroot/src/chroot.rs", "rank": 16, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .usage(SYNTAX)\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n}\n", "file_path": "src/uu/cksum/src/cksum.rs", "rank": 17, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n .arg(\n\n Arg::with_name(options::BODY_NUMBERING)\n\n .short(\"b\")\n\n .long(options::BODY_NUMBERING)\n\n .help(\"use STYLE for numbering body lines\")\n\n .value_name(\"SYNTAX\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::SECTION_DELIMITER)\n\n .short(\"d\")\n\n .long(options::SECTION_DELIMITER)\n\n .help(\"use CC for separating logical pages\")\n\n .value_name(\"CC\"),\n\n )\n", "file_path": "src/uu/nl/src/nl.rs", "rank": 18, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::verbosity::CHANGES)\n\n .short(\"c\")\n\n .long(options::verbosity::CHANGES)\n\n .help(\"like verbose but report only when a change is made\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::dereference::DEREFERENCE)\n\n 
.long(options::dereference::DEREFERENCE)\n\n .help(\n\n \"affect the referent of each symbolic link (this is the default), \\\n\n rather than the symbolic link itself\",\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(options::dereference::NO_DEREFERENCE)\n", "file_path": "src/uu/chown/src/chown.rs", "rank": 19, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .after_help(LONG_HELP)\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(\"mode\")\n\n .short(\"m\")\n\n .long(\"mode\")\n\n .value_name(\"MODE\")\n\n .help(\"set file permission bits to MODE, not a=rw - umask\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"name\")\n\n .value_name(\"NAME\")\n\n .help(\"name of the new file\")\n\n .required(true)\n\n .index(1),\n\n )\n", "file_path": "src/uu/mknod/src/mknod.rs", "rank": 20, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(VERSION)\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::dereference::DEREFERENCE)\n\n .long(options::dereference::DEREFERENCE)\n\n .conflicts_with(options::dereference::NO_DEREFERENCE)\n\n .help(\n\n \"Affect the referent of each symbolic link (this is the default), \\\n\n rather than the symbolic link itself.\",\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(options::dereference::NO_DEREFERENCE)\n\n .short(\"h\")\n\n .long(options::dereference::NO_DEREFERENCE)\n\n .help(\"Affect symbolic links instead of any referenced file.\"),\n\n )\n\n .arg(\n", "file_path": "src/uu/chcon/src/chcon.rs", "rank": 21, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(SYNTAX)\n\n .about(SUMMARY)\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n .arg(\n\n Arg::with_name(options::SHOW_ALL)\n\n .short(\"A\")\n\n .long(options::SHOW_ALL)\n\n .help(\"equivalent to -vET\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::NUMBER_NONBLANK)\n\n .short(\"b\")\n\n .long(options::NUMBER_NONBLANK)\n\n .help(\"number nonempty output lines, overrides -n\")\n\n .overrides_with(options::NUMBER),\n\n )\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 22, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::CHANGES)\n\n .long(options::CHANGES)\n\n .short(\"c\")\n\n .help(\"like verbose but report only when a change is made\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::QUIET)\n\n .long(options::QUIET)\n\n .visible_alias(\"silent\")\n\n .short(\"f\")\n\n .help(\"suppress most error messages\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::VERBOSE)\n\n .long(options::VERBOSE)\n", "file_path": "src/uu/chmod/src/chmod.rs", "rank": 23, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::modes::SORT)\n\n .long(options::modes::SORT)\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"general-numeric\",\n\n \"human-numeric\",\n\n \"month\",\n\n \"numeric\",\n\n \"version\",\n\n \"random\",\n\n ])\n\n .conflicts_with_all(&options::modes::ALL_SORT_MODES),\n\n )\n\n .arg(make_sort_mode_arg(\n\n options::modes::HUMAN_NUMERIC,\n", "file_path": 
"src/uu/sort/src/sort.rs", "rank": 24, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n base_common::base_app(ABOUT)\n\n}\n", "file_path": "src/uu/base32/src/base32.rs", "rank": 25, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(NAME)\n\n .version(crate_version!())\n\n .about(\n\n \"For each pair of input lines with identical join fields, write a line to\n\nstandard output. The default join field is the first, delimited by blanks.\n\n\n\nWhen FILE1 or FILE2 (not both) is -, read standard input.\",\n\n )\n\n .help_message(\"display this help and exit\")\n\n .version_message(\"display version and exit\")\n\n .arg(\n\n Arg::with_name(\"a\")\n\n .short(\"a\")\n\n .multiple(true)\n\n .number_of_values(1)\n\n .possible_values(&[\"1\", \"2\"])\n\n .value_name(\"FILENUM\")\n\n .help(\n\n \"also print unpairable lines from file FILENUM, where\n", "file_path": "src/uu/join/src/join.rs", "rank": 26, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_DOMAIN)\n\n .short(\"d\")\n\n .long(\"domain\")\n\n .overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT])\n\n .help(\"Display the name of the DNS domain if possible\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_IP_ADDRESS)\n\n .short(\"i\")\n\n .long(\"ip-address\")\n\n .overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT])\n\n .help(\"Display the network address(es) of the host\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_FQDN)\n", "file_path": "src/uu/hostname/src/hostname.rs", "rank": 27, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n backup_control::arguments::backup()\n\n )\n\n .arg(\n\n backup_control::arguments::backup_no_args()\n\n )\n\n .arg(\n\n Arg::with_name(OPT_FORCE)\n\n .short(\"f\")\n\n .long(OPT_FORCE)\n\n .help(\"do not prompt before overwriting\")\n\n )\n\n .arg(\n\n Arg::with_name(OPT_INTERACTIVE)\n\n .short(\"i\")\n\n .long(OPT_INTERACTIVE)\n", "file_path": "src/uu/mv/src/mv.rs", "rank": 28, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_CROWN_MARGIN)\n\n .short(\"c\")\n\n .long(OPT_CROWN_MARGIN)\n\n .help(\n\n \"First and second line of paragraph \\\n\n may have different indentations, in which \\\n\n case the first line's indentation is preserved, \\\n\n and each subsequent line's indentation matches the second line.\",\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_TAGGED_PARAGRAPH)\n\n .short(\"t\")\n\n .long(\"tagged-paragraph\")\n\n .help(\n", "file_path": "src/uu/fmt/src/fmt.rs", "rank": 29, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::DEREFERENCE)\n\n .short(\"L\")\n\n .long(options::DEREFERENCE)\n\n .help(\"follow links\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::FILE_SYSTEM)\n\n .short(\"f\")\n\n .long(options::FILE_SYSTEM)\n\n .help(\"display file system status instead of file status\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::TERSE)\n\n .short(\"t\")\n\n .long(options::TERSE)\n", 
"file_path": "src/uu/stat/src/stat.rs", "rank": 30, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::IO_BLOCKS)\n\n .short(\"o\")\n\n .long(options::IO_BLOCKS)\n\n .help(\"treat SIZE as the number of I/O blocks of the file rather than bytes (NOT IMPLEMENTED)\")\n\n )\n\n .arg(\n\n Arg::with_name(options::NO_CREATE)\n\n .short(\"c\")\n\n .long(options::NO_CREATE)\n\n .help(\"do not create files that do not exist\")\n\n )\n\n .arg(\n\n Arg::with_name(options::REFERENCE)\n\n .short(\"r\")\n\n .long(options::REFERENCE)\n", "file_path": "src/uu/truncate/src/truncate.rs", "rank": 31, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(Arg::with_name(OPT_PATH).required(true).hidden(true))\n\n}\n", "file_path": "src/uu/unlink/src/unlink.rs", "rank": 32, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::ACCESS)\n\n .short(\"a\")\n\n .help(\"change only the access time\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::sources::CURRENT)\n\n .short(\"t\")\n\n .help(\"use [[CC]YY]MMDDhhmm[.ss] instead of the current time\")\n\n .value_name(\"STAMP\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(options::sources::DATE)\n\n .short(\"d\")\n\n .long(options::sources::DATE)\n", "file_path": "src/uu/touch/src/touch.rs", "rank": 33, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n backup_control::arguments::backup()\n\n )\n\n .arg(\n\n backup_control::arguments::backup_no_args()\n\n )\n\n .arg(\n\n Arg::with_name(OPT_IGNORED)\n\n .short(\"c\")\n\n .help(\"ignored\")\n\n )\n\n .arg(\n\n Arg::with_name(OPT_COMPARE)\n\n .short(\"C\")\n\n .long(OPT_COMPARE)\n\n .help(\"compare each pair of source and destination files, and in some cases, do not modify the destination at all\")\n", "file_path": "src/uu/install/src/install.rs", "rank": 34, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::COMPLEMENT)\n\n // .visible_short_alias('C') // TODO: requires clap \"3.0.0-beta.2\"\n\n .short(\"c\")\n\n .long(options::COMPLEMENT)\n\n .help(\"use the complement of SET1\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"C\") // work around for `Arg::visible_short_alias`\n\n .short(\"C\")\n\n .help(\"same as -c\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::DELETE)\n\n .short(\"d\")\n\n .long(options::DELETE)\n", "file_path": "src/uu/tr/src/tr.rs", "rank": 35, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_DIRECTORY)\n\n .short(\"d\")\n\n .long(OPT_DIRECTORY)\n\n .help(\"Make a directory instead of a file\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_DRY_RUN)\n\n .short(\"u\")\n\n .long(OPT_DRY_RUN)\n\n .help(\"do not create anything; merely print a name (unsafe)\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_QUIET)\n\n .short(\"q\")\n\n 
.long(\"quiet\")\n", "file_path": "src/uu/mktemp/src/mktemp.rs", "rank": 36, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(SYNTAX)\n\n .about(SUMMARY)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::BYTES)\n\n .short(\"b\")\n\n .long(options::BYTES)\n\n .takes_value(true)\n\n .help(\"filter byte columns from the input source\")\n\n .allow_hyphen_values(true)\n\n .value_name(\"LIST\")\n\n .display_order(1),\n\n )\n\n .arg(\n\n Arg::with_name(options::CHARACTERS)\n\n .short(\"c\")\n", "file_path": "src/uu/cut/src/cut.rs", "rank": 37, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .setting(AppSettings::TrailingVarArg)\n\n .arg(\n\n Arg::with_name(options::INPUT)\n\n .long(options::INPUT)\n\n .short(options::INPUT_SHORT)\n\n .help(\"adjust standard input stream buffering\")\n\n .value_name(\"MODE\")\n\n .required_unless_one(&[options::OUTPUT, options::ERROR]),\n\n )\n\n .arg(\n\n Arg::with_name(options::OUTPUT)\n\n .long(options::OUTPUT)\n\n .short(options::OUTPUT_SHORT)\n\n .help(\"adjust standard output stream buffering\")\n\n .value_name(\"MODE\")\n", "file_path": "src/uu/stdbuf/src/stdbuf.rs", "rank": 38, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(Arg::with_name(ARG_FILES).takes_value(true).max_values(1))\n\n}\n", "file_path": "src/uu/users/src/users.rs", "rank": 39, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::NUMBER)\n\n .long(options::NUMBER)\n\n .help(\"pause for NUMBER seconds\")\n\n .value_name(options::NUMBER)\n\n .index(1)\n\n .multiple(true)\n\n .required(true),\n\n )\n\n}\n\n\n", "file_path": "src/uu/sleep/src/sleep.rs", "rank": 40, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(SYNTAX)\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::BYTES)\n\n .long(options::BYTES)\n\n .short(\"b\")\n\n .help(\n\n \"count using bytes rather than columns (meaning control characters \\\n\n such as newline are not treated specially)\",\n\n )\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(options::SPACES)\n\n .long(options::SPACES)\n\n .short(\"s\")\n", "file_path": "src/uu/fold/src/fold.rs", "rank": 41, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n // TrailingVarArg specifies the final positional argument is a VarArg\n\n // and it doesn't attempts the parse any further args.\n\n // Final argument must have multiple(true) or the usage string equivalent.\n\n .setting(clap::AppSettings::TrailingVarArg)\n\n .setting(clap::AppSettings::AllowLeadingHyphen)\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .after_help(AFTER_HELP)\n\n .usage(USAGE)\n\n .arg(\n\n Arg::with_name(options::NO_NEWLINE)\n\n .short(\"n\")\n\n .help(\"do not output the trailing newline\")\n\n .takes_value(false)\n\n .display_order(1),\n\n )\n\n .arg(\n", 
"file_path": "src/uu/echo/src/echo.rs", "rank": 42, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n}\n", "file_path": "src/uu/logname/src/logname.rs", "rank": 43, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_ALL)\n\n .short(\"\")\n\n .long(OPT_ALL)\n\n .help(\"print the number of cores available to the system\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_IGNORE)\n\n .short(\"\")\n\n .long(OPT_IGNORE)\n\n .takes_value(true)\n\n .help(\"ignore up to N cores\"),\n\n )\n\n}\n\n\n\n#[cfg(any(\n\n target_os = \"linux\",\n\n target_vendor = \"apple\",\n\n target_os = \"freebsd\",\n\n target_os = \"netbsd\"\n\n))]\n", "file_path": "src/uu/nproc/src/nproc.rs", "rank": 44, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .setting(AppSettings::DisableHelpFlags)\n\n .setting(AppSettings::DisableVersion)\n\n}\n\n\n", "file_path": "src/uu/test/src/test.rs", "rank": 45, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(Arg::with_name(options::ALL)\n\n .short(\"a\")\n\n .long(options::ALL)\n\n .help(\"Behave as though all of the options -mnrsv were specified.\"))\n\n .arg(Arg::with_name(options::KERNELNAME)\n\n .short(\"s\")\n\n .long(options::KERNELNAME)\n\n .alias(\"sysname\") // Obsolescent option in GNU uname\n\n .help(\"print the kernel name.\"))\n\n .arg(Arg::with_name(options::NODENAME)\n\n .short(\"n\")\n\n .long(options::NODENAME)\n\n .help(\"print the nodename (the nodename may be a name that the system is known by to a communications network).\"))\n\n .arg(Arg::with_name(options::KERNELRELEASE)\n\n .short(\"r\")\n\n .long(options::KERNELRELEASE)\n", "file_path": "src/uu/uname/src/uname.rs", "rank": 46, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::SERIAL)\n\n .long(options::SERIAL)\n\n .short(\"s\")\n\n .help(\"paste one file at a time instead of in parallel\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::DELIMITER)\n\n .long(options::DELIMITER)\n\n .short(\"d\")\n\n .help(\"reuse characters from LIST instead of TABs\")\n\n .value_name(\"LIST\")\n\n .default_value(\"\\t\")\n\n .hide_default_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(options::FILE)\n\n .value_name(\"FILE\")\n\n .multiple(true)\n\n .default_value(\"-\"),\n\n )\n\n}\n\n\n", "file_path": "src/uu/paste/src/paste.rs", "rank": 47, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::BYTES)\n\n .short(\"c\")\n\n .long(options::BYTES)\n\n .help(\"print the byte counts\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::CHAR)\n\n .short(\"m\")\n\n .long(options::CHAR)\n\n .help(\"print the character counts\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::LINES)\n\n .short(\"l\")\n\n .long(options::LINES)\n", "file_path": "src/uu/wc/src/wc.rs", "rank": 48, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> 
{\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n}\n", "file_path": "src/uu/whoami/src/whoami.rs", "rank": 49, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(\"If a FILE is -, it refers to a file named - .\")\n\n .arg(\n\n Arg::with_name(options::APPEND)\n\n .long(options::APPEND)\n\n .short(\"a\")\n\n .help(\"append to the given FILEs, do not overwrite\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::IGNORE_INTERRUPTS)\n\n .long(options::IGNORE_INTERRUPTS)\n\n .short(\"i\")\n\n .help(\"ignore interrupt signals (ignored on non-Unix platforms)\"),\n\n )\n\n .arg(Arg::with_name(options::FILE).multiple(true))\n\n}\n\n\n", "file_path": "src/uu/tee/src/tee.rs", "rank": 50, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::LONG_FORMAT)\n\n .short(\"l\")\n\n .requires(options::USER)\n\n .help(\"produce long format output for the specified USERs\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::OMIT_HOME_DIR)\n\n .short(\"b\")\n\n .help(\"omit the user's home directory and shell in long format\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::OMIT_PROJECT_FILE)\n\n .short(\"h\")\n\n .help(\"omit the user's project file in long format\"),\n\n )\n", "file_path": "src/uu/pinky/src/pinky.rs", "rank": 51, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::ALL)\n\n .short(\"a\")\n\n .long(options::ALL)\n\n .help(\"write counts for all files, not just directories\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::APPARENT_SIZE)\n\n .long(options::APPARENT_SIZE)\n\n .help(\n\n \"print apparent sizes, rather than disk usage \\\n\n although the apparent size is usually smaller, it may be larger due to holes \\\n\n in ('sparse') files, internal fragmentation, indirect blocks, and the like\"\n\n )\n\n .alias(\"app\") // The GNU test suite uses this alias\n", "file_path": "src/uu/du/src/du.rs", "rank": 52, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .setting(AppSettings::TrailingVarArg)\n\n .version(crate_version!())\n\n .arg(\n\n Arg::with_name(options::ADJUSTMENT)\n\n .short(\"n\")\n\n .long(options::ADJUSTMENT)\n\n .help(\"add N to the niceness (default is 10)\")\n\n .takes_value(true)\n\n .allow_hyphen_values(true),\n\n )\n\n .arg(Arg::with_name(options::COMMAND).multiple(true))\n\n}\n", "file_path": "src/uu/nice/src/nice.rs", "rank": 53, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::SILENT)\n\n .long(options::SILENT)\n\n .visible_alias(\"quiet\")\n\n .short(\"s\")\n\n .help(\"print nothing, only return an exit status\")\n\n .required(false),\n\n )\n\n}\n", "file_path": "src/uu/tty/src/tty.rs", "rank": 54, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(\"timeout\")\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::FOREGROUND)\n\n .long(options::FOREGROUND)\n\n 
.help(\"when not running timeout directly from a shell prompt, allow COMMAND to read from the TTY and get TTY signals; in this mode, children of COMMAND will not be timed out\")\n\n )\n\n .arg(\n\n Arg::with_name(options::KILL_AFTER)\n\n .short(\"k\")\n\n .takes_value(true))\n\n .arg(\n\n Arg::with_name(options::PRESERVE_STATUS)\n\n .long(options::PRESERVE_STATUS)\n\n .help(\"exit with the same status as COMMAND, even when the command times out\")\n\n )\n\n .arg(\n\n Arg::with_name(options::SIGNAL)\n", "file_path": "src/uu/timeout/src/timeout.rs", "rank": 55, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(Arg::with_name(options::TARGET_DIRECTORY)\n\n .short(\"t\")\n\n .conflicts_with(options::NO_TARGET_DIRECTORY)\n\n .long(options::TARGET_DIRECTORY)\n\n .value_name(options::TARGET_DIRECTORY)\n\n .takes_value(true)\n\n .help(\"copy all SOURCE arguments into target-directory\"))\n\n .arg(Arg::with_name(options::NO_TARGET_DIRECTORY)\n\n .short(\"T\")\n\n .long(options::NO_TARGET_DIRECTORY)\n\n .conflicts_with(options::TARGET_DIRECTORY)\n\n .help(\"Treat DEST as a regular file and not a directory\"))\n\n .arg(Arg::with_name(options::INTERACTIVE)\n\n .short(\"i\")\n\n .long(options::INTERACTIVE)\n\n .conflicts_with(options::NO_CLOBBER)\n", "file_path": "src/uu/cp/src/cp.rs", "rank": 56, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::MODE)\n\n .short(\"m\")\n\n .long(options::MODE)\n\n .help(\"set file mode (not implemented on windows)\")\n\n .default_value(\"755\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::PARENTS)\n\n .short(\"p\")\n\n .long(options::PARENTS)\n\n .alias(\"parent\")\n\n .help(\"make parent directories as needed\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::VERBOSE)\n", "file_path": "src/uu/mkdir/src/mkdir.rs", "rank": 57, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(AFTER_HELP)\n\n .arg(\n\n Arg::with_name(options::FORCE)\n\n .long(options::FORCE)\n\n .short(\"f\")\n\n .help(\"change permissions to allow writing if necessary\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::ITERATIONS)\n\n .long(options::ITERATIONS)\n\n .short(\"n\")\n\n .help(\"overwrite N times instead of the default (3)\")\n\n .value_name(\"NUMBER\")\n\n .default_value(\"3\"),\n\n )\n\n .arg(\n", "file_path": "src/uu/shred/src/shred.rs", "rank": 58, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::MODE)\n\n .short(\"m\")\n\n .long(options::MODE)\n\n .help(\"file permissions for the fifo\")\n\n .default_value(\"0666\")\n\n .value_name(\"0666\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::SE_LINUX_SECURITY_CONTEXT)\n\n .short(options::SE_LINUX_SECURITY_CONTEXT)\n\n .help(\"set the SELinux security context to default type\"),\n\n )\n\n .arg(\n", "file_path": "src/uu/mkfifo/src/mkfifo.rs", "rank": 59, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n 
.usage(BRIEF)\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n .arg(\n\n Arg::with_name(options::AUTO_REFERENCE)\n\n .short(\"A\")\n\n .long(options::AUTO_REFERENCE)\n\n .help(\"output automatically generated references\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(options::TRADITIONAL)\n\n .short(\"G\")\n\n .long(options::TRADITIONAL)\n\n .help(\"behave more like System V 'ptx'\"),\n\n )\n\n .arg(\n", "file_path": "src/uu/ptx/src/ptx.rs", "rank": 60, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::POSIX)\n\n .short(\"p\")\n\n .help(\"check for most POSIX systems\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::POSIX_SPECIAL)\n\n .short(\"P\")\n\n .help(r#\"check for empty names and leading \"-\"\"#),\n\n )\n\n .arg(\n\n Arg::with_name(options::PORTABILITY)\n\n .long(options::PORTABILITY)\n\n .help(\"check for all POSIX systems (equivalent to -p -P)\"),\n\n )\n\n .arg(Arg::with_name(options::PATH).hidden(true).multiple(true))\n\n}\n\n\n", "file_path": "src/uu/pathchk/src/pathchk.rs", "rank": 61, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::CMD)\n\n .hidden(true)\n\n .required(true)\n\n .multiple(true),\n\n )\n\n .setting(AppSettings::TrailingVarArg)\n\n}\n\n\n", "file_path": "src/uu/nohup/src/nohup.rs", "rank": 62, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::ALL_REPEATED)\n\n .short(\"D\")\n\n .long(options::ALL_REPEATED)\n\n .possible_values(&[\n\n Delimiters::None.as_ref(), Delimiters::Prepend.as_ref(), Delimiters::Separate.as_ref()\n\n ])\n\n .help(\"print all duplicate lines. Delimiting is done with blank lines. 
[default: none]\")\n\n .value_name(\"delimit-method\")\n\n .min_values(0)\n\n .max_values(1),\n\n )\n\n .arg(\n\n Arg::with_name(options::GROUP)\n\n .long(options::GROUP)\n\n .possible_values(&[\n", "file_path": "src/uu/uniq/src/uniq.rs", "rank": 63, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .usage(USAGE)\n\n .arg(\n\n Arg::with_name(options::BYTES_NAME)\n\n .short(\"c\")\n\n .long(\"bytes\")\n\n .value_name(\"[-]NUM\")\n\n .takes_value(true)\n\n .help(\n\n \"\\\n\n print the first NUM bytes of each file;\\n\\\n\n with the leading '-', print all but the last\\n\\\n\n NUM bytes of each file\\\n\n \",\n\n )\n\n .overrides_with_all(&[options::BYTES_NAME, options::LINES_NAME])\n\n .allow_hyphen_values(true),\n", "file_path": "src/uu/head/src/head.rs", "rank": 64, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(backup_control::arguments::backup())\n\n .arg(backup_control::arguments::backup_no_args())\n\n // TODO: opts.arg(\n\n // Arg::with_name((\"d\", \"directory\", \"allow users with appropriate privileges to attempt \\\n\n // to make hard links to directories\");\n\n .arg(\n\n Arg::with_name(options::FORCE)\n\n .short(\"f\")\n\n .long(options::FORCE)\n\n .help(\"remove existing destination files\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::INTERACTIVE)\n\n .short(\"i\")\n\n .long(options::INTERACTIVE)\n\n .help(\"prompt whether to remove existing destination files\"),\n", "file_path": "src/uu/ln/src/ln.rs", "rank": 65, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .arg(Arg::with_name(VERSION).long(VERSION))\n\n .arg(Arg::with_name(HELP).long(HELP))\n\n}\n\n\n", "file_path": "src/uu/expr/src/expr.rs", "rank": 66, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(\n\n \"By default, ls will list the files and contents of any directories on \\\n\n the command line, expect that it will ignore files and directories \\\n\n whose names start with '.'.\",\n\n )\n\n // Format arguments\n\n .arg(\n\n Arg::with_name(options::FORMAT)\n\n .long(options::FORMAT)\n\n .help(\"Set the display format.\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"long\",\n\n \"verbose\",\n\n \"single-column\",\n\n \"columns\",\n\n \"vertical\",\n", "file_path": "src/uu/ls/src/ls.rs", "rank": 67, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n app_from_crate!().arg(Arg::with_name(\"STRING\").index(1).multiple(true))\n\n}\n\n\n", "file_path": "src/uu/yes/src/yes.rs", "rank": 68, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(VERSION)\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::verbosity::CHANGES)\n\n .short(\"c\")\n\n .long(options::verbosity::CHANGES)\n\n .help(\"like verbose but report only when a change is made\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::verbosity::SILENT)\n\n .short(\"f\")\n\n .long(options::verbosity::SILENT),\n\n )\n\n .arg(\n\n Arg::with_name(options::verbosity::QUIET)\n\n .long(options::verbosity::QUIET)\n\n .help(\"suppress most error messages\"),\n\n )\n", "file_path": 
"src/uu/chgrp/src/chgrp.rs", "rank": 69, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(SUMMARY)\n\n}\n", "file_path": "src/uu/arch/src/arch.rs", "rank": 70, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::LIST)\n\n .short(\"l\")\n\n .long(options::LIST)\n\n .help(\"Lists signals\")\n\n .conflicts_with(options::TABLE)\n\n .conflicts_with(options::TABLE_OLD),\n\n )\n\n .arg(\n\n Arg::with_name(options::TABLE)\n\n .short(\"t\")\n\n .long(options::TABLE)\n\n .help(\"Lists table of signals\"),\n\n )\n\n .arg(Arg::with_name(options::TABLE_OLD).short(\"L\").hidden(true))\n\n .arg(\n", "file_path": "src/uu/kill/src/kill.rs", "rank": 71, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(\"Create output files containing consecutive or interleaved sections of input\")\n\n // strategy (mutually exclusive)\n\n .arg(\n\n Arg::with_name(OPT_BYTES)\n\n .short(\"b\")\n\n .long(OPT_BYTES)\n\n .takes_value(true)\n\n .default_value(\"2\")\n\n .help(\"use suffixes of length N (default 2)\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_LINE_BYTES)\n\n .short(\"C\")\n\n .long(OPT_LINE_BYTES)\n\n .takes_value(true)\n\n .default_value(\"2\")\n\n .help(\"put at most SIZE bytes of lines per output file\"),\n", "file_path": "src/uu/split/src/split.rs", "rank": 72, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n}\n", "file_path": "src/uu/true/src/true.rs", "rank": 73, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::INITIAL)\n\n .long(options::INITIAL)\n\n .short(\"i\")\n\n .help(\"do not convert tabs after non blanks\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::TABS)\n\n .long(options::TABS)\n\n .short(\"t\")\n\n .value_name(\"N, LIST\")\n\n .takes_value(true)\n\n .help(\"have tabs N characters apart, not 8 or use comma separated list of explicit tab positions\"),\n\n )\n\n .arg(\n", "file_path": "src/uu/expand/src/expand.rs", "rank": 74, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::MULTIPLE)\n\n .short(\"a\")\n\n .long(options::MULTIPLE)\n\n .help(\"support multiple arguments and treat each as a NAME\"),\n\n )\n\n .arg(Arg::with_name(options::NAME).multiple(true).hidden(true))\n\n .arg(\n\n Arg::with_name(options::SUFFIX)\n\n .short(\"s\")\n\n .long(options::SUFFIX)\n\n .value_name(\"SUFFIX\")\n\n .help(\"remove a trailing SUFFIX; implies -a\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::ZERO)\n\n .short(\"z\")\n\n .long(options::ZERO)\n\n .help(\"end each output line with NUL, not newline\"),\n\n )\n\n}\n\n\n", "file_path": "src/uu/basename/src/basename.rs", "rank": 75, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n 
Arg::with_name(OPT_IGNORE_FAIL_NON_EMPTY)\n\n .long(OPT_IGNORE_FAIL_NON_EMPTY)\n\n .help(\"ignore each failure that is solely because a directory is non-empty\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_PARENTS)\n\n .short(\"p\")\n\n .long(OPT_PARENTS)\n\n .help(\n\n \"remove DIRECTORY and its ancestors; e.g.,\n\n 'rmdir -p a/b/c' is similar to rmdir a/b/c a/b a\",\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_VERBOSE)\n", "file_path": "src/uu/rmdir/src/rmdir.rs", "rank": 76, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::OPT_AUDIT)\n\n .short(\"A\")\n\n .conflicts_with_all(&[\n\n options::OPT_GROUP,\n\n options::OPT_EFFECTIVE_USER,\n\n options::OPT_HUMAN_READABLE,\n\n options::OPT_PASSWORD,\n\n options::OPT_GROUPS,\n\n options::OPT_ZERO,\n\n ])\n\n .help(\n\n \"Display the process audit user ID and other process audit properties,\\n\\\n\n which requires privilege (not available on Linux).\",\n\n ),\n\n )\n", "file_path": "src/uu/id/src/id.rs", "rank": 77, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::BOURNE_SHELL)\n\n .long(\"sh\")\n\n .short(\"b\")\n\n .visible_alias(\"bourne-shell\")\n\n .help(\"output Bourne shell code to set LS_COLORS\")\n\n .display_order(1),\n\n )\n\n .arg(\n\n Arg::with_name(options::C_SHELL)\n\n .long(\"csh\")\n\n .short(\"c\")\n\n .visible_alias(\"c-shell\")\n\n .help(\"output C shell code to set LS_COLORS\")\n\n .display_order(2),\n", "file_path": "src/uu/dircolors/src/dircolors.rs", "rank": 78, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_QUIET)\n\n .short(\"q\")\n\n .long(OPT_QUIET)\n\n .help(\"Do not print warnings for invalid paths\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_STRIP)\n\n .short(\"s\")\n\n .long(OPT_STRIP)\n\n .help(\"Only strip '.' and '..' 
components, but don't resolve symbolic links\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_ZERO)\n\n .short(\"z\")\n\n .long(OPT_ZERO)\n", "file_path": "src/uu/realpath/src/realpath.rs", "rank": 79, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_ALL)\n\n .short(\"a\")\n\n .long(\"all\")\n\n .help(\"include dummy file systems\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_BLOCKSIZE)\n\n .short(\"B\")\n\n .long(\"block-size\")\n\n .takes_value(true)\n\n .help(\n\n \"scale sizes by SIZE before printing them; e.g.\\\n\n '-BM' prints sizes in units of 1,048,576 bytes\",\n\n ),\n\n )\n", "file_path": "src/uu/df/src/df.rs", "rank": 80, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .about(SUMMARY)\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n .arg(\n\n Arg::with_name(options::ALL)\n\n .short(\"a\")\n\n .long(options::ALL)\n\n .help(\"convert all blanks, instead of just initial blanks\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(options::FIRST_ONLY)\n\n .long(options::FIRST_ONLY)\n\n .help(\"convert only leading sequences of blanks (overrides -a)\")\n\n .takes_value(false),\n\n )\n", "file_path": "src/uu/unexpand/src/unexpand.rs", "rank": 81, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_CANONICALIZE)\n\n .short(\"f\")\n\n .long(OPT_CANONICALIZE)\n\n .help(\n\n \"canonicalize by following every symlink in every component of the \\\n\n given name recursively; all but the last component must exist\",\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_CANONICALIZE_EXISTING)\n\n .short(\"e\")\n\n .long(\"canonicalize-existing\")\n\n .help(\n\n \"canonicalize by following every symlink in every component of the \\\n\n given name recursively, all components must exist\",\n", "file_path": "src/uu/readlink/src/readlink.rs", "rank": 82, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(OPT_DATE)\n\n .short(\"d\")\n\n .long(OPT_DATE)\n\n .takes_value(true)\n\n .help(\"display time described by STRING, not 'now'\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_FILE)\n\n .short(\"f\")\n\n .long(OPT_FILE)\n\n .takes_value(true)\n\n .help(\"like --date; once for each line of DATEFILE\"),\n\n )\n\n .arg(\n\n Arg::with_name(OPT_ISO_8601)\n", "file_path": "src/uu/date/src/date.rs", "rank": 83, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::SUFFIX_FORMAT)\n\n .short(\"b\")\n\n .long(options::SUFFIX_FORMAT)\n\n .value_name(\"FORMAT\")\n\n .help(\"use sprintf FORMAT instead of %02d\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::PREFIX)\n\n .short(\"f\")\n\n .long(options::PREFIX)\n\n .value_name(\"PREFIX\")\n\n .help(\"use PREFIX instead of 'xx'\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::KEEP_FILES)\n", "file_path": "src/uu/csplit/src/csplit.rs", "rank": 84, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> 
App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .arg(Arg::with_name(VERSION).long(VERSION))\n\n .arg(Arg::with_name(HELP).long(HELP))\n\n}\n", "file_path": "src/uu/printf/src/printf.rs", "rank": 85, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::FILE_SYSTEM)\n\n .short(\"f\")\n\n .long(options::FILE_SYSTEM)\n\n .conflicts_with(options::DATA)\n\n .help(\"sync the file systems that contain the files (Linux and Windows only)\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::DATA)\n\n .short(\"d\")\n\n .long(options::DATA)\n\n .conflicts_with(options::FILE_SYSTEM)\n\n .help(\"sync only file data, no unneeded metadata (Linux only)\"),\n\n )\n\n .arg(Arg::with_name(ARG_FILES).multiple(true).takes_value(true))\n\n}\n\n\n", "file_path": "src/uu/sync/src/sync.rs", "rank": 86, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::DIR)\n\n .short(\"d\")\n\n .takes_value(true)\n\n .help(\"If any of FROM and TO is not subpath of DIR, output absolute path instead of relative\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::TO)\n\n .required(true)\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(options::FROM)\n\n .takes_value(true),\n\n )\n\n}\n", "file_path": "src/uu/relpath/src/relpath.rs", "rank": 87, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .after_help(LONG_HELP)\n\n .setting(AppSettings::AllowNegativeNumbers)\n\n .arg(\n\n Arg::with_name(options::DELIMITER)\n\n .short(\"d\")\n\n .long(options::DELIMITER)\n\n .value_name(\"X\")\n\n .help(\"use X instead of whitespace for field delimiter\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::FIELD)\n\n .long(options::FIELD)\n\n .help(\"replace the numbers in these input fields (default=1) see FIELDS below\")\n\n .value_name(\"FIELDS\")\n\n .default_value(options::FIELD_DEFAULT),\n\n )\n", "file_path": "src/uu/numfmt/src/numfmt.rs", "rank": 88, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::FILES)\n\n .hidden(true)\n\n .required(true)\n\n .min_values(2)\n\n .max_values(2)\n\n .takes_value(true),\n\n )\n\n}\n", "file_path": "src/uu/link/src/link.rs", "rank": 89, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .about(SUMMARY)\n\n .arg(\n\n Arg::with_name(options::BEFORE)\n\n .short(\"b\")\n\n .long(options::BEFORE)\n\n .help(\"attach the separator before instead of after\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(options::REGEX)\n\n .short(\"r\")\n\n .long(options::REGEX)\n\n .help(\"interpret the sequence as a regular expression\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(options::SEPARATOR)\n\n .short(\"s\")\n\n .long(options::SEPARATOR)\n\n .help(\"use STRING as the separator instead of newline\")\n\n .takes_value(true),\n\n )\n\n .arg(Arg::with_name(options::FILE).hidden(true).multiple(true))\n\n}\n\n\n", "file_path": "src/uu/tac/src/tac.rs", "rank": 90, 
"score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::SINCE)\n\n .short(\"s\")\n\n .long(options::SINCE)\n\n .help(\"system up since\"),\n\n )\n\n}\n\n\n", "file_path": "src/uu/uptime/src/uptime.rs", "rank": 91, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .name(NAME)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .about(SUMMARY)\n\n .arg(Arg::with_name(options::FILE).multiple(true).hidden(true))\n\n .arg(\n\n Arg::with_name(options::BSD_COMPATIBLE)\n\n .short(options::BSD_COMPATIBLE)\n\n .help(\"use the BSD sum algorithm, use 1K blocks (default)\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::SYSTEM_V_COMPATIBLE)\n\n .short(\"s\")\n\n .long(options::SYSTEM_V_COMPATIBLE)\n\n .help(\"use System V sum algorithm, use 512 bytes blocks\"),\n\n )\n\n}\n", "file_path": "src/uu/sum/src/sum.rs", "rank": 92, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n}\n", "file_path": "src/uu/false/src/false.rs", "rank": 93, "score": 207741.96723386948 }, { "content": "pub fn uu_app() -> App<'static, 'static> {\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n Arg::with_name(options::USERS)\n\n .multiple(true)\n\n .takes_value(true)\n\n .value_name(options::USERS),\n\n )\n\n}\n", "file_path": "src/uu/groups/src/groups.rs", "rank": 94, "score": 207741.96723386948 }, { "content": "pub fn uu_app_custom() -> App<'static, 'static> {\n\n let mut app = uu_app_common();\n\n let algorithms = &[\n\n (\"md5\", \"work with MD5\"),\n\n (\"sha1\", \"work with SHA1\"),\n\n (\"sha224\", \"work with SHA224\"),\n\n (\"sha256\", \"work with SHA256\"),\n\n (\"sha384\", \"work with SHA384\"),\n\n (\"sha512\", \"work with SHA512\"),\n\n (\"sha3\", \"work with SHA3\"),\n\n (\"sha3-224\", \"work with SHA3-224\"),\n\n (\"sha3-256\", \"work with SHA3-256\"),\n\n (\"sha3-384\", \"work with SHA3-384\"),\n\n (\"sha3-512\", \"work with SHA3-512\"),\n\n (\n\n \"shake128\",\n\n \"work with SHAKE128 using BITS for the output size\",\n\n ),\n\n (\n\n \"shake256\",\n", "file_path": "src/uu/hashsum/src/hashsum.rs", "rank": 95, "score": 205870.50673267248 }, { "content": "pub fn uu_app_common() -> App<'static, 'static> {\n\n #[cfg(windows)]\n\n const BINARY_HELP: &str = \"read in binary mode (default)\";\n\n #[cfg(not(windows))]\n\n const BINARY_HELP: &str = \"read in binary mode\";\n\n #[cfg(windows)]\n\n const TEXT_HELP: &str = \"read in text mode\";\n\n #[cfg(not(windows))]\n\n const TEXT_HELP: &str = \"read in text mode (default)\";\n\n App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(\"Compute and check message digests.\")\n\n .arg(\n\n Arg::with_name(\"binary\")\n\n .short(\"b\")\n\n .long(\"binary\")\n\n .help(BINARY_HELP),\n\n )\n\n .arg(\n\n Arg::with_name(\"check\")\n", "file_path": "src/uu/hashsum/src/hashsum.rs", "rank": 96, "score": 205870.50673267248 }, { "content": "pub fn uu_app() -> clap::App<'static, 'static> {\n\n // TODO: migrate to clap to get more shell completions\n\n clap::App::new(uucore::util_name())\n\n}\n\n\n", "file_path": "src/uu/pr/src/pr.rs", "rank": 97, "score": 202046.33261077362 }, { "content": "pub fn uu_app() -> clap::App<'static, 'static> {\n\n clap::App::new(uucore::util_name())\n\n .version(crate_version!())\n\n 
.about(ABOUT)\n\n .usage(USAGE)\n\n .after_help(LONG_HELP)\n\n .arg(\n\n Arg::with_name(options::ADDRESS_RADIX)\n\n .short(\"A\")\n\n .long(options::ADDRESS_RADIX)\n\n .help(\"Select the base in which file offsets are printed.\")\n\n .value_name(\"RADIX\"),\n\n )\n\n .arg(\n\n Arg::with_name(options::SKIP_BYTES)\n\n .short(\"j\")\n\n .long(options::SKIP_BYTES)\n\n .help(\"Skip bytes input bytes before formatting and writing.\")\n\n .value_name(\"BYTES\"),\n\n )\n", "file_path": "src/uu/od/src/od.rs", "rank": 98, "score": 202046.33261077362 }, { "content": "pub fn uu_app() -> clap::App<'static, 'static> {\n\n clap::App::new(uucore::util_name())\n\n .version(crate_version!())\n\n .about(ABOUT)\n\n .arg(\n\n clap::Arg::with_name(options::INFILE)\n\n .long(options::INFILE)\n\n .takes_value(true)\n\n .require_equals(true)\n\n .value_name(\"FILE\")\n\n .help(\"(alternatively if=FILE) specifies the file used for input. When not specified, stdin is used instead\")\n\n )\n\n .arg(\n\n clap::Arg::with_name(options::OUTFILE)\n\n .long(options::OUTFILE)\n\n .takes_value(true)\n\n .require_equals(true)\n\n .value_name(\"FILE\")\n\n .help(\"(alternatively of=FILE) specifies the file used for output. When not specified, stdout is used instead\")\n\n )\n", "file_path": "src/uu/dd/src/dd.rs", "rank": 99, "score": 202046.33261077362 } ]
Rust
src/io.rs
finalfusion/finalfusion-utils
d37a5d7f17515bcd0d6ba56a2e4dcec5914bf07a
use std::convert::TryFrom; use std::fmt; use std::fs::File; use std::io::{BufReader, BufWriter}; use anyhow::{anyhow, bail, Context, Error, Result}; use finalfusion::compat::floret::ReadFloretText; use finalfusion::compat::text::{WriteText, WriteTextDims}; use finalfusion::compat::word2vec::WriteWord2Vec; use finalfusion::io::WriteEmbeddings; use finalfusion::prelude::*; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EmbeddingFormat { FastText, FinalFusion, FinalFusionMmap, Floret, Word2Vec, Text, TextDims, } impl TryFrom<&str> for EmbeddingFormat { type Error = Error; fn try_from(format: &str) -> Result<Self> { use self::EmbeddingFormat::*; match format { "fasttext" => Ok(FastText), "finalfusion" => Ok(FinalFusion), "finalfusion_mmap" => Ok(FinalFusionMmap), "floret" => Ok(Floret), "word2vec" => Ok(Word2Vec), "text" => Ok(Text), "textdims" => Ok(TextDims), unknown => Err(anyhow!("Unknown embedding format: {}", unknown)), } } } impl fmt::Display for EmbeddingFormat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use EmbeddingFormat::*; let s = match self { FastText => "fasttext", FinalFusion => "finalfusion", FinalFusionMmap => "finalfusion_mmap", Floret => "floret", Word2Vec => "word2vec", Text => "text", TextDims => "textdims", }; f.write_str(s) } } pub fn read_embeddings( filename: &str, embedding_format: EmbeddingFormat, ) -> Result<Embeddings<VocabWrap, StorageWrap>> { let f = File::open(filename).context("Cannot open embeddings file")?; let mut reader = BufReader::new(f); use self::EmbeddingFormat::*; let embeds = match embedding_format { FastText => ReadFastText::read_fasttext(&mut reader).map(Embeddings::into), FinalFusion => ReadEmbeddings::read_embeddings(&mut reader), FinalFusionMmap => MmapEmbeddings::mmap_embeddings(&mut reader), Floret => ReadFloretText::read_floret_text(&mut reader).map(Embeddings::into), Word2Vec => ReadWord2Vec::read_word2vec_binary(&mut reader).map(Embeddings::into), Text => ReadText::read_text(&mut reader).map(Embeddings::into), TextDims => ReadTextDims::read_text_dims(&mut reader).map(Embeddings::into), }; Ok(embeds?) } pub fn read_embeddings_view( filename: &str, embedding_format: EmbeddingFormat, ) -> Result<Embeddings<VocabWrap, StorageViewWrap>> { let f = File::open(filename).context("Cannot open embeddings file")?; let mut reader = BufReader::new(f); use self::EmbeddingFormat::*; let embeds = match embedding_format { FastText => ReadFastText::read_fasttext(&mut reader).map(Embeddings::into), FinalFusion => ReadEmbeddings::read_embeddings(&mut reader), FinalFusionMmap => MmapEmbeddings::mmap_embeddings(&mut reader), Floret => ReadFloretText::read_floret_text(&mut reader).map(Embeddings::into), Word2Vec => ReadWord2Vec::read_word2vec_binary(&mut reader).map(Embeddings::into), Text => ReadText::read_text(&mut reader).map(Embeddings::into), TextDims => ReadTextDims::read_text_dims(&mut reader).map(Embeddings::into), }; Ok(embeds?) 
} pub fn write_embeddings( embeddings: &Embeddings<VocabWrap, StorageWrap>, filename: &str, format: EmbeddingFormat, unnormalize: bool, ) -> Result<()> { let f = File::create(filename).context(format!("Cannot create embeddings file: {}", filename))?; let mut writer = BufWriter::new(f); use self::EmbeddingFormat::*; match format { FastText => bail!("Writing to the fastText format is not supported"), FinalFusion => embeddings.write_embeddings(&mut writer)?, FinalFusionMmap => bail!("Writing to memory-mapped finalfusion file is not supported"), Floret => bail!("Writing to the floret format is not supported"), Word2Vec => embeddings.write_word2vec_binary(&mut writer, unnormalize)?, Text => embeddings.write_text(&mut writer, unnormalize)?, TextDims => embeddings.write_text_dims(&mut writer, unnormalize)?, }; Ok(()) }
use std::convert::TryFrom; use std::fmt; use std::fs::File; use std::io::{BufReader, BufWriter}; use anyhow::{anyhow, bail, Context, Error, Result}; use finalfusion::compat::floret::ReadFloretText; use finalfusion::compat::text::{WriteText, WriteTextDims}; use finalfusion::compat::word2vec::WriteWord2Vec; use finalfusion::io::WriteEmbeddings; use finalfusion::prelude::*; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EmbeddingFormat { FastText, FinalFusion, FinalFusionMmap, Floret, Word2Vec, Text, TextDims, } impl TryFrom<&str> for EmbeddingFormat { type Error = Error; fn try_from(format: &str) -> Result<Self> { use self::EmbeddingFormat::*; match
} impl fmt::Display for EmbeddingFormat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use EmbeddingFormat::*; let s = match self { FastText => "fasttext", FinalFusion => "finalfusion", FinalFusionMmap => "finalfusion_mmap", Floret => "floret", Word2Vec => "word2vec", Text => "text", TextDims => "textdims", }; f.write_str(s) } } pub fn read_embeddings( filename: &str, embedding_format: EmbeddingFormat, ) -> Result<Embeddings<VocabWrap, StorageWrap>> { let f = File::open(filename).context("Cannot open embeddings file")?; let mut reader = BufReader::new(f); use self::EmbeddingFormat::*; let embeds = match embedding_format { FastText => ReadFastText::read_fasttext(&mut reader).map(Embeddings::into), FinalFusion => ReadEmbeddings::read_embeddings(&mut reader), FinalFusionMmap => MmapEmbeddings::mmap_embeddings(&mut reader), Floret => ReadFloretText::read_floret_text(&mut reader).map(Embeddings::into), Word2Vec => ReadWord2Vec::read_word2vec_binary(&mut reader).map(Embeddings::into), Text => ReadText::read_text(&mut reader).map(Embeddings::into), TextDims => ReadTextDims::read_text_dims(&mut reader).map(Embeddings::into), }; Ok(embeds?) } pub fn read_embeddings_view( filename: &str, embedding_format: EmbeddingFormat, ) -> Result<Embeddings<VocabWrap, StorageViewWrap>> { let f = File::open(filename).context("Cannot open embeddings file")?; let mut reader = BufReader::new(f); use self::EmbeddingFormat::*; let embeds = match embedding_format { FastText => ReadFastText::read_fasttext(&mut reader).map(Embeddings::into), FinalFusion => ReadEmbeddings::read_embeddings(&mut reader), FinalFusionMmap => MmapEmbeddings::mmap_embeddings(&mut reader), Floret => ReadFloretText::read_floret_text(&mut reader).map(Embeddings::into), Word2Vec => ReadWord2Vec::read_word2vec_binary(&mut reader).map(Embeddings::into), Text => ReadText::read_text(&mut reader).map(Embeddings::into), TextDims => ReadTextDims::read_text_dims(&mut reader).map(Embeddings::into), }; Ok(embeds?) } pub fn write_embeddings( embeddings: &Embeddings<VocabWrap, StorageWrap>, filename: &str, format: EmbeddingFormat, unnormalize: bool, ) -> Result<()> { let f = File::create(filename).context(format!("Cannot create embeddings file: {}", filename))?; let mut writer = BufWriter::new(f); use self::EmbeddingFormat::*; match format { FastText => bail!("Writing to the fastText format is not supported"), FinalFusion => embeddings.write_embeddings(&mut writer)?, FinalFusionMmap => bail!("Writing to memory-mapped finalfusion file is not supported"), Floret => bail!("Writing to the floret format is not supported"), Word2Vec => embeddings.write_word2vec_binary(&mut writer, unnormalize)?, Text => embeddings.write_text(&mut writer, unnormalize)?, TextDims => embeddings.write_text_dims(&mut writer, unnormalize)?, }; Ok(()) }
format { "fasttext" => Ok(FastText), "finalfusion" => Ok(FinalFusion), "finalfusion_mmap" => Ok(FinalFusionMmap), "floret" => Ok(Floret), "word2vec" => Ok(Word2Vec), "text" => Ok(Text), "textdims" => Ok(TextDims), unknown => Err(anyhow!("Unknown embedding format: {}", unknown)), } }
function_block-function_prefixed
[ { "content": "fn read_metadata(filename: impl AsRef<str>) -> Result<Value> {\n\n let f = File::open(filename.as_ref())\n\n .context(format!(\"Cannot open metadata file: {}\", filename.as_ref()))?;\n\n let mut reader = BufReader::new(f);\n\n let mut buf = String::new();\n\n reader\n\n .read_to_string(&mut buf)\n\n .context(format!(\"Cannot read metadata from {}\", filename.as_ref()))?;\n\n buf.parse::<Value>().context(format!(\n\n \"Cannot parse metadata TOML from {}\",\n\n filename.as_ref()\n\n ))\n\n}\n\n\n", "file_path": "src/convert.rs", "rank": 0, "score": 118968.57206423352 }, { "content": "fn read_metadata(filename: &str) -> Result<Option<Metadata>> {\n\n let f = File::open(filename).context(format!(\"Cannot open embeddings file: {}\", filename))?;\n\n let mut reader = BufReader::new(f);\n\n ReadMetadata::read_metadata(&mut reader)\n\n .context(format!(\"Cannot read metadata from {}\", filename))\n\n}\n", "file_path": "src/metadata.rs", "rank": 1, "score": 91848.9754541432 }, { "content": "fn read_analogies(reader: impl BufRead) -> Result<Vec<Instance>> {\n\n let mut section = String::new();\n\n\n\n let mut instances = Vec::new();\n\n\n\n for line in reader.lines() {\n\n let line = line.context(\"Cannot read line\")?;\n\n\n\n if line.starts_with(\": \") {\n\n section = line.chars().skip(2).collect::<String>();\n\n continue;\n\n }\n\n\n\n let quadruple: Vec<_> = line.split_whitespace().collect();\n\n\n\n instances.push(Instance {\n\n section: section.clone(),\n\n query: (\n\n quadruple[0].to_owned(),\n\n quadruple[1].to_owned(),\n\n quadruple[2].to_owned(),\n\n ),\n\n answer: quadruple[3].to_owned(),\n\n });\n\n }\n\n\n\n Ok(instances)\n\n}\n\n\n", "file_path": "src/compute_accuracy.rs", "rank": 2, "score": 83695.2276149356 }, { "content": "fn main() -> Result<()> {\n\n // Known subapplications.\n\n let apps = vec![\n\n analogy::AnalogyApp::app(),\n\n bucket_to_explicit::BucketToExplicitApp::app(),\n\n compute_accuracy::ComputeAccuracyApp::app(),\n\n convert::ConvertApp::app(),\n\n metadata::MetadataApp::app(),\n\n quantize::QuantizeApp::app(),\n\n reconstruct::ReconstructApp::app(),\n\n select::SelectApp::app(),\n\n similar::SimilarApp::app(),\n\n ];\n\n\n\n let cli = App::new(\"finalfusion\")\n\n .settings(DEFAULT_CLAP_SETTINGS)\n\n .subcommands(apps)\n\n .subcommand(\n\n SubCommand::with_name(\"completions\")\n\n .about(\"Generate completion scripts for your shell\")\n", "file_path": "src/main.rs", "rank": 3, "score": 76060.63366314303 }, { "content": "pub trait FinalfusionApp\n\nwhere\n\n Self: Sized,\n\n{\n\n fn app() -> App<'static, 'static>;\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self>;\n\n\n\n fn run(&self) -> Result<()>;\n\n}\n", "file_path": "src/traits.rs", "rank": 7, "score": 63579.817595683955 }, { "content": "fn print_missing_tokens(tokens: &[&str], successful: &[bool]) {\n\n assert_eq!(tokens.len(), successful.len());\n\n\n\n let missing = tokens\n\n .iter()\n\n .zip(successful)\n\n .filter_map(|(&token, &success)| if !success { Some(token) } else { None })\n\n .collect::<Vec<_>>();\n\n\n\n eprintln!(\"Could not compute embedding(s) for: {}\", missing.join(\", \"));\n\n}\n", "file_path": "src/analogy.rs", "rank": 8, "score": 60749.68715113388 }, { "content": "fn copy_select_embeddings(\n\n embeddings: &Embeddings<VocabWrap, StorageWrap>,\n\n select: HashSet<String>,\n\n) -> Result<Embeddings<VocabWrap, StorageWrap>> {\n\n let mut selected_vocab = Vec::new();\n\n let mut selected_storage = Array2::zeros((select.len(), embeddings.dims()));\n\n let mut 
selected_norms = Array1::zeros((select.len(),));\n\n\n\n for (idx, word) in select.into_iter().enumerate() {\n\n match embeddings.embedding_with_norm(&word) {\n\n Some(embed_with_norm) => {\n\n selected_storage\n\n .row_mut(idx)\n\n .assign(&embed_with_norm.embedding);\n\n selected_norms[idx] = embed_with_norm.norm;\n\n }\n\n None => bail!(\"Cannot get embedding for: {}\", word),\n\n }\n\n\n\n selected_vocab.push(word);\n", "file_path": "src/select.rs", "rank": 9, "score": 57502.82730384942 }, { "content": "pub fn l2_normalize(mut v: ArrayViewMut1<f32>) -> f32 {\n\n let norm = v.dot(&v).sqrt();\n\n\n\n if norm != 0. {\n\n v /= norm;\n\n }\n\n\n\n norm\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 10, "score": 54455.20698791439 }, { "content": "pub fn l2_normalize_array(mut v: ArrayViewMut2<f32>) -> Array1<f32> {\n\n let mut norms = Vec::with_capacity(v.nrows());\n\n for embedding in v.outer_iter_mut() {\n\n norms.push(l2_normalize(embedding));\n\n }\n\n\n\n norms.into()\n\n}\n\n\n\n#[cfg(feature = \"intel-mkl-amd\")]\n\n#[allow(dead_code)]\n\n#[no_mangle]\n\nextern \"C\" fn mkl_serv_intel_cpu_true() -> c_int {\n\n 1\n\n}\n", "file_path": "src/util.rs", "rank": 11, "score": 50885.78190902692 }, { "content": "### Using `cargo`\n\n\n\n`finalfusion-utils` can be installed using an up-to-date Rust\n\ntoolchain, which can be installed with [rustup](https://rustup.rs).\n\n\n\nWith a valid Rust toolchain, the crate is most easily installed through\n\n`cargo`:\n\n\n\n~~~shell\n\n$ cargo install finalfusion-utils\n\n~~~\n\n\n\nTypically, you will want to enable support for a BLAS/LAPACK library\n\nto speed up matrix multiplication and enable optimized product\n\nquantization support. In order to do so, run\n\n\n\n~~~shell\n\n$ cargo install finalfusion-utils --features implementation\n\n~~~\n\n\n\nwhere `implementation` is one of the following:\n\n\n\n* `accelerate`: the macOS Accelerate framework.\n\n* `intel-mkl`: Intel MKL (downloaded and statically linked).\n\n* `intel-mkl-amd`: Intel MKL, preinstalled MKL libaries expected, override\n\n CPU detection for AMD CPUs.\n\n* `netlib`: any compatible system BLAS/LAPACK implementation(s).\n\n* `openblas`: system-installed OpenBLAS. This option is discouraged,\n\n unless the system OpenBLAS library is a single-threaded build with\n\n locking. 
Otherwise, OpenBLAS' threading interacts badly with application\n\n threads.\n\n\n\n## Building from source\n\n\n\n`finalfusion-utils` can also be built from source,\n\nafter cloning this repository execute the following\n\ncommand in the directory to find the exectuable under\n\n`target/release/finalfusion`:\n\n\n\n~~~shell\n\n$ cargo build --release\n\n~~~\n\n\n\n## Usage\n\n\n\n`finalfusion-utils` is built as a single binary, the\n\ndifferent functionality is invoked through subcommands:\n\n\n\n### Converting embeddings\n\n\n\n~~~shell\n\n# Convert embeddings in fastText format to finalfusion\n\n$ finalfusion convert -f fasttext -t finalfusion \\\n\n embeddings.bin embeddings.fifu\n\n\n\n# Convert embeddings in word2vec format to finalfusion\n\n$ finalfusion convert -f word2vec -t finalfusion \\\n\n embeddings.w2v embeddings.fifu\n\n\n\n# Print help with all supported combinations:\n\n$ finalfusion convert --help\n\n~~~\n\n\n\n### Quantizing an embedding matrix\n\n\n\n~~~shell\n\n# Quantize embeddings in finalfusion format with a\n", "file_path": "README.md", "rank": 12, "score": 41643.2174117394 }, { "content": "[![Travis CI](https://img.shields.io/travis/finalfusion/finalfusion-utils.svg)](https://travis-ci.org/finalfusion/finalfusion-utils)\n\n\n\n# finalfusion-utils\n\n\n\n## Introduction\n\n\n\n`finalfusion-utils` is a Rust crate offering various\n\nfunctionalities to process and query embeddings.\n\n`finalfusion-utils` supports conversion between different\n\nformats, quantization of embedding matrices, similarity and\n\nanalogy queries as well as evaluation on analogy datasets.\n\n\n\n## Installation\n\n\n\n### Precompiled binaries\n\n\n\nThe following precompiled binaries can be found on the\n\n[releases page](https://github.com/finalfusion/finalfusion-utils/releases):\n\n\n\n* `x86_64-unknown-linux-gnu-mkl`: glibc Linux build, statically linked\n\n against Intel MKL. This is the recommended build for Intel (non-AMD)\n\n CPUs.\n\n* `x86_64-unknown-linux-musl`: static Linux build using the MUSL C\n\n library. This binary does not link against a BLAS/LAPACK implementation\n\n and therefore does not support optimized product quantization.\n\n* `universal-macos`: dynamic macOS build. Supports both the x86_64 and\n\n ARM64 architectures. 
Linked against the Accelerate framework for\n\n BLAS/LAPACK.\n\n\n\n\n", "file_path": "README.md", "rank": 13, "score": 41639.89739607488 }, { "content": "# single attempt through product quantization \n\n$ finalfusion quantize -f finalfusion -q pq -a 1 \\\n\n embeddings.pq\n\n~~~\n\n\n\n### Analogy and similarity queries\n\n\n\n~~~ shell\n\n# Get the 15 nearest neighbours of \"Tübingen\" for\n\n# embeddings in finalfusion format.\n\n$ finalfusion similar -f finalfusion -k 15 \\\n\n embeddings.fifu\n\n\n\n# Get the 5 best answers for the analogy query\n\n# \"Berlin\" is to \"Deutschland\" as \"Amsterdam\" to:\n\n$ finalfusion analogy -f finalfusion -k 5 \\\n\n Berlin Deutschland Amsterdam embeddings.fifu\n\n~~~\n\n\n\n### Evaluation on analogy datasets\n\n\n\n~~~shell\n\n# Evaluate embeddings on some analogy dataset\n\n$ finalfusion compute-accuracy embeddings.fifu \\\n\n analogies.txt\n\n~~~\n\n\n\n### Dump metadata\n\n\n\n~~~shell\n\n# Dump optionally stored metadata and store in\n\n# metadata.txt, only supported for finalfusion\n\n# format\n\n$ finalfusion metadata embeddings.fifu \\\n\n > metadata.txt\n\n~~~\n\n\n\n### Convert Bucket Vocab to Explicit Vocab\n\n~~~shell\n\n# Converts a hash-bucket based subword vocab to\n\n# one with explicitly stored n-grams.\n\n$ finalfusion bucket-to-explicit buckets.fifu \\\n\n explicit.fifu \n\n~~~\n\n\n\n### Print completion script\n\n\n\n~~~shell\n\n# Print completion script for zsh\n\n$ finalfusion completions zsh\n\n~~~\n", "file_path": "README.md", "rank": 14, "score": 41639.00245778592 }, { "content": "Copyright 2018-2021 The finalfusion-utils contributors\n\n\n\nLicensed under the [Apache License, Version\n\n2.0](http://www.apache.org/licenses/LICENSE-2.0) or the [MIT\n\nlicense](http://opensource.org/licenses/MIT), at your option.\n\n\n\nContributors:\n\n\n\nDaniël de Kok <me@danieldk.eu>\n\nSebastian Pütz <seb.puetz@gmail.com>\n\nNianheng Wu <xjtuwunianheng@gmail.com>\n", "file_path": "COPYRIGHT.md", "rank": 15, "score": 41637.56460096177 }, { "content": "fn read_embeddings(\n\n filename: &str,\n\n embedding_format: EmbeddingFormat,\n\n lossy: bool,\n\n) -> Result<Embeddings<VocabWrap, StorageWrap>> {\n\n let f = File::open(filename).context(format!(\"Cannot open embeddings file: {}\", filename))?;\n\n let mut reader = BufReader::new(f);\n\n\n\n use self::EmbeddingFormat::*;\n\n match (embedding_format, lossy) {\n\n (FastText, true) => ReadFastText::read_fasttext_lossy(&mut reader).map(Embeddings::into),\n\n (FastText, false) => ReadFastText::read_fasttext(&mut reader).map(Embeddings::into),\n\n (FinalFusion, _) => ReadEmbeddings::read_embeddings(&mut reader),\n\n (FinalFusionMmap, _) => MmapEmbeddings::mmap_embeddings(&mut reader),\n\n (Floret, _) => ReadFloretText::read_floret_text(&mut reader).map(Embeddings::into),\n\n (Word2Vec, true) => {\n\n ReadWord2Vec::read_word2vec_binary_lossy(&mut reader).map(Embeddings::into)\n\n }\n\n (Word2Vec, false) => ReadWord2Vec::read_word2vec_binary(&mut reader).map(Embeddings::into),\n\n (Text, true) => ReadText::read_text_lossy(&mut reader).map(Embeddings::into),\n\n (Text, false) => ReadText::read_text(&mut reader).map(Embeddings::into),\n\n (TextDims, true) => ReadTextDims::read_text_dims_lossy(&mut reader).map(Embeddings::into),\n\n (TextDims, false) => ReadTextDims::read_text_dims(&mut reader).map(Embeddings::into),\n\n }\n\n .context(format!(\n\n \"Cannot read {} embeddings from {}\",\n\n embedding_format, filename\n\n ))\n\n}\n", "file_path": "src/convert.rs", "rank": 16, "score": 
35014.44658186379 }, { "content": "#[cfg(feature = \"opq\")]\n\nfn quantize_embeddings<V, S>(\n\n config: &QuantizeApp,\n\n embeddings: &Embeddings<V, S>,\n\n) -> Result<Embeddings<V, QuantizedArray>>\n\nwhere\n\n V: Vocab + Clone,\n\n S: StorageView,\n\n{\n\n let n_subquantizers = config\n\n .n_subquantizers\n\n .unwrap_or(embeddings.storage().shape().1 / 2);\n\n\n\n Ok(match config.quantizer.as_str() {\n\n \"pq\" => embeddings.quantize::<Pq<f32>>(\n\n n_subquantizers,\n\n config.quantizer_bits,\n\n config.n_iterations,\n\n config.n_attempts,\n\n true,\n\n )?,\n", "file_path": "src/quantize.rs", "rank": 17, "score": 31015.861457486084 }, { "content": "fn write_completion_script(mut cli: App, shell: Shell) {\n\n cli.gen_completions_to(\"finalfusion\", shell, &mut stdout());\n\n}\n", "file_path": "src/main.rs", "rank": 18, "score": 25816.665896651066 }, { "content": "fn cosine_similarity(u: ArrayView1<f32>, v: ArrayView1<f32>) -> f32 {\n\n let u_norm = u.dot(&u).sqrt();\n\n let v_norm = v.dot(&v).sqrt();\n\n u.dot(&v) / (u_norm * v_norm)\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 19, "score": 24264.6001296033 }, { "content": "fn euclidean_distance(u: ArrayView1<f32>, v: ArrayView1<f32>) -> f32 {\n\n let dist_vec = &u - &v;\n\n dist_vec.dot(&dist_vec).sqrt()\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 20, "score": 24264.6001296033 }, { "content": "fn print_loss(storage: &dyn StorageView, quantized_storage: &dyn Storage) {\n\n let mut cosine_similarity_sum = 0f32;\n\n let mut euclidean_distance_sum = 0f32;\n\n\n\n for (idx, embedding) in storage.view().outer_iter().enumerate() {\n\n let reconstruction = quantized_storage.embedding(idx);\n\n cosine_similarity_sum += cosine_similarity(embedding, reconstruction.view());\n\n euclidean_distance_sum += euclidean_distance(embedding, reconstruction.view());\n\n }\n\n\n\n eprintln!(\n\n \"Average cosine similarity: {}\",\n\n cosine_similarity_sum / storage.view().nrows() as f32\n\n );\n\n\n\n eprintln!(\n\n \"Average euclidean distance: {}\",\n\n euclidean_distance_sum / storage.view().nrows() as f32\n\n );\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 21, "score": 24034.87174391691 }, { "content": "fn process_analogies(embeddings: &Embeddings<VocabWrap, StorageViewWrap>, instances: &[Instance]) {\n\n let pb = ProgressBar::new(instances.len() as u64);\n\n pb.set_style(\n\n ProgressStyle::default_bar().template(\"{bar:30} {percent}% {msg} ETA: {eta_precise}\"),\n\n );\n\n let eval = Eval::new(&embeddings);\n\n instances.par_iter().enumerate().for_each(|(i, instance)| {\n\n if i % 50 == 0 {\n\n pb.inc(50);\n\n }\n\n eval.eval_analogy(instance)\n\n });\n\n pb.finish();\n\n}\n", "file_path": "src/compute_accuracy.rs", "rank": 22, "score": 22891.071371597965 }, { "content": "use std::convert::TryFrom;\n\nuse std::fmt;\n\n\n\nuse anyhow::{anyhow, Context, Error, Result};\n\nuse clap::{Arg, ArgMatches};\n\nuse finalfusion::similarity::WordSimilarityResult;\n\n\n\nconst SIMILARITY: &str = \"similarity\";\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum SimilarityMeasure {\n\n Angular,\n\n Cosine,\n\n}\n\n\n\nimpl SimilarityMeasure {\n\n pub fn new_clap_arg() -> Arg<'static, 'static> {\n\n Arg::with_name(SIMILARITY)\n\n .short(\"s\")\n\n .long(\"similarity\")\n", "file_path": "src/similarity.rs", "rank": 25, "score": 22.923058973439055 }, { "content": "use std::convert::TryFrom;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, BufWriter};\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse 
finalfusion::compat::fasttext::ReadFastText;\n\nuse finalfusion::io::WriteEmbeddings;\n\nuse finalfusion::prelude::*;\n\n\n\nuse crate::io::EmbeddingFormat;\n\nuse crate::FinalfusionApp;\n\n\n\n// Argument constants\n\nstatic FORMAT: &str = \"FORMAT\";\n\nstatic INPUT: &str = \"INPUT\";\n\nstatic OUTPUT: &str = \"OUTPUT\";\n\n\n\npub struct BucketToExplicitApp {\n\n input_filename: String,\n", "file_path": "src/bucket_to_explicit.rs", "rank": 27, "score": 16.87196461710946 }, { "content": "use std::convert::TryFrom;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, Read};\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::compat::floret::ReadFloretText;\n\nuse finalfusion::io::ReadEmbeddings;\n\nuse finalfusion::metadata::Metadata;\n\nuse finalfusion::prelude::*;\n\nuse toml::Value;\n\n\n\nuse crate::io::{write_embeddings, EmbeddingFormat};\n\nuse crate::FinalfusionApp;\n\n\n\n// Option constants\n\nstatic INPUT_FORMAT: &str = \"input_format\";\n\nstatic LOSSY: &str = \"lossy\";\n\nstatic METADATA_FILENAME: &str = \"metadata_filename\";\n\nstatic OUTPUT_FORMAT: &str = \"output_format\";\n", "file_path": "src/convert.rs", "rank": 28, "score": 15.549549712893034 }, { "content": " use self::SimilarityMeasure::*;\n\n match self {\n\n Angular => result.angular_similarity(),\n\n Cosine => result.cosine_similarity(),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for SimilarityMeasure {\n\n type Error = Error;\n\n\n\n fn try_from(format: &str) -> Result<Self> {\n\n use self::SimilarityMeasure::*;\n\n\n\n match format {\n\n \"angular\" => Ok(Angular),\n\n \"cosine\" => Ok(Cosine),\n\n unknown => Err(anyhow!(\"Unknown similarity measure: {}\", unknown)),\n\n }\n\n }\n", "file_path": "src/similarity.rs", "rank": 29, "score": 15.46646146621216 }, { "content": "use std::collections::HashSet;\n\nuse std::convert::TryFrom;\n\nuse std::io::BufRead;\n\n\n\nuse anyhow::{bail, Context, Error, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::embeddings::Embeddings;\n\nuse finalfusion::norms::NdNorms;\n\nuse finalfusion::storage::{NdArray, StorageWrap};\n\nuse finalfusion::vocab::{SimpleVocab, Vocab, VocabWrap};\n\nuse ndarray::{Array1, Array2};\n\nuse stdinout::Input;\n\n\n\nuse super::FinalfusionApp;\n\nuse crate::io::{read_embeddings, write_embeddings, EmbeddingFormat};\n\n\n\nconst IGNORE_UNKNOWN: &str = \"IGNORE_UNKNOWN\";\n\nconst INPUT_EMBEDDINGS: &str = \"INPUT_EMBEDDINGS\";\n\nconst INPUT_FORMAT: &str = \"INPUT_FORMAT\";\n\nconst OUTPUT_EMBEDDINGS: &str = \"OUTPUT_EMBEDDINGS\";\n", "file_path": "src/select.rs", "rank": 30, "score": 14.287389075840938 }, { "content": "use std::fs::File;\n\nuse std::io::{BufReader, BufWriter, Write};\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::io::ReadMetadata;\n\nuse finalfusion::metadata::Metadata;\n\nuse stdinout::Output;\n\nuse toml::ser::to_string_pretty;\n\n\n\nuse crate::FinalfusionApp;\n\n\n\n// Argument constants\n\nstatic INPUT: &str = \"INPUT\";\n\nstatic OUTPUT: &str = \"OUTPUT\";\n\n\n\npub struct MetadataApp {\n\n input_filename: String,\n\n output_filename: Option<String>,\n\n}\n", "file_path": "src/metadata.rs", "rank": 31, "score": 13.767380682735517 }, { "content": "use std::fs::File;\n\nuse std::io::BufReader;\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::norms::NdNorms;\n\nuse finalfusion::prelude::*;\n\nuse finalfusion::storage::{NdArray, QuantizedArray, Reconstruct};\n\nuse 
finalfusion::vocab::Vocab;\n\nuse ndarray::{s, Array2};\n\n\n\nuse crate::io::{write_embeddings, EmbeddingFormat};\n\nuse crate::util::l2_normalize_array;\n\nuse crate::FinalfusionApp;\n\n\n\n// Argument constants\n\nstatic INPUT: &str = \"INPUT\";\n\nstatic OUTPUT: &str = \"OUTPUT\";\n\n\n\npub struct ReconstructApp {\n", "file_path": "src/reconstruct.rs", "rank": 32, "score": 13.22536906349481 }, { "content": "}\n\n\n\nimpl FinalfusionApp for AnalogyApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"analogy\")\n\n .about(\"Find words that fit an analogy\")\n\n .arg(\n\n Arg::with_name(\"format\")\n\n .short(\"f\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"fasttext\",\n\n \"finalfusion\",\n\n \"finalfusion_mmap\",\n\n \"word2vec\",\n\n \"text\",\n\n \"textdims\",\n\n ])\n\n .default_value(\"finalfusion\"),\n", "file_path": "src/analogy.rs", "rank": 33, "score": 12.434685813346782 }, { "content": "impl FinalfusionApp for SimilarApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"similar\")\n\n .about(\"Find words that are similar to a given word\")\n\n .arg(\n\n Arg::with_name(\"format\")\n\n .short(\"f\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"fasttext\",\n\n \"finalfusion\",\n\n \"finalfusion_mmap\",\n\n \"text\",\n\n \"textdims\",\n\n \"word2vec\",\n\n ])\n\n .default_value(\"finalfusion\"),\n\n )\n\n .arg(\n", "file_path": "src/similar.rs", "rank": 34, "score": 12.081969098786653 }, { "content": "}\n\n\n\nimpl fmt::Display for SimilarityMeasure {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use SimilarityMeasure::*;\n\n let s = match self {\n\n Angular => \"angular\",\n\n Cosine => \"cosine\",\n\n };\n\n\n\n f.write_str(s)\n\n }\n\n}\n", "file_path": "src/similarity.rs", "rank": 35, "score": 11.83826550903875 }, { "content": "use std::convert::TryFrom;\n\nuse std::io::BufRead;\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::similarity::WordSimilarity;\n\nuse stdinout::Input;\n\n\n\nuse super::FinalfusionApp;\n\nuse crate::io::{read_embeddings_view, EmbeddingFormat};\n\nuse crate::similarity::SimilarityMeasure;\n\n\n\npub struct SimilarApp {\n\n embeddings_filename: String,\n\n embedding_format: EmbeddingFormat,\n\n input: Option<String>,\n\n k: usize,\n\n similarity: SimilarityMeasure,\n\n}\n\n\n", "file_path": "src/similar.rs", "rank": 36, "score": 11.52969358459912 }, { "content": " .long(\"from\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"fasttext\",\n\n \"finalfusion\",\n\n \"floret\",\n\n \"text\",\n\n \"textdims\",\n\n \"word2vec\",\n\n ])\n\n .default_value(\"word2vec\"),\n\n )\n\n .arg(\n\n Arg::with_name(N_ITERATIONS)\n\n .short(\"i\")\n\n .long(\"iter\")\n\n .value_name(\"N\")\n\n .help(\"Number of iterations\")\n\n .takes_value(true)\n", "file_path": "src/quantize.rs", "rank": 37, "score": 11.520128797961124 }, { "content": "use std::collections::HashSet;\n\nuse std::convert::TryFrom;\n\nuse std::io::BufRead;\n\n\n\nuse anyhow::{ensure, Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::similarity::Analogy;\n\nuse stdinout::Input;\n\n\n\nuse crate::io::{read_embeddings_view, EmbeddingFormat};\n\nuse crate::similarity::SimilarityMeasure;\n\nuse crate::FinalfusionApp;\n\n\n\npub struct AnalogyApp {\n\n embeddings_filename: String,\n\n embedding_format: EmbeddingFormat,\n\n input_filename: Option<String>,\n\n excludes: [bool; 3],\n\n k: usize,\n\n 
similarity: SimilarityMeasure,\n", "file_path": "src/analogy.rs", "rank": 38, "score": 11.05633283569901 }, { "content": " .help(\"Ignore words for which no embedding is available\"),\n\n )\n\n .arg(\n\n Arg::with_name(INPUT_FORMAT)\n\n .short(\"f\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"fasttext\",\n\n \"finalfusion\",\n\n \"finalfusion_mmap\",\n\n \"floret\",\n\n \"text\",\n\n \"textdims\",\n\n \"word2vec\",\n\n ])\n\n .default_value(\"finalfusion\")\n\n .help(\"Input format\"),\n\n )\n\n .arg(\n", "file_path": "src/select.rs", "rank": 39, "score": 10.909499019705162 }, { "content": "use std::convert::TryFrom;\n\nuse std::process;\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse finalfusion::embeddings::Quantize;\n\nuse finalfusion::prelude::*;\n\nuse finalfusion::storage::{QuantizedArray, Storage, StorageView};\n\nuse finalfusion::vocab::Vocab;\n\nuse ndarray::ArrayView1;\n\nuse rayon::ThreadPoolBuilder;\n\nuse reductive::pq::Pq;\n\n#[cfg(feature = \"opq\")]\n\nuse reductive::pq::{GaussianOpq, Opq};\n\n\n\nuse crate::io::{read_embeddings_view, write_embeddings, EmbeddingFormat};\n\nuse crate::FinalfusionApp;\n\n\n\n// Option constants\n\nstatic INPUT_FORMAT: &str = \"input_format\";\n", "file_path": "src/quantize.rs", "rank": 40, "score": 10.57480936036586 }, { "content": " .value_name(\"SIMILARITY\")\n\n .takes_value(true)\n\n .default_value(\"cosine\")\n\n .possible_values(&[\"angular\", \"cosine\"])\n\n .help(\"Similarity measure\")\n\n }\n\n\n\n pub fn parse_clap_matches(matches: &ArgMatches) -> Result<Self> {\n\n let measure = matches\n\n .value_of(SIMILARITY)\n\n .map(|s| {\n\n SimilarityMeasure::try_from(s)\n\n .context(format!(\"Cannot parse similarity measure: {}\", s))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n Ok(measure)\n\n }\n\n\n\n pub fn as_f32(&self, result: &WordSimilarityResult) -> f32 {\n", "file_path": "src/similarity.rs", "rank": 41, "score": 10.018347344054119 }, { "content": " .context(\"Cannot read embeddings\")?;\n\n\n\n let select = self.read_words(&embeddings)?;\n\n\n\n let output_embeddings = copy_select_embeddings(&embeddings, select)?;\n\n\n\n write_embeddings(\n\n &output_embeddings,\n\n &self.output_filename,\n\n self.output_format,\n\n true,\n\n )\n\n }\n\n}\n\n\n\nimpl SelectApp {\n\n fn read_words(\n\n &self,\n\n embeddings: &Embeddings<VocabWrap, StorageWrap>,\n\n ) -> Result<HashSet<String>, Error> {\n", "file_path": "src/select.rs", "rank": 42, "score": 10.006272659474421 }, { "content": " \"finalfusion\",\n\n \"floret\",\n\n \"text\",\n\n \"textdims\",\n\n \"word2vec\",\n\n ])\n\n .default_value(\"word2vec\"),\n\n )\n\n .arg(\n\n Arg::with_name(LOSSY)\n\n .long(\"lossy\")\n\n .help(\"do not fail on malformed UTF-8 byte sequences\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::with_name(METADATA_FILENAME)\n\n .short(\"m\")\n\n .long(\"metadata\")\n\n .value_name(\"FILENAME\")\n\n .help(\"TOML metadata add to the embeddings\")\n", "file_path": "src/convert.rs", "rank": 43, "score": 9.038858936694934 }, { "content": "use std::collections::BTreeMap;\n\nuse std::io::BufRead;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, AppSettings, Arg, ArgMatches};\n\nuse finalfusion::prelude::*;\n\nuse finalfusion::similarity::Analogy;\n\nuse finalfusion::vocab::Vocab;\n\nuse indicatif::{ProgressBar, ProgressStyle};\n\nuse rayon::prelude::*;\n\nuse rayon::ThreadPoolBuilder;\n\nuse stdinout::Input;\n\n\n\nuse crate::io::{read_embeddings_view, 
EmbeddingFormat};\n\nuse crate::FinalfusionApp;\n\n\n\nstatic DEFAULT_CLAP_SETTINGS: &[AppSettings] = &[\n\n AppSettings::DontCollapseArgsInUsage,\n\n AppSettings::UnifiedHelpMessage,\n", "file_path": "src/compute_accuracy.rs", "rank": 44, "score": 8.912818530615572 }, { "content": "use anyhow::Result;\n\nuse clap::{App, ArgMatches};\n\n\n", "file_path": "src/traits.rs", "rank": 45, "score": 8.680127132340505 }, { "content": "static UNNORMALIZE: &str = \"unnormalize\";\n\n\n\n// Argument constants\n\nstatic INPUT: &str = \"INPUT\";\n\nstatic OUTPUT: &str = \"OUTPUT\";\n\n\n\npub struct ConvertApp {\n\n input_filename: String,\n\n output_filename: String,\n\n metadata_filename: Option<String>,\n\n input_format: EmbeddingFormat,\n\n output_format: EmbeddingFormat,\n\n lossy: bool,\n\n unnormalize: bool,\n\n}\n\n\n\nimpl FinalfusionApp for ConvertApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"convert\")\n\n .about(\"Convert between embedding formats\")\n", "file_path": "src/convert.rs", "rank": 46, "score": 8.212210850691182 }, { "content": "];\n\n\n\n// Option constants\n\nstatic EMBEDDINGS: &str = \"EMBEDDINGS\";\n\nstatic ANALOGIES: &str = \"ANALOGIES\";\n\nstatic THREADS: &str = \"threads\";\n\n\n\npub struct ComputeAccuracyApp {\n\n analogies_filename: Option<String>,\n\n embeddings_filename: String,\n\n n_threads: usize,\n\n}\n\n\n\nimpl FinalfusionApp for ComputeAccuracyApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"compute-accuracy\")\n\n .about(\"Compute prediction accuracy on a set of analogies\")\n\n .settings(DEFAULT_CLAP_SETTINGS)\n\n .arg(\n\n Arg::with_name(THREADS)\n", "file_path": "src/compute_accuracy.rs", "rank": 47, "score": 8.212210850691182 }, { "content": " .arg(\n\n Arg::with_name(FORMAT)\n\n .help(\"File format\")\n\n .short(\"f\")\n\n .long(\"format\")\n\n .possible_values(&[\"finalfusion\", \"fasttext\"])\n\n .default_value(\"finalfusion\")\n\n .takes_value(true)\n\n .value_name(\"FORMAT\"),\n\n )\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let input_filename = matches.value_of(INPUT).unwrap().to_owned();\n\n let output_filename = matches.value_of(OUTPUT).unwrap().to_owned();\n\n let format = matches\n\n .value_of(FORMAT)\n\n .map(|v| {\n\n EmbeddingFormat::try_from(v).context(format!(\"Cannot parse input format: {}\", v))\n\n })\n", "file_path": "src/bucket_to_explicit.rs", "rank": 48, "score": 8.064193008005185 }, { "content": "\n\nimpl FinalfusionApp for MetadataApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"metadata\")\n\n .about(\"Extract metadata from finalfusion embeddings\")\n\n .arg(\n\n Arg::with_name(INPUT)\n\n .help(\"finalfusion model\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(Arg::with_name(OUTPUT).help(\"Output file\").index(2))\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let input_filename = matches.value_of(INPUT).unwrap().to_owned();\n\n let output_filename = matches.value_of(OUTPUT).map(ToOwned::to_owned);\n\n\n\n Ok(MetadataApp {\n\n input_filename,\n", "file_path": "src/metadata.rs", "rank": 49, "score": 7.7018676798698 }, { "content": "const OUTPUT_FORMAT: &str = \"OUTPUT_FORMAT\";\n\nconst SELECT: &str = \"SELECT\";\n\n\n\npub struct SelectApp {\n\n ignore_unknown: bool,\n\n input_filename: String,\n\n input_format: EmbeddingFormat,\n\n output_filename: String,\n\n output_format: EmbeddingFormat,\n\n select_input: Input,\n\n}\n\n\n\nimpl FinalfusionApp for SelectApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"select\")\n\n 
.about(\"Select embeddings from an embeddings file\")\n\n .arg(\n\n Arg::with_name(IGNORE_UNKNOWN)\n\n .short(\"i\")\n\n .long(\"ignore-unknown\")\n", "file_path": "src/select.rs", "rank": 50, "score": 7.699815692159236 }, { "content": " Embeddings::<VocabWrap, StorageWrap>::mmap_embeddings(&mut reader).context(\n\n \"Cannot read input embeddings. \\\n\n Only finalfusion and fastText files can be converted.\",\n\n )?\n\n }\n\n EmbeddingFormat::FastText => Embeddings::read_fasttext(&mut reader)\n\n .context(\n\n \"Cannot read input embeddings. \\\n\n Only finalfusion and fastText files can be converted.\",\n\n )?\n\n .into(),\n\n _ => unreachable!(),\n\n };\n\n let conv = embeddings.try_to_explicit()?;\n\n\n\n let mut writer = BufWriter::new(f);\n\n conv.write_embeddings(&mut writer)\n\n .context(\"Cannot write embeddings\")\n\n }\n\n}\n", "file_path": "src/bucket_to_explicit.rs", "rank": 51, "score": 7.401477712110744 }, { "content": " }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n // Arguments\n\n let input_filename = matches.value_of(INPUT).unwrap().to_owned();\n\n let output_filename = matches.value_of(OUTPUT).unwrap().to_owned();\n\n\n\n Ok(ReconstructApp {\n\n input_filename,\n\n output_filename,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let f = File::open(&self.input_filename).context(\"Cannot open embeddings file\")?;\n\n let mut reader = BufReader::new(f);\n\n let embeddings: Embeddings<VocabWrap, QuantizedArray> =\n\n Embeddings::read_embeddings(&mut reader)\n\n .context(\"Cannot read quantized embedding matrix\")?;\n\n\n", "file_path": "src/reconstruct.rs", "rank": 52, "score": 7.3019528600301316 }, { "content": "mod similarity;\n\n\n\nmod traits;\n\npub use self::traits::FinalfusionApp;\n\n\n\npub mod util;\n\n\n\nstatic DEFAULT_CLAP_SETTINGS: &[AppSettings] = &[\n\n AppSettings::DontCollapseArgsInUsage,\n\n AppSettings::UnifiedHelpMessage,\n\n AppSettings::SubcommandRequiredElseHelp,\n\n];\n\n\n", "file_path": "src/main.rs", "rank": 53, "score": 7.002755869416749 }, { "content": " fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let input_filename = matches.value_of(INPUT).unwrap().to_owned();\n\n let input_format = matches\n\n .value_of(INPUT_FORMAT)\n\n .map(|v| {\n\n EmbeddingFormat::try_from(v).context(format!(\"Cannot parse input format: {}\", v))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n let output_filename = matches.value_of(OUTPUT).unwrap().to_owned();\n\n let output_format = matches\n\n .value_of(OUTPUT_FORMAT)\n\n .map(|v| {\n\n EmbeddingFormat::try_from(v).context(format!(\"Cannot parse output format: {}\", v))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n\n\n let metadata_filename = matches.value_of(METADATA_FILENAME).map(ToOwned::to_owned);\n\n\n", "file_path": "src/convert.rs", "rank": 54, "score": 6.946672969702329 }, { "content": " let similarity = SimilarityMeasure::parse_clap_matches(&matches)?;\n\n\n\n Ok(SimilarApp {\n\n embeddings_filename,\n\n embedding_format,\n\n input,\n\n k,\n\n similarity,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let embeddings = read_embeddings_view(&self.embeddings_filename, self.embedding_format)\n\n .context(\"Cannot read embeddings\")?;\n\n\n\n let input = Input::from(self.input.as_ref());\n\n let reader = input.buf_read().context(\"Cannot open input for reading\")?;\n\n\n\n for line in reader.lines() {\n\n let line = line.context(\"Cannot read line\")?.trim().to_owned();\n", "file_path": "src/similar.rs", "rank": 55, "score": 6.728303441466955 }, { "content": " 
.possible_values(&[\"a\", \"b\", \"c\"])\n\n .multiple(true)\n\n .takes_value(true)\n\n .max_values(3),\n\n )\n\n .arg(Arg::with_name(\"INPUT\").help(\"Input words\").index(2))\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let embeddings_filename = matches.value_of(\"EMBEDDINGS\").unwrap().to_owned();\n\n\n\n let input_filename = matches.value_of(\"INPUT\").map(ToOwned::to_owned);\n\n\n\n let embedding_format = matches\n\n .value_of(\"format\")\n\n .map(|f| EmbeddingFormat::try_from(f).context(\"Cannot parse embedding format\"))\n\n .transpose()?\n\n .unwrap();\n\n\n\n let k = matches\n", "file_path": "src/analogy.rs", "rank": 56, "score": 6.454882726704209 }, { "content": " .arg(Arg::with_name(SELECT).help(\"Words to select\").index(3))\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let input_filename = matches.value_of(INPUT_EMBEDDINGS).unwrap().to_owned();\n\n let output_filename = matches.value_of(OUTPUT_EMBEDDINGS).unwrap().to_owned();\n\n let select_input = Input::from(matches.value_of(\"SELECT\"));\n\n\n\n let ignore_unknown = matches.is_present(IGNORE_UNKNOWN);\n\n\n\n let input_format = matches\n\n .value_of(INPUT_FORMAT)\n\n .map(|f| {\n\n EmbeddingFormat::try_from(f)\n\n .context(format!(\"Cannot parse embedding format: {}\", f))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n\n\n let output_format = matches\n", "file_path": "src/select.rs", "rank": 57, "score": 6.342317806373871 }, { "content": " Arg::with_name(N_THREADS)\n\n .short(\"t\")\n\n .long(\"threads\")\n\n .value_name(\"N\")\n\n .help(\"Number of threads (default: logical_cpus /2)\")\n\n .takes_value(true),\n\n )\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n // Arguments\n\n let input_filename = matches.value_of(INPUT).unwrap().to_owned();\n\n let output_filename = matches.value_of(OUTPUT).unwrap().to_owned();\n\n\n\n // Options\n\n let input_format = matches\n\n .value_of(INPUT_FORMAT)\n\n .map(|v| {\n\n EmbeddingFormat::try_from(v).context(format!(\"Cannot parse input format: {}\", v))\n\n })\n", "file_path": "src/quantize.rs", "rank": 58, "score": 6.268022404419984 }, { "content": " input_filename,\n\n excludes,\n\n k,\n\n similarity,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let embeddings = read_embeddings_view(&self.embeddings_filename, self.embedding_format)\n\n .context(\"Cannot read embeddings\")?;\n\n let input = Input::from(self.input_filename.as_ref());\n\n let reader = input.buf_read().context(\"Cannot open input for reading\")?;\n\n\n\n for line in reader.lines() {\n\n let line = line.context(\"Cannot read line\")?.trim().to_owned();\n\n if line.is_empty() {\n\n continue;\n\n }\n\n\n\n let split_line: Vec<&str> = line.split_whitespace().collect();\n", "file_path": "src/analogy.rs", "rank": 59, "score": 6.221002079856903 }, { "content": " let mut words = HashSet::new();\n\n\n\n for word in self\n\n .select_input\n\n .buf_read()\n\n .context(\"Cannot open selection file\")?\n\n .lines()\n\n {\n\n let word = word?;\n\n\n\n match embeddings.vocab().idx(&word) {\n\n Some(_) => {\n\n words.insert(word);\n\n }\n\n None => {\n\n if !self.ignore_unknown {\n\n bail!(\"Cannot get embedding for: {}\", word)\n\n }\n\n }\n\n };\n\n }\n\n\n\n Ok(words)\n\n }\n\n}\n\n\n", "file_path": "src/select.rs", "rank": 60, "score": 6.175821627363732 }, { "content": " .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT_FORMAT)\n\n .short(\"t\")\n\n .long(\"to\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\"finalfusion\", 
\"text\", \"textdims\", \"word2vec\"])\n\n .default_value(\"finalfusion\"),\n\n )\n\n .arg(\n\n Arg::with_name(UNNORMALIZE)\n\n .short(\"u\")\n\n .long(\"unnormalize\")\n\n .help(\"unnormalize embeddings (does not affect finalfusion format)\")\n\n .takes_value(false),\n\n )\n\n }\n\n\n", "file_path": "src/convert.rs", "rank": 61, "score": 6.037882980220848 }, { "content": " Arg::with_name(OUTPUT_FORMAT)\n\n .short(\"t\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\"finalfusion\", \"text\", \"textdims\", \"word2vec\"])\n\n .default_value(\"finalfusion\")\n\n .help(\"Output format\"),\n\n )\n\n .arg(\n\n Arg::with_name(INPUT_EMBEDDINGS)\n\n .help(\"Input embeddings\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT_EMBEDDINGS)\n\n .help(\"Output embeddings\")\n\n .index(2)\n\n .required(true),\n\n )\n", "file_path": "src/select.rs", "rank": 62, "score": 5.872856524432848 }, { "content": " .transpose()?\n\n .unwrap();\n\n\n\n Ok(BucketToExplicitApp {\n\n input_filename,\n\n output_filename,\n\n format,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let f = File::open(&self.input_filename)\n\n .context(format!(\"Cannot open input file: {}\", self.input_filename))?;\n\n let mut reader = BufReader::new(f);\n\n let f = File::create(&self.output_filename).context(format!(\n\n \"Cannot create embeddings file for writing: {}\",\n\n self.output_filename\n\n ))?;\n\n let embeddings = match self.format {\n\n EmbeddingFormat::FinalFusion => {\n", "file_path": "src/bucket_to_explicit.rs", "rank": 63, "score": 5.83306855431394 }, { "content": " let embeddings_filename = matches.value_of(\"EMBEDDINGS\").unwrap().to_owned();\n\n\n\n let embedding_format = matches\n\n .value_of(\"format\")\n\n .map(|f| {\n\n EmbeddingFormat::try_from(f)\n\n .context(format!(\"Cannot parse embedding format: {}\", f))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n\n\n let k = matches\n\n .value_of(\"neighbors\")\n\n .map(|k| {\n\n k.parse()\n\n .context(format!(\"Cannot parse number of neighbors: {}\", k))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n\n", "file_path": "src/similar.rs", "rank": 64, "score": 5.566171385602306 }, { "content": " .transpose()?\n\n .unwrap();\n\n let n_attempts = matches\n\n .value_of(N_ATTEMPTS)\n\n .map(|a| {\n\n a.parse()\n\n .context(format!(\"Cannot parse number of attempts: {}\", a))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n let n_iterations = matches\n\n .value_of(N_ITERATIONS)\n\n .map(|i| {\n\n i.parse()\n\n .context(format!(\"Cannot parse number of iterations: {}\", i))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n let n_subquantizers = matches\n\n .value_of(N_SUBQUANTIZERS)\n", "file_path": "src/quantize.rs", "rank": 65, "score": 5.5338303082695885 }, { "content": " .map(|n| {\n\n n.parse()\n\n .context(format!(\"Cannot parse number of subquantizers: {}\", n))\n\n })\n\n .transpose()?;\n\n let n_threads = matches\n\n .value_of(N_THREADS)\n\n .map(|n| {\n\n n.parse()\n\n .context(format!(\"Cannot parse number of threads: {}\", n))\n\n })\n\n .transpose()?\n\n .unwrap_or(num_cpus::get() / 2);\n\n let quantizer = matches.value_of(QUANTIZER).map(ToOwned::to_owned).unwrap();\n\n let quantizer_bits = matches\n\n .value_of(QUANTIZER_BITS)\n\n .map(|n| {\n\n n.parse()\n\n .context(format!(\"Cannot parse number of quantizer_bits: {}\", n))\n\n })\n", "file_path": "src/quantize.rs", "rank": 66, "score": 5.530632924529087 }, { "content": " if line.is_empty() {\n\n continue;\n\n }\n\n\n\n let results = match 
embeddings.word_similarity(&line, self.k) {\n\n Some(results) => results,\n\n None => {\n\n eprintln!(\"Could not compute embedding for: {}\", line);\n\n continue;\n\n }\n\n };\n\n\n\n for similar in results {\n\n println!(\"{}\\t{}\", similar.word(), self.similarity.as_f32(&similar));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/similar.rs", "rank": 67, "score": 5.319052657000125 }, { "content": "static N_ATTEMPTS: &str = \"n_attempts\";\n\nstatic N_ITERATIONS: &str = \"n_iterations\";\n\nstatic N_SUBQUANTIZERS: &str = \"n_subquantizers\";\n\nstatic N_THREADS: &str = \"n_threads\";\n\nstatic QUANTIZER: &str = \"quantizer\";\n\nstatic QUANTIZER_BITS: &str = \"quantizer_bits\";\n\n\n\n// Argument constants\n\nstatic INPUT: &str = \"INPUT\";\n\nstatic OUTPUT: &str = \"OUTPUT\";\n\n\n\npub struct QuantizeApp {\n\n input_filename: String,\n\n input_format: EmbeddingFormat,\n\n n_attempts: usize,\n\n n_iterations: usize,\n\n n_subquantizers: Option<usize>,\n\n n_threads: usize,\n\n output_filename: String,\n\n quantizer: String,\n", "file_path": "src/quantize.rs", "rank": 68, "score": 5.189191400481973 }, { "content": " ensure!(\n\n split_line.len() == 3,\n\n \"Query does not consist of three tokens: {}\",\n\n line\n\n );\n\n\n\n let results = match embeddings.analogy_masked(\n\n [&split_line[0], &split_line[1], &split_line[2]],\n\n self.excludes,\n\n self.k,\n\n ) {\n\n Ok(results) => results,\n\n Err(success) => {\n\n print_missing_tokens(&split_line, &success);\n\n continue;\n\n }\n\n };\n\n\n\n for analogy in results {\n\n println!(\"{}\\t{}\", analogy.word(), self.similarity.as_f32(&analogy));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/analogy.rs", "rank": 69, "score": 4.772723143576506 }, { "content": " output_filename,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let output = Output::from(self.output_filename.as_ref());\n\n let mut writer = BufWriter::new(output.write().context(\"Cannot open output for writing\")?);\n\n\n\n if let Some(metadata) = read_metadata(&self.input_filename)? 
{\n\n writer\n\n .write_all(\n\n to_string_pretty(&*metadata)\n\n .context(\"Cannot serialize metadata to TOML\")?\n\n .as_bytes(),\n\n )\n\n .context(\"Cannot write metadata\")?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 70, "score": 4.752107369992057 }, { "content": " .vocab()\n\n .idx(&instance.answer)\n\n .and_then(|idx| idx.word())\n\n .is_none()\n\n {\n\n let mut section_counts = self.section_counts.lock().unwrap();\n\n let counts = section_counts.entry(instance.section.clone()).or_default();\n\n counts.n_skipped += 1;\n\n return;\n\n }\n\n\n\n // If the model is not able to provide a query result, it is counted\n\n // as an error.\n\n let (is_correct, cos) = self\n\n .embeddings\n\n .analogy([&instance.query.0, &instance.query.1, &instance.query.2], 1)\n\n .map(|r| {\n\n let result = r.first().unwrap();\n\n (result.word() == instance.answer, result.cosine_similarity())\n\n })\n", "file_path": "src/compute_accuracy.rs", "rank": 71, "score": 4.720127344092351 }, { "content": " .long(\"threads\")\n\n .value_name(\"N\")\n\n .help(\"Number of threads (default: logical_cpus / 2)\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(EMBEDDINGS)\n\n .help(\"Embedding file\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(Arg::with_name(ANALOGIES).help(\"Analogy file\").index(2))\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let embeddings_filename = matches.value_of(EMBEDDINGS).unwrap().to_owned();\n\n let analogies_filename = matches.value_of(ANALOGIES).map(ToOwned::to_owned);\n\n let n_threads = matches\n\n .value_of(\"threads\")\n\n .map(|v| {\n", "file_path": "src/compute_accuracy.rs", "rank": 72, "score": 4.557023120659587 }, { "content": " .value_of(\"neighbors\")\n\n .map(|v| v.parse().context(\"Cannot parse k\"))\n\n .transpose()?\n\n .unwrap();\n\n let excludes = matches\n\n .values_of(\"include\")\n\n .map(|v| {\n\n let set = v.collect::<HashSet<_>>();\n\n let exclude_a = !set.contains(\"a\");\n\n let exclude_b = !set.contains(\"b\");\n\n let exclude_c = !set.contains(\"c\");\n\n [exclude_a, exclude_b, exclude_c]\n\n })\n\n .unwrap_or_else(|| [true, true, true]);\n\n\n\n let similarity = SimilarityMeasure::parse_clap_matches(&matches)?;\n\n\n\n Ok(AnalogyApp {\n\n embeddings_filename,\n\n embedding_format,\n", "file_path": "src/analogy.rs", "rank": 73, "score": 4.469870322398613 }, { "content": "#[cfg(feature = \"accelerate\")]\n\nextern crate accelerate_src;\n\n\n\n#[cfg(feature = \"netlib\")]\n\nextern crate netlib_src;\n\n\n\n#[cfg(feature = \"openblas\")]\n\nextern crate openblas_src;\n\n\n\n#[cfg(feature = \"intel-mkl\")]\n\nextern crate intel_mkl_src;\n\n\n\n#[cfg(feature = \"intel-mkl-amd\")]\n\nextern crate intel_mkl_src;\n\n\n\nuse std::io::stdout;\n\n\n\nuse anyhow::Result;\n\nuse clap::{App, AppSettings, Arg, Shell, SubCommand};\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 4.419282991083775 }, { "content": " Arg::with_name(\"neighbors\")\n\n .short(\"k\")\n\n .value_name(\"K\")\n\n .help(\"Return K nearest neighbors\")\n\n .takes_value(true)\n\n .default_value(\"10\"),\n\n )\n\n .arg(SimilarityMeasure::new_clap_arg())\n\n .arg(\n\n Arg::with_name(\"EMBEDDINGS\")\n\n .help(\"Embeddings file\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(Arg::with_name(\"INPUT\").help(\"Input words\").index(2))\n\n }\n\n\n\n fn parse(matches: &ArgMatches) -> Result<Self> {\n\n let input = matches.value_of(\"INPUT\").map(ToOwned::to_owned);\n\n\n", "file_path": "src/similar.rs", "rank": 75, "score": 
4.264422867817817 }, { "content": " Ok(ConvertApp {\n\n input_filename,\n\n output_filename,\n\n input_format,\n\n output_format,\n\n metadata_filename,\n\n lossy: matches.is_present(LOSSY),\n\n unnormalize: matches.is_present(UNNORMALIZE),\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let metadata = self\n\n .metadata_filename\n\n .as_ref()\n\n .map(read_metadata)\n\n .transpose()?\n\n .map(Metadata::new);\n\n\n\n let mut embeddings = read_embeddings(&self.input_filename, self.input_format, self.lossy)?;\n", "file_path": "src/convert.rs", "rank": 76, "score": 4.227848090041227 }, { "content": " quantizer_bits: u32,\n\n}\n\n\n\nimpl FinalfusionApp for QuantizeApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"quantize\")\n\n .about(\"Quantize embedding matrices\")\n\n .arg(\n\n Arg::with_name(INPUT)\n\n .help(\"finalfusion model\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT)\n\n .help(\"Output file\")\n\n .index(2)\n\n .required(true),\n\n )\n\n .arg(\n", "file_path": "src/quantize.rs", "rank": 77, "score": 4.029257235528536 }, { "content": " v.parse()\n\n .context(format!(\"Cannot parse number of threads: {}\", v))\n\n })\n\n .transpose()?\n\n .unwrap_or(num_cpus::get() / 2);\n\n\n\n Ok(ComputeAccuracyApp {\n\n analogies_filename,\n\n embeddings_filename,\n\n n_threads,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n ThreadPoolBuilder::new()\n\n .num_threads(self.n_threads)\n\n .build_global()\n\n .unwrap();\n\n\n\n let embeddings =\n", "file_path": "src/compute_accuracy.rs", "rank": 78, "score": 3.9756632856635656 }, { "content": " input_filename: String,\n\n output_filename: String,\n\n}\n\n\n\nimpl FinalfusionApp for ReconstructApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"reconstruct\")\n\n .about(\"Reconstruct quantized embedding matrices\")\n\n .arg(\n\n Arg::with_name(INPUT)\n\n .help(\"quantized finalfusion embeddings\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT)\n\n .help(\"reconstructed finalfusion embeddings\")\n\n .index(2)\n\n .required(true),\n\n )\n", "file_path": "src/reconstruct.rs", "rank": 79, "score": 3.8440246246143057 }, { "content": " .value_of(OUTPUT_FORMAT)\n\n .map(|f| {\n\n EmbeddingFormat::try_from(f)\n\n .context(format!(\"Cannot parse embedding format: {}\", f))\n\n })\n\n .transpose()?\n\n .unwrap();\n\n\n\n Ok(SelectApp {\n\n ignore_unknown,\n\n input_filename,\n\n input_format,\n\n output_filename,\n\n output_format,\n\n select_input,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n let embeddings = read_embeddings(&self.input_filename, self.input_format)\n", "file_path": "src/select.rs", "rank": 80, "score": 3.8193904250360537 }, { "content": " output_filename: String,\n\n format: EmbeddingFormat,\n\n}\n\n\n\nimpl FinalfusionApp for BucketToExplicitApp {\n\n fn app() -> App<'static, 'static> {\n\n App::new(\"bucket-to-explicit\")\n\n .about(\"Convert embeddings with bucket-vocab to explicit vocab.\")\n\n .arg(\n\n Arg::with_name(INPUT)\n\n .help(\"Input file\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT)\n\n .help(\"Output file\")\n\n .index(2)\n\n .required(true),\n\n )\n", "file_path": "src/bucket_to_explicit.rs", "rank": 81, "score": 3.774614150824166 }, { "content": " Ok(())\n\n }\n\n \"compute-accuracy\" => compute_accuracy::ComputeAccuracyApp::parse(\n\n matches.subcommand_matches(\"compute-accuracy\").unwrap(),\n\n )?\n\n .run(),\n\n \"convert\" => {\n\n 
convert::ConvertApp::parse(matches.subcommand_matches(\"convert\").unwrap())?.run()\n\n }\n\n \"metadata\" => {\n\n metadata::MetadataApp::parse(matches.subcommand_matches(\"metadata\").unwrap())?.run()\n\n }\n\n \"quantize\" => {\n\n quantize::QuantizeApp::parse(matches.subcommand_matches(\"quantize\").unwrap())?.run()\n\n }\n\n \"reconstruct\" => {\n\n reconstruct::ReconstructApp::parse(matches.subcommand_matches(\"reconstruct\").unwrap())?\n\n .run()\n\n }\n\n \"select\" => select::SelectApp::parse(matches.subcommand_matches(\"select\").unwrap())?.run(),\n\n \"similar\" => {\n\n similar::SimilarApp::parse(matches.subcommand_matches(\"similar\").unwrap())?.run()\n\n }\n\n _unknown => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 82, "score": 3.390467889781473 }, { "content": " .setting(AppSettings::ArgRequiredElseHelp)\n\n .arg(Arg::with_name(\"shell\").possible_values(&Shell::variants())),\n\n );\n\n let matches = cli.clone().get_matches();\n\n\n\n match matches.subcommand_name().unwrap() {\n\n \"analogy\" => {\n\n analogy::AnalogyApp::parse(matches.subcommand_matches(\"analogy\").unwrap())?.run()\n\n }\n\n \"bucket-to-explicit\" => bucket_to_explicit::BucketToExplicitApp::parse(\n\n matches.subcommand_matches(\"bucket-to-explicit\").unwrap(),\n\n )?\n\n .run(),\n\n \"completions\" => {\n\n let shell = matches\n\n .subcommand_matches(\"completions\")\n\n .unwrap()\n\n .value_of(\"shell\")\n\n .unwrap();\n\n write_completion_script(cli, shell.parse::<Shell>().unwrap());\n", "file_path": "src/main.rs", "rank": 83, "score": 3.3131780048915864 }, { "content": "#[cfg(feature = \"intel-mkl-amd\")]\n\nuse std::os::raw::c_int;\n\n\n\nuse ndarray::{Array1, ArrayViewMut1, ArrayViewMut2};\n\n\n", "file_path": "src/util.rs", "rank": 84, "score": 3.113859386223977 }, { "content": " read_embeddings_view(&self.embeddings_filename, EmbeddingFormat::FinalFusion)\n\n .context(\"Cannot read embeddings\")?;\n\n\n\n let analogies_file = Input::from(self.analogies_filename.as_ref());\n\n let reader = analogies_file\n\n .buf_read()\n\n .context(\"Cannot open analogy file for reading\")?;\n\n\n\n let instances = read_analogies(reader)?;\n\n process_analogies(&embeddings, &instances);\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/compute_accuracy.rs", "rank": 85, "score": 2.5973631513381354 }, { "content": "mod analogy;\n\n\n\nmod bucket_to_explicit;\n\n\n\nmod compute_accuracy;\n\n\n\nmod convert;\n\n\n\npub mod io;\n\n\n\nmod metadata;\n\n\n\nmod quantize;\n\n\n\nmod reconstruct;\n\n\n\nmod select;\n\n\n\nmod similar;\n\n\n", "file_path": "src/main.rs", "rank": 86, "score": 2.4278648953221156 }, { "content": "\n\n // Overwrite metadata if provided, otherwise retain existing metadata.\n\n if metadata.is_some() {\n\n embeddings.set_metadata(metadata);\n\n }\n\n\n\n write_embeddings(\n\n &embeddings,\n\n &self.output_filename,\n\n self.output_format,\n\n self.unnormalize,\n\n )\n\n .context(\"Cannot write embeddings\")\n\n }\n\n}\n\n\n", "file_path": "src/convert.rs", "rank": 87, "score": 2.243029553329467 }, { "content": " .arg(\n\n Arg::with_name(INPUT)\n\n .help(\"finalfusion model\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(OUTPUT)\n\n .help(\"Output file\")\n\n .index(2)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(INPUT_FORMAT)\n\n .short(\"f\")\n\n .long(\"from\")\n\n .value_name(\"FORMAT\")\n\n .takes_value(true)\n\n .possible_values(&[\n\n \"fasttext\",\n", "file_path": "src/convert.rs", "rank": 88, "score": 2.134861880820272 }, { 
"content": " .transpose()?\n\n .unwrap();\n\n\n\n Ok(QuantizeApp {\n\n input_filename,\n\n input_format,\n\n n_attempts,\n\n n_iterations,\n\n n_subquantizers,\n\n n_threads,\n\n output_filename,\n\n quantizer,\n\n quantizer_bits,\n\n })\n\n }\n\n\n\n fn run(&self) -> Result<()> {\n\n env_logger::init();\n\n\n\n ThreadPoolBuilder::new()\n", "file_path": "src/quantize.rs", "rank": 89, "score": 2.0903642220116376 }, { "content": " .unwrap_or((false, 0.));\n\n\n\n let mut section_counts = self.section_counts.lock().unwrap();\n\n let counts = section_counts.entry(instance.section.clone()).or_default();\n\n counts.n_instances += 1;\n\n if is_correct {\n\n counts.n_correct += 1;\n\n }\n\n counts.sum_cos += cos;\n\n }\n\n\n\n /// Print the accuracy for a section.\n\n fn print_section_accuracy(&self, section: &str, counts: &Counts) {\n\n if counts.n_instances == 0 {\n\n eprintln!(\"{}: no evaluation instances\", section);\n\n return;\n\n }\n\n\n\n println!(\n\n \"{}: {}/{} correct, accuracy: {:.2}, avg cos: {:1.2}, skipped: {}\",\n", "file_path": "src/compute_accuracy.rs", "rank": 90, "score": 1.8707623913174047 }, { "content": " .num_threads(self.n_threads)\n\n .build_global()\n\n .unwrap();\n\n\n\n let embeddings = read_embeddings_view(&self.input_filename, self.input_format)\n\n .context(\"Cannot read embeddings\")?;\n\n\n\n // Quantize\n\n let quantized_embeddings = quantize_embeddings(&self, &embeddings)?.into();\n\n write_embeddings(\n\n &quantized_embeddings,\n\n &self.output_filename,\n\n EmbeddingFormat::FinalFusion,\n\n false,\n\n )?;\n\n\n\n print_loss(embeddings.storage(), quantized_embeddings.storage());\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 91, "score": 1.8438470259624817 }, { "content": " section,\n\n counts.n_correct,\n\n counts.n_instances,\n\n (counts.n_correct as f64 / counts.n_instances as f64) * 100.,\n\n (counts.sum_cos / counts.n_instances as f32),\n\n counts.n_skipped,\n\n );\n\n }\n\n}\n\n\n\nimpl<'a> Drop for Eval<'a> {\n\n fn drop(&mut self) {\n\n let section_counts = self.section_counts.lock().unwrap();\n\n\n\n // Print out counts for all sections.\n\n for (section, counts) in section_counts.iter() {\n\n self.print_section_accuracy(section, counts);\n\n }\n\n\n\n let n_correct = section_counts.values().map(|c| c.n_correct).sum::<usize>();\n", "file_path": "src/compute_accuracy.rs", "rank": 92, "score": 1.828481311005997 }, { "content": " let (metadata, vocab, quantized_storage, norms) = embeddings.into_parts();\n\n\n\n let mut array: Array2<f32> = quantized_storage.reconstruct().into();\n\n\n\n let norms = match norms {\n\n Some(norms) => norms,\n\n None => NdNorms::new(l2_normalize_array(\n\n array.view_mut().slice_mut(s![0..vocab.words_len(), ..]),\n\n )),\n\n };\n\n\n\n let embeddings = Embeddings::new(metadata, vocab, NdArray::from(array).into(), norms);\n\n\n\n write_embeddings(\n\n &embeddings,\n\n &self.output_filename,\n\n EmbeddingFormat::FinalFusion,\n\n false,\n\n )\n\n }\n\n}\n", "file_path": "src/reconstruct.rs", "rank": 93, "score": 1.6377517569291262 } ]
Rust
src/agent/coverage/src/cobertura.rs
tonybaloney/onefuzz
e0f2e9ed5aae006e0054387de7a0ff8c83c8f722
use crate::source::SourceCoverage; use crate::source::SourceCoverageLocation; use crate::source::SourceFileCoverage; use anyhow::Context; use anyhow::Error; use anyhow::Result; use std::time::{SystemTime, UNIX_EPOCH}; use xml::writer::{EmitterConfig, XmlEvent}; pub fn cobertura(source_coverage: SourceCoverage) -> Result<String, Error> { let mut backing: Vec<u8> = Vec::new(); let mut emitter = EmitterConfig::new() .perform_indent(true) .create_writer(&mut backing); let unixtime = SystemTime::now() .duration_since(UNIX_EPOCH) .context("system time before unix epoch")? .as_secs(); emitter.write( XmlEvent::start_element("coverage") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("lines-covered", "0") .attr("lines-valid", "0") .attr("branches-covered", "0") .attr("branches-valid", "0") .attr("complexity", "0") .attr("version", "0.1") .attr("timestamp", &format!("{}", unixtime)), )?; emitter.write(XmlEvent::start_element("packages"))?; emitter.write( XmlEvent::start_element("package") .attr("name", "0") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; emitter.write(XmlEvent::start_element("classes"))?; let files: Vec<SourceFileCoverage> = source_coverage.files; for file in files { emitter.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", &file.file) .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; let locations: Vec<SourceCoverageLocation> = file.locations; emitter.write(XmlEvent::start_element("lines"))?; for location in locations { emitter.write( XmlEvent::start_element("line") .attr("number", &location.line.to_string()) .attr("hits", &location.count.to_string()) .attr("branch", "false"), )?; emitter.write(XmlEvent::end_element())?; } emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; } emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; Ok(String::from_utf8(backing)?) } #[cfg(test)] mod tests { use super::*; use anyhow::Result; #[test] fn test_source_to_cobertura() -> Result<()> { let mut coverage_locations_vec1: Vec<SourceCoverageLocation> = Vec::new(); coverage_locations_vec1.push(SourceCoverageLocation { line: 5, column: None, count: 3, }); coverage_locations_vec1.push(SourceCoverageLocation { line: 10, column: None, count: 0, }); let mut coverage_locations_vec2: Vec<SourceCoverageLocation> = Vec::new(); coverage_locations_vec2.push(SourceCoverageLocation { line: 0, column: None, count: 0, }); let mut file_coverage_vec1: Vec<SourceFileCoverage> = Vec::new(); file_coverage_vec1.push(SourceFileCoverage { locations: coverage_locations_vec1, file: "C:/Users/file1.txt".to_string(), }); file_coverage_vec1.push(SourceFileCoverage { locations: coverage_locations_vec2, file: "C:/Users/file2.txt".to_string(), }); let source_coverage_result = cobertura(SourceCoverage { files: file_coverage_vec1, }); let mut backing_test: Vec<u8> = Vec::new(); let mut _emitter_test = EmitterConfig::new() .perform_indent(true) .create_writer(&mut backing_test); let unixtime = SystemTime::now() .duration_since(UNIX_EPOCH) .context("system time before unix epoch")? 
.as_secs(); _emitter_test.write( XmlEvent::start_element("coverage") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("lines-covered", "0") .attr("lines-valid", "0") .attr("branches-covered", "0") .attr("branches-valid", "0") .attr("complexity", "0") .attr("version", "0.1") .attr("timestamp", &format!("{}", unixtime)), )?; _emitter_test.write(XmlEvent::start_element("packages"))?; _emitter_test.write( XmlEvent::start_element("package") .attr("name", "0") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("classes"))?; _emitter_test.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", "C:/Users/file1.txt") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("lines"))?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "5") .attr("hits", "3") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "10") .attr("hits", "0") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", "C:/Users/file2.txt") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("lines"))?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "0") .attr("hits", "0") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; assert_eq!(source_coverage_result?, String::from_utf8(backing_test)?); Ok(()) } }
use crate::source::SourceCoverage; use crate::source::SourceCoverageLocation; use crate::source::SourceFileCoverage; use anyhow::Context; use anyhow::Error; use anyhow::Result; use std::time::{SystemTime, UNIX_EPOCH}; use xml::writer::{EmitterConfig, XmlEvent};
#[cfg(test)] mod tests { use super::*; use anyhow::Result; #[test] fn test_source_to_cobertura() -> Result<()> { let mut coverage_locations_vec1: Vec<SourceCoverageLocation> = Vec::new(); coverage_locations_vec1.push(SourceCoverageLocation { line: 5, column: None, count: 3, }); coverage_locations_vec1.push(SourceCoverageLocation { line: 10, column: None, count: 0, }); let mut coverage_locations_vec2: Vec<SourceCoverageLocation> = Vec::new(); coverage_locations_vec2.push(SourceCoverageLocation { line: 0, column: None, count: 0, }); let mut file_coverage_vec1: Vec<SourceFileCoverage> = Vec::new(); file_coverage_vec1.push(SourceFileCoverage { locations: coverage_locations_vec1, file: "C:/Users/file1.txt".to_string(), }); file_coverage_vec1.push(SourceFileCoverage { locations: coverage_locations_vec2, file: "C:/Users/file2.txt".to_string(), }); let source_coverage_result = cobertura(SourceCoverage { files: file_coverage_vec1, }); let mut backing_test: Vec<u8> = Vec::new(); let mut _emitter_test = EmitterConfig::new() .perform_indent(true) .create_writer(&mut backing_test); let unixtime = SystemTime::now() .duration_since(UNIX_EPOCH) .context("system time before unix epoch")? .as_secs(); _emitter_test.write( XmlEvent::start_element("coverage") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("lines-covered", "0") .attr("lines-valid", "0") .attr("branches-covered", "0") .attr("branches-valid", "0") .attr("complexity", "0") .attr("version", "0.1") .attr("timestamp", &format!("{}", unixtime)), )?; _emitter_test.write(XmlEvent::start_element("packages"))?; _emitter_test.write( XmlEvent::start_element("package") .attr("name", "0") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("classes"))?; _emitter_test.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", "C:/Users/file1.txt") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("lines"))?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "5") .attr("hits", "3") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "10") .attr("hits", "0") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", "C:/Users/file2.txt") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; _emitter_test.write(XmlEvent::start_element("lines"))?; _emitter_test.write( XmlEvent::start_element("line") .attr("number", "0") .attr("hits", "0") .attr("branch", "false"), )?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; _emitter_test.write(XmlEvent::end_element())?; assert_eq!(source_coverage_result?, String::from_utf8(backing_test)?); Ok(()) } }
pub fn cobertura(source_coverage: SourceCoverage) -> Result<String, Error> { let mut backing: Vec<u8> = Vec::new(); let mut emitter = EmitterConfig::new() .perform_indent(true) .create_writer(&mut backing); let unixtime = SystemTime::now() .duration_since(UNIX_EPOCH) .context("system time before unix epoch")? .as_secs(); emitter.write( XmlEvent::start_element("coverage") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("lines-covered", "0") .attr("lines-valid", "0") .attr("branches-covered", "0") .attr("branches-valid", "0") .attr("complexity", "0") .attr("version", "0.1") .attr("timestamp", &format!("{}", unixtime)), )?; emitter.write(XmlEvent::start_element("packages"))?; emitter.write( XmlEvent::start_element("package") .attr("name", "0") .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; emitter.write(XmlEvent::start_element("classes"))?; let files: Vec<SourceFileCoverage> = source_coverage.files; for file in files { emitter.write( XmlEvent::start_element("class") .attr("name", "0") .attr("filename", &file.file) .attr("line-rate", "0") .attr("branch-rate", "0") .attr("complexity", "0"), )?; let locations: Vec<SourceCoverageLocation> = file.locations; emitter.write(XmlEvent::start_element("lines"))?; for location in locations { emitter.write( XmlEvent::start_element("line") .attr("number", &location.line.to_string()) .attr("hits", &location.count.to_string()) .attr("branch", "false"), )?; emitter.write(XmlEvent::end_element())?; } emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; } emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; emitter.write(XmlEvent::end_element())?; Ok(String::from_utf8(backing)?) }
function_block-full_function
[ { "content": " def is_used(self) -> bool:\n\n if len(self.get_forwards()) == 0:\n\n logging.info(PROXY_LOG_PREFIX + \"no forwards: %s\", self.region)\n\n return False\n", "file_path": "src/api-service/__app__/onefuzzlib/proxy.py", "rank": 0, "score": 51893.43127703405 }, { "content": "def target_uses_input(config: TaskConfig) -> bool:\n\n if config.task.target_options is not None:\n\n for option in config.task.target_options:\n\n if \"{input}\" in option:\n\n return True\n\n if config.task.target_env is not None:\n\n for value in config.task.target_env.values():\n\n if \"{input}\" in value:\n\n return True\n\n\n", "file_path": "src/api-service/__app__/onefuzzlib/tasks/config.py", "rank": 1, "score": 49580.98023869291 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse onefuzz::blob::BlobContainerUrl;\n\nuse uuid::Uuid;\n\n\n\nuse crate::coordinator::double::*;\n\nuse crate::reboot::double::*;\n\nuse crate::scheduler::*;\n\nuse crate::setup::double::*;\n\nuse crate::work::double::*;\n\nuse crate::work::*;\n\nuse crate::worker::double::*;\n\nuse crate::worker::WorkerEvent;\n\nuse onefuzz::process::ExitStatus;\n\n\n\nuse super::*;\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/agent/tests.rs", "rank": 2, "score": 6.922147481435067 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::convert::TryInto;\n\nuse std::ffi::OsStr;\n\nuse std::process::Command;\n\nuse std::sync::mpsc;\n\nuse std::thread;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse anyhow::{format_err, Context, Result};\n\nuse pete::{Ptracer, Restart, Signal, Stop, Tracee};\n\nuse procfs::process::{MMapPath, MemoryMap, Process};\n\n\n\nuse crate::block::CommandBlockCov;\n\nuse crate::cache::ModuleCache;\n\nuse crate::code::{CmdFilter, ModulePath};\n\nuse crate::demangle::Demangler;\n\nuse crate::region::Region;\n", "file_path": "src/agent/coverage/src/block/linux.rs", "rank": 3, "score": 6.877617839044287 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::{Command, Stdio};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::Duration;\n\n\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse coverage::block::CommandBlockCov;\n\nuse coverage::cache::ModuleCache;\n\nuse coverage::cobertura::cobertura;\n\nuse coverage::code::{CmdFilter, CmdFilterDef};\n\nuse coverage::debuginfo::DebugInfo;\n\nuse onefuzz::expand::{Expand, PlaceHolder};\n\nuse onefuzz::syncdir::SyncedDir;\n\nuse onefuzz_telemetry::{warn, Event::coverage_data, EventData};\n\nuse serde::de::DeserializeOwned;\n", "file_path": "src/agent/onefuzz-agent/src/tasks/coverage/generic.rs", "rank": 4, "score": 6.876725462480177 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::Result;\n\nuse onefuzz::blob::BlobContainerUrl;\n\nuse onefuzz::process::ExitStatus;\n\nuse structopt::StructOpt;\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\nuse crate::coordinator::*;\n\nuse crate::work::*;\n\nuse crate::worker::*;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"snake_case\")]\n\npub enum DebugOpt {\n\n NodeEvent(NodeEventOpt),\n\n RunWorker(RunWorkerOpt),\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 6, "score": 6.764625484790714 }, { 
"content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::Stdio;\n\n\n\nuse anyhow::{Context, Result};\n\nuse downcast_rs::Downcast;\n\nuse onefuzz::az_copy;\n\nuse onefuzz::process::Output;\n\nuse tokio::fs;\n\nuse tokio::process::Command;\n\n\n\nuse crate::work::*;\n\n\n\nconst SETUP_PATH_ENV: &str = \"ONEFUZZ_TARGET_SETUP_PATH\";\n\n\n\npub type SetupOutput = Option<Output>;\n\n\n\n#[async_trait]\n", "file_path": "src/agent/onefuzz-supervisor/src/setup.rs", "rank": 7, "score": 6.753829245269883 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Error, Result};\n\nuse tokio::time;\n\n\n\nuse crate::coordinator::*;\n\nuse crate::done::set_done_lock;\n\nuse crate::heartbeat::{AgentHeartbeatClient, HeartbeatSender};\n\nuse crate::reboot::*;\n\nuse crate::scheduler::*;\n\nuse crate::setup::*;\n\nuse crate::work::IWorkQueue;\n\nuse crate::worker::IWorkerRunner;\n\n\n\nconst PENDING_COMMANDS_DELAY: time::Duration = time::Duration::from_secs(10);\n\nconst BUSY_DELAY: time::Duration = time::Duration::from_secs(1);\n\n\n\npub struct Agent {\n\n coordinator: Box<dyn ICoordinator>,\n", "file_path": "src/agent/onefuzz-supervisor/src/agent.rs", "rank": 8, "score": 6.735242850786164 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse anyhow::Result;\n\nuse symbolic::{\n\n debuginfo::Object,\n\n symcache::{SymCache, SymCacheWriter},\n\n};\n\n\n\n#[cfg(windows)]\n\nuse goblin::pe::PE;\n\n\n\n#[cfg(windows)]\n\nuse symbolic::debuginfo::pe;\n\n\n", "file_path": "src/agent/coverage/src/debuginfo.rs", "rank": 9, "score": 6.733065343873312 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::fmt;\n\n\n\nuse anyhow::Result;\n\nuse onefuzz::process::Output;\n\n\n\nuse crate::commands::add_ssh_key;\n\nuse crate::coordinator::{NodeCommand, NodeState};\n\nuse crate::reboot::RebootContext;\n\nuse crate::setup::ISetupRunner;\n\nuse crate::work::*;\n\nuse crate::worker::*;\n\n\n\npub enum Scheduler {\n\n Free(State<Free>),\n\n SettingUp(State<SettingUp>),\n\n PendingReboot(State<PendingReboot>),\n\n Ready(State<Ready>),\n", "file_path": "src/agent/onefuzz-supervisor/src/scheduler.rs", "rank": 10, "score": 6.72778191794322 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::proxy;\n\nuse anyhow::Result;\n\nuse onefuzz_telemetry::{\n\n set_appinsights_clients, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey, Role,\n\n};\n\nuse reqwest_retry::SendRetry;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{fs::File, io::BufReader, path::PathBuf};\n\nuse storage_queue::QueueClient;\n\nuse thiserror::Error;\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ProxyError {\n\n #[error(\"missing argument {0}\")]\n\n MissingArg(String),\n", "file_path": "src/proxy-manager/src/config.rs", "rank": 11, "score": 6.72356114354415 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::cmp::Ordering;\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\nuse log::*;\n\n\n\nuse nom::bytes::complete::{tag, take_till1, take_while};\n\nuse nom::character::complete::line_ending;\n\nuse nom::combinator::{eof, map_res, 
opt};\n\nuse nom::multi::many0;\n\nuse nom::IResult;\n\n\n\n/// A module name and an offset\n\n#[derive(Clone, Eq, PartialEq, Hash)]\n\npub struct ModOff {\n\n pub module: String,\n\n pub offset: usize,\n", "file_path": "src/agent/srcview/src/modoff.rs", "rank": 12, "score": 6.721301448400321 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\n\n\nuse anyhow::{Context, Result};\n\nuse downcast_rs::Downcast;\n\nuse tokio::fs;\n\n\n\nuse crate::work::*;\n\n\n\n#[async_trait]\n", "file_path": "src/agent/onefuzz-supervisor/src/reboot.rs", "rank": 13, "score": 6.719718483782256 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::borrow::Borrow;\n\nuse std::ffi::OsStr;\n\nuse std::fmt;\n\nuse std::ops::Range;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse anyhow::{bail, Result};\n\nuse regex::RegexSet;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::filter::Filter;\n\nuse crate::region::{Region, RegionIndex};\n\n\n\n/// `PathBuf` that is guaranteed to be canonicalized and have a file name.\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]\n\n#[serde(transparent)]\n\npub struct ModulePath {\n", "file_path": "src/agent/coverage/src/code.rs", "rank": 14, "score": 6.717735472454255 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::fmt;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\nuse anyhow::{format_err, Context, Result};\n\nuse log::warn;\n\nuse regex::Regex;\n\nuse xml::writer::{EmitterConfig, XmlEvent};\n\n\n\nuse crate::{SrcLine, SrcView};\n\n\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n", "file_path": "src/agent/srcview/src/report.rs", "rank": 15, "score": 6.714833451340103 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::{Duration, Instant};\n\nuse std::{process::Command, process::Stdio};\n\n\n\nuse anyhow::Result;\n\nuse coverage::block::CommandBlockCov as Coverage;\n\nuse coverage::cache::ModuleCache;\n\nuse coverage::code::CmdFilter;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, PartialEq, StructOpt)]\n", "file_path": "src/agent/coverage/examples/src-cov.rs", "rank": 16, "score": 6.71120944895079 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Error, Result};\n\nuse downcast_rs::Downcast;\n\nuse onefuzz::{auth::AccessToken, http::ResponseExt, process::Output};\n\nuse reqwest::{Client, RequestBuilder, Response, StatusCode};\n\nuse reqwest_retry::{\n\n is_auth_failure, RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS,\n\n};\n\nuse serde::Serialize;\n\nuse uuid::Uuid;\n\n\n\nuse crate::commands::SshKeyInfo;\n\nuse crate::config::Registration;\n\nuse crate::work::{TaskId, WorkSet};\n\nuse crate::worker::WorkerEvent;\n\n\n\n#[derive(Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct StopTask {\n", "file_path": "src/agent/onefuzz-supervisor/src/coordinator.rs", "rank": 17, "score": 6.704263523411586 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse reqwest::Url;\n\nuse std::path::Path;\n\n\n\nuse super::*;\n\n\n", 
"file_path": "src/agent/onefuzz-agent/src/tasks/generic/input_poller/tests.rs", "rank": 18, "score": 6.691707234757598 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse reqwest::Url;\n\nuse storage_queue::Message;\n\nuse storage_queue::QueueClient;\n\n\n\n#[async_trait]\n", "file_path": "src/agent/onefuzz-agent/src/tasks/generic/input_poller/callback.rs", "rank": 19, "score": 6.690723832370131 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::{Duration, Instant};\n\nuse std::{process::Command, process::Stdio};\n\n\n\nuse anyhow::Result;\n\nuse coverage::block::CommandBlockCov as Coverage;\n\nuse coverage::cache::ModuleCache;\n\nuse coverage::code::{CmdFilter, CmdFilterDef};\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, PartialEq, StructOpt)]\n", "file_path": "src/agent/coverage/examples/coverage.rs", "rank": 20, "score": 6.68949498577143 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::process::Command;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse anyhow::{Context, Result};\n\nuse debugger::{BreakpointId, BreakpointType, DebugEventHandler, Debugger, ModuleLoadInfo};\n\n\n\nuse crate::block::CommandBlockCov;\n\nuse crate::cache::ModuleCache;\n\nuse crate::code::{CmdFilter, ModulePath};\n\n\n", "file_path": "src/agent/coverage/src/block/windows.rs", "rank": 21, "score": 6.68949498577143 }, { "content": "use crate::tasks::config::CommonConfig;\n\nuse crate::tasks::utils::parse_key_value;\n\nuse anyhow::Result;\n\nuse backoff::{future::retry, Error as BackoffError, ExponentialBackoff};\n\nuse clap::{App, Arg, ArgMatches};\n\nuse flume::Sender;\n\nuse onefuzz::{blob::url::BlobContainerUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};\n\nuse path_absolutize::Absolutize;\n\nuse reqwest::Url;\n\nuse std::{\n\n collections::HashMap,\n\n env::current_dir,\n\n path::{Path, PathBuf},\n\n time::Duration,\n\n};\n\nuse uuid::Uuid;\n\n\n\npub const SETUP_DIR: &str = \"setup_dir\";\n\npub const INPUTS_DIR: &str = \"inputs_dir\";\n\npub const CRASHES_DIR: &str = \"crashes_dir\";\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 22, "score": 6.688489901521454 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::io::ErrorKind;\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::{Context, Result};\n\nuse downcast_rs::Downcast;\n\nuse onefuzz::{auth::Secret, blob::BlobContainerUrl, http::is_auth_error};\n\nuse storage_queue::{Message as QueueMessage, QueueClient};\n\nuse tokio::fs;\n\nuse uuid::Uuid;\n\n\n\nuse crate::config::Registration;\n\n\n\npub type JobId = Uuid;\n\n\n\npub type TaskId = Uuid;\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n", "file_path": "src/agent/onefuzz-supervisor/src/work.rs", "rank": 23, "score": 6.676285386281723 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::process::Command;\n\n\n", "file_path": "src/proxy-manager/build.rs", "rank": 24, "score": 6.668684294561511 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse 
std::path::{Path, PathBuf};\n\n\n\nuse anyhow::{Context, Result};\n\nuse futures::stream::TryStreamExt;\n\nuse reqwest::{Body, RequestBuilder, Response, Url};\n\nuse reqwest_retry::SendRetry;\n\nuse serde::Serialize;\n\nuse tokio::{fs, io};\n\nuse tokio_util::codec;\n\n\n\n#[derive(Clone)]\n\npub struct BlobClient {\n\n client: reqwest::Client,\n\n}\n\n\n\nimpl Default for BlobClient {\n\n fn default() -> Self {\n", "file_path": "src/agent/onefuzz/src/blob/client.rs", "rank": 25, "score": 6.6679205859337385 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse futures::stream::StreamExt;\n\nuse std::{\n\n ffi::OsStr,\n\n path::{Path, PathBuf},\n\n};\n\nuse tokio_stream::wrappers::ReadDirStream;\n\n\n\nuse std::process::Stdio;\n\nuse tokio::fs;\n\nuse tokio::process::Command;\n\n\n\nconst ONEFUZZ_ROOT_ENV: &str = \"ONEFUZZ_ROOT\";\n\n\n", "file_path": "src/agent/onefuzz/src/fs.rs", "rank": 26, "score": 6.665372603738918 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#[macro_use]\n\nextern crate onefuzz_telemetry;\n\n#[macro_use]\n\nextern crate anyhow;\n\n#[macro_use]\n\nextern crate clap;\n\n\n\nmod config;\n\nmod proxy;\n\n\n\nuse anyhow::Result;\n\nuse clap::{App, Arg, SubCommand};\n\nuse config::{Config, ProxyError::MissingArg};\n\nuse std::{\n\n io::{stdout, Write},\n\n time::Instant,\n\n};\n", "file_path": "src/proxy-manager/src/main.rs", "rank": 27, "score": 6.657196592524523 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils};\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{\n\n expand::Expand, fs::set_executable, http::ResponseExt, jitter::delay_with_jitter,\n\n syncdir::SyncedDir,\n\n};\n\nuse reqwest::Url;\n\nuse reqwest_retry::SendRetry;\n\nuse serde::Deserialize;\n\nuse std::{\n\n collections::HashMap,\n\n path::{Path, PathBuf},\n\n process::Stdio,\n\n sync::Arc,\n\n};\n\nuse storage_queue::{QueueClient, EMPTY_QUEUE_DELAY};\n\nuse tokio::process::Command;\n", "file_path": "src/agent/onefuzz-agent/src/tasks/merge/generic.rs", "rank": 28, "score": 6.657176849327482 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n report::{crash_report::CrashTestResult, libfuzzer_report},\n\n utils::default_bool_true,\n\n};\n\n\n\nuse anyhow::{Context, Result};\n\nuse reqwest::Url;\n\n\n\nuse super::common::{self, RegressionHandler};\n\nuse async_trait::async_trait;\n\nuse onefuzz::syncdir::SyncedDir;\n\nuse serde::Deserialize;\n\nuse std::{collections::HashMap, path::PathBuf};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n", "file_path": "src/agent/onefuzz-agent/src/tasks/regression/libfuzzer.rs", "rank": 29, "score": 6.646484898632489 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::blob::url::redact_query_sas_sig;\n\nuse anyhow::{Context, Result};\n\nuse backoff::{self, future::retry_notify, ExponentialBackoff};\n\nuse std::{\n\n ffi::{OsStr, OsString},\n\n fmt,\n\n path::Path,\n\n process::Stdio,\n\n sync::atomic::{AtomicUsize, Ordering},\n\n time::Duration,\n\n};\n\nuse tempfile::tempdir;\n\nuse tokio::fs;\n\nuse tokio::process::Command;\n\nuse url::Url;\n\n\n\nconst RETRY_INTERVAL: Duration = Duration::from_secs(5);\n", "file_path": 
"src/agent/onefuzz/src/az_copy.rs", "rank": 30, "score": 6.646484898632489 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::process::Command;\n\nuse std::{env, process::Stdio};\n\n\n", "file_path": "src/agent/onefuzz-supervisor/build.rs", "rank": 31, "score": 6.645819232868792 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::process::Command;\n\nuse std::{env, process::Stdio};\n\n\n", "file_path": "src/agent/onefuzz-agent/build.rs", "rank": 32, "score": 6.645819232868792 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::{\n\n fmt::{self, Display, Formatter},\n\n hash::{Hash, Hasher},\n\n};\n\n\n\nuse anyhow::Result;\n\nuse fnv::FnvHasher;\n\nuse log::trace;\n\nuse serde::{Serialize, Serializer};\n\nuse win_util::memory;\n\nuse winapi::{shared::minwindef::DWORD, um::winnt::HANDLE};\n\n\n\nuse crate::dbghelp::{self, DebugHelpGuard, ModuleInfo, SymInfo, SymLineInfo};\n\n\n\nconst UNKNOWN_MODULE: &str = \"<UnknownModule>\";\n\n\n\n/// The file and line number for frames in the call stack.\n", "file_path": "src/agent/debugger/src/stack.rs", "rank": 33, "score": 6.6393702635668355 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::trivially_copy_pass_by_ref)]\n\nuse anyhow::Result;\n\nuse pete::{\n\n Pid, Ptracer, Restart, Siginfo,\n\n Signal::{self, *},\n\n Stop, Tracee,\n\n};\n\nuse proc_maps::MapRange;\n\nuse serde::Serialize;\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::fmt;\n\nuse std::process::Command;\n\n\n\npub struct TriageCommand {\n\n tracer: Ptracer,\n\n tracee: Tracee,\n", "file_path": "src/agent/onefuzz/src/triage.rs", "rank": 34, "score": 6.63278846588086 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::manual_swap)]\n\n\n\nuse std::{fs::File, path::Path};\n\n\n\nuse anyhow::{bail, Context, Result};\n\nuse fixedbitset::FixedBitSet;\n\nuse goblin::pe::PE;\n\nuse memmap2::Mmap;\n\nuse pdb::{\n\n AddressMap, FallibleIterator, PdbInternalSectionOffset, ProcedureSymbol, TypeIndex, PDB,\n\n};\n\nuse winapi::um::winnt::{HANDLE, IMAGE_FILE_MACHINE_AMD64, IMAGE_FILE_MACHINE_I386};\n\n\n\nuse crate::intel;\n\n\n", "file_path": "src/agent/coverage/src/pe.rs", "rank": 35, "score": 6.632270843733737 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n report::{crash_report::CrashTestResult, generic},\n\n utils::default_bool_true,\n\n};\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse onefuzz::syncdir::SyncedDir;\n\nuse reqwest::Url;\n\nuse serde::Deserialize;\n\nuse std::{collections::HashMap, path::PathBuf};\n\n\n\nuse super::common::{self, RegressionHandler};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n\n pub target_exe: PathBuf,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/regression/generic.rs", "rank": 36, "score": 6.625186590376789 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::Path;\n\n\n\nuse anyhow::{Context, Result};\n\nuse futures::stream::TryStreamExt;\n\nuse reqwest::{Body, Client, Response, StatusCode, 
Url};\n\nuse reqwest_retry::{\n\n send_retry_reqwest_default, RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS,\n\n};\n\nuse serde::Serialize;\n\nuse tokio::{fs, io};\n\nuse tokio_util::codec;\n\n\n\n#[derive(Clone)]\n\npub struct BlobUploader {\n\n client: Client,\n\n url: Url,\n\n}\n", "file_path": "src/agent/onefuzz/src/uploader.rs", "rank": 37, "score": 6.625186590376789 }, { "content": "use std::path::PathBuf;\n\nuse std::process::{Command, Stdio};\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{\n\n machine_id::{get_machine_id, get_scaleset_name},\n\n process::ExitStatus,\n\n};\n\nuse onefuzz_telemetry::{self as telemetry, EventData, Role};\n\nuse std::io::{self, Write};\n\nuse structopt::StructOpt;\n\nuse uuid::Uuid;\n\n\n\npub mod agent;\n\npub mod buffer;\n\npub mod commands;\n\npub mod config;\n\npub mod coordinator;\n\npub mod debug;\n\npub mod done;\n", "file_path": "src/agent/onefuzz-supervisor/src/main.rs", "rank": 38, "score": 6.6181174549476784 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::{\n\n az_copy,\n\n blob::{BlobClient, BlobContainerUrl},\n\n fs::{exists, sync, SyncPath},\n\n jitter::delay_with_jitter,\n\n monitor::DirectoryMonitor,\n\n uploader::BlobUploader,\n\n};\n\nuse anyhow::{Context, Result};\n\nuse dunce::canonicalize;\n\nuse onefuzz_telemetry::{Event, EventData};\n\nuse reqwest::{StatusCode, Url};\n\nuse reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{env::current_dir, path::PathBuf, str, time::Duration};\n\nuse tokio::fs;\n\n\n", "file_path": "src/agent/onefuzz/src/syncdir.rs", "rank": 39, "score": 6.6130128195956015 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse super::crash_report::*;\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n generic::input_poller::*,\n\n heartbeat::{HeartbeatSender, TaskHeartbeatClient},\n\n utils::default_bool_true,\n\n};\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse onefuzz::{blob::BlobUrl, libfuzzer::LibFuzzer, sha256, syncdir::SyncedDir};\n\nuse reqwest::Url;\n\nuse serde::Deserialize;\n\nuse std::{\n\n collections::HashMap,\n\n path::{Path, PathBuf},\n\n sync::Arc,\n\n};\n", "file_path": "src/agent/onefuzz-agent/src/tasks/report/libfuzzer_report.rs", "rank": 40, "score": 6.61106338910508 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{\n\n auth::{ClientCredentials, Credentials, ManagedIdentityCredentials},\n\n http::{is_auth_error_code, ResponseExt},\n\n jitter::delay_with_jitter,\n\n};\n\nuse onefuzz_telemetry::{InstanceTelemetryKey, MicrosoftTelemetryKey};\n\nuse reqwest_retry::SendRetry;\n\nuse std::{\n\n path::{Path, PathBuf},\n\n time::{Duration, Instant},\n\n};\n\nuse tokio::fs;\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq)]\n", "file_path": "src/agent/onefuzz-supervisor/src/config.rs", "rank": 41, "score": 6.61106338910508 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::{fmt, path::PathBuf};\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{blob::BlobUrl, jitter::delay_with_jitter, syncdir::SyncedDir};\n\nuse reqwest::Url;\n\nuse tempfile::{tempdir, TempDir};\n\nuse tokio::{fs, time::Duration};\n\n\n\nmod callback;\n\npub use 
callback::*;\n\n\n\nconst POLL_INTERVAL: Duration = Duration::from_secs(10);\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/agent/onefuzz-agent/src/tasks/generic/input_poller.rs", "rank": 42, "score": 6.600521357851723 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::local::common::UiEvent;\n\nuse anyhow::Result;\n\nuse crossterm::{\n\n event::{self, Event, KeyCode},\n\n execute,\n\n terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},\n\n};\n\nuse futures::{StreamExt, TryStreamExt};\n\nuse log::Level;\n\nuse onefuzz::utils::try_wait_all_join_handles;\n\nuse onefuzz_telemetry::{self, EventData};\n\nuse std::{\n\n collections::HashMap,\n\n io::{self, Stdout},\n\n iter::once,\n\n mem::{discriminant, Discriminant},\n\n path::PathBuf,\n", "file_path": "src/agent/onefuzz-agent/src/local/tui.rs", "rank": 43, "score": 6.589991128764781 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse super::crash_report::{CrashReport, CrashTestResult, InputBlob, NoCrash};\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n generic::input_poller::{CallbackImpl, InputPoller, Processor},\n\n heartbeat::{HeartbeatSender, TaskHeartbeatClient},\n\n utils::default_bool_true,\n\n};\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse onefuzz::{blob::BlobUrl, input_tester::Tester, sha256, syncdir::SyncedDir};\n\nuse reqwest::Url;\n\nuse serde::Deserialize;\n\nuse std::{\n\n collections::HashMap,\n\n path::{Path, PathBuf},\n\n};\n\nuse storage_queue::{Message, QueueClient};\n", "file_path": "src/agent/onefuzz-agent/src/tasks/report/generic.rs", "rank": 44, "score": 6.581824150792359 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true};\n\nuse anyhow::{Context, Result};\n\nuse arraydeque::{ArrayDeque, Wrapping};\n\nuse futures::future::try_join_all;\n\nuse onefuzz::{\n\n fs::list_files,\n\n libfuzzer::{LibFuzzer, LibFuzzerLine},\n\n process::ExitStatus,\n\n syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir},\n\n};\n\nuse onefuzz_telemetry::{\n\n Event::{new_coverage, new_result, runtime_stats},\n\n EventData,\n\n};\n\nuse serde::Deserialize;\n\nuse std::{collections::HashMap, path::PathBuf, sync::Arc};\n\nuse tempfile::{tempdir_in, TempDir};\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/libfuzzer_fuzz.rs", "rank": 45, "score": 6.581824150792359 }, { "content": "/// We use dbghlp Sym apis to walk a stack. dbghlp apis are documented as not being thread safe,\n\n/// so we provide a lock around our use of these apis.\n\n///\n\n/// Note that Rust itself also uses dbghlp to get a stack trace, e.g. when you panic and set\n\n/// RUST_BACKTRACE.\n\n///\n\n/// This function is based on the `backtrace` crate which is also used in Rust std. 
Here\n\n/// we use the same named local mutex to hopefully avoid any unsynchronized uses of dbghlp\n\n/// in std.\n\npub fn lock() -> Result<DebugHelpGuard> {\n\n use core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\n static LOCK: AtomicUsize = AtomicUsize::new(0);\n\n let mut lock = LOCK.load(Ordering::SeqCst);\n\n if lock == 0 {\n\n lock = unsafe {\n\n CreateMutexA(\n\n std::ptr::null_mut(),\n\n 0,\n\n \"Local\\\\RustBacktraceMutex\\0\".as_ptr() as _,\n\n ) as usize\n\n };\n\n\n\n if lock == 0 {\n\n return Err(last_os_error());\n\n }\n\n\n\n // Handle the race between threads creating our mutex by closing ours if another\n\n // thread created the mutex first.\n", "file_path": "src/agent/debugger/src/dbghelp.rs", "rank": 46, "score": 6.5772049314145455 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::{\n\n env,\n\n path::{Path, PathBuf},\n\n process::Stdio,\n\n sync::Arc,\n\n};\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{fs::has_files, sha256::digest_file};\n\nuse tempfile::{tempdir, TempDir};\n\nuse tokio::{\n\n fs,\n\n process::{Child, Command},\n\n};\n\n\n\nuse crate::tasks::coverage::libfuzzer_coverage::Config;\n\n\n", "file_path": "src/agent/onefuzz-agent/src/tasks/coverage/recorder.rs", "rank": 47, "score": 6.5772049314145455 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n heartbeat::HeartbeatSender,\n\n utils::{self, default_bool_true},\n\n};\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{\n\n http::ResponseExt,\n\n jitter::delay_with_jitter,\n\n libfuzzer::{LibFuzzer, LibFuzzerMergeOutput},\n\n syncdir::{SyncOperation, SyncedDir},\n\n};\n\nuse reqwest::Url;\n\nuse reqwest_retry::SendRetry;\n\nuse serde::Deserialize;\n\nuse std::{\n\n collections::HashMap,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/merge/libfuzzer_merge.rs", "rank": 48, "score": 6.576526304434908 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::Path;\n\n\n\nuse anyhow::{Context, Result};\n\nuse sha2::{Digest, Sha256};\n\nuse tokio::fs;\n\n\n", "file_path": "src/agent/onefuzz/src/sha256.rs", "rank": 49, "score": 6.56565159217397 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::fs::{onefuzz_etc, write_file};\n\nuse anyhow::{Context, Result};\n\nuse reqwest_retry::SendRetry;\n\n#[cfg(target_os = \"linux\")]\n\nuse std::path::Path;\n\n#[cfg(target_os = \"macos\")]\n\nuse std::process::Stdio;\n\nuse std::time::Duration;\n\nuse tokio::fs;\n\n#[cfg(target_os = \"macos\")]\n\nuse tokio::process::Command;\n\nuse uuid::Uuid;\n\n\n\n// https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-metadata-service#tracking-vm-running-on-azure\n\nconst IMS_ID_URL: &str =\n\n \"http://169.254.169.254/metadata/instance/compute/vmId?api-version=2020-06-01&format=text\";\n\n\n", "file_path": "src/agent/onefuzz/src/machine_id.rs", "rank": 50, "score": 6.563251761078369 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::onefuzz::heartbeat::HeartbeatClient;\n\nuse crate::onefuzz::machine_id::{get_machine_id, get_machine_name};\n\nuse anyhow::Result;\n\nuse reqwest::Url;\n\nuse serde::{self, Deserialize, Serialize};\n\nuse std::time::Duration;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, 
Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum HeartbeatData {\n\n TaskAlive,\n\n MachineAlive,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n", "file_path": "src/agent/onefuzz-agent/src/tasks/heartbeat.rs", "rank": 51, "score": 6.544802972611826 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::{\n\n env::{get_path_with_directory, LD_LIBRARY_PATH, PATH},\n\n expand::Expand,\n\n fs::{list_files, write_file},\n\n input_tester::{TestResult, Tester},\n\n};\n\nuse anyhow::{Context, Result};\n\nuse rand::seq::SliceRandom;\n\nuse rand::thread_rng;\n\nuse std::{\n\n collections::HashMap,\n\n ffi::OsString,\n\n path::{Path, PathBuf},\n\n process::Stdio,\n\n};\n\nuse tempfile::tempdir;\n\nuse tokio::process::{Child, Command};\n", "file_path": "src/agent/onefuzz/src/libfuzzer.rs", "rank": 52, "score": 6.544802972611826 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::upper_case_acronyms)]\n\n\n\nuse std::ffi::OsStr;\n\n\n\nuse anyhow::Result;\n\nuse log::error;\n\nuse winapi::{\n\n shared::minwindef::{DWORD, TRUE},\n\n um::werapi::{WerAddExcludedApplication, WerRemoveExcludedApplication},\n\n};\n\nuse winreg::{enums::HKEY_LOCAL_MACHINE, RegKey};\n\n\n\nuse crate::{check_hr, string};\n\n\n", "file_path": "src/agent/win-util/src/wer.rs", "rank": 53, "score": 6.5402164975066635 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#[cfg(feature = \"intel_instructions\")]\n\nuse iced_x86::{Code as IntelInstructionCode, Mnemonic as IntelInstructionMnemonic};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt;\n\nuse std::sync::{LockResult, RwLockReadGuard, RwLockWriteGuard};\n\nuse uuid::Uuid;\n\n#[cfg(feature = \"z3\")]\n\nuse z3_sys::ErrorCode as Z3ErrorCode;\n\n\n\npub use appinsights::telemetry::SeverityLevel::{Critical, Error, Information, Verbose, Warning};\n\nuse tokio::sync::broadcast::{self, Receiver};\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]\n\n#[serde(transparent)]\n\npub struct MicrosoftTelemetryKey(Uuid);\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 54, "score": 6.532529607939354 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::fmt;\n\n\n\nuse crate::http::ResponseExt;\n\nuse anyhow::{Context, Result};\n\nuse reqwest_retry::SendRetry;\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Clone, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct Secret<T>(T);\n\n\n\nimpl<T> Secret<T> {\n\n pub fn expose(self) -> T {\n\n self.0\n\n }\n\n\n\n pub fn expose_ref(&self) -> &T {\n", "file_path": "src/agent/onefuzz/src/auth.rs", "rank": 55, "score": 6.531034301834543 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::local::{\n\n common::add_common_config, generic_analysis, generic_crash_report, generic_generator,\n\n libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression,\n\n libfuzzer_test_input, radamsa, test_input, tui::TerminalUi,\n\n};\n\n#[cfg(any(target_os = \"linux\", target_os = \"windows\"))]\n\nuse crate::local::{coverage, libfuzzer_coverage};\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, Arg, SubCommand};\n\nuse crossterm::tty::IsTty;\n\nuse std::str::FromStr;\n\nuse std::time::Duration;\n\nuse strum::IntoEnumIterator;\n\nuse 
strum_macros::{EnumIter, EnumString, IntoStaticStr};\n\nuse tokio::{select, time::timeout};\n\n\n\n#[derive(Debug, PartialEq, EnumString, IntoStaticStr, EnumIter)]\n\n#[strum(serialize_all = \"kebab-case\")]\n", "file_path": "src/agent/onefuzz-agent/src/local/cmd.rs", "rank": 56, "score": 6.530994397821465 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#[macro_use]\n\nextern crate async_trait;\n\n#[macro_use]\n\nextern crate downcast_rs;\n\n#[macro_use]\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate anyhow;\n\n#[macro_use]\n\nextern crate onefuzz_telemetry;\n\nextern crate onefuzz;\n\n\n\nuse crate::{\n\n config::StaticConfig, coordinator::StateUpdateEvent, heartbeat::init_agent_heartbeat,\n\n panic::set_panic_handler, work::WorkSet, worker::WorkerEvent,\n\n};\n\nuse std::fs::OpenOptions;\n", "file_path": "src/agent/onefuzz-supervisor/src/main.rs", "rank": 57, "score": 6.529055798951024 }, { "content": "use anyhow::{Context, Result};\n\nuse coverage::cache::ModuleCache;\n\nuse log::{error, info, trace, warn};\n\nuse num_cpus;\n\nuse rayon::{prelude::*, ThreadPoolBuilder};\n\nuse sha2::{Digest, Sha256};\n\n\n\nuse crate::{\n\n appverifier::{self, AppVerifierController, AppVerifierState},\n\n crash_detector::{self, DebuggerResult},\n\n logging,\n\n summary::Summary,\n\n test_result::{new_test_result, Exception, ExceptionCode, ExceptionDescription, TestResult},\n\n};\n\n\n\nmacro_rules! writecrlf {\n\n ($dst:expr) => (\n\n write!($dst, \"\\r\\n\")\n\n );\n\n ($dst:expr,) => (\n", "file_path": "src/agent/input-tester/src/tester.rs", "rank": 58, "score": 6.529055798951024 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{\n\n config::CommonConfig, heartbeat::HeartbeatSender, report::crash_report::monitor_reports,\n\n};\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{az_copy, blob::url::BlobUrl};\n\nuse onefuzz::{\n\n expand::Expand,\n\n fs::{set_executable, OwnedDir},\n\n jitter::delay_with_jitter,\n\n process::monitor_process,\n\n syncdir::SyncedDir,\n\n};\n\nuse serde::Deserialize;\n\nuse std::process::Stdio;\n\nuse std::{\n\n collections::HashMap,\n\n path::{Path, PathBuf},\n", "file_path": "src/agent/onefuzz-agent/src/tasks/analysis/generic.rs", "rank": 59, "score": 6.521210594432346 }, { "content": "use onefuzz::{fs::list_files, libfuzzer::LibFuzzer, syncdir::SyncedDir};\n\nuse onefuzz_telemetry::{Event::coverage_data, EventData};\n\nuse reqwest::Url;\n\nuse serde::Deserialize;\n\nuse std::collections::{BTreeMap, HashMap};\n\nuse std::{\n\n ffi::OsString,\n\n path::{Path, PathBuf},\n\n sync::Arc,\n\n};\n\nuse storage_queue::{Message, QueueClient};\n\nuse tokio::fs;\n\n\n\nconst TOTAL_COVERAGE: &str = \"total.cov\";\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n\n pub target_exe: PathBuf,\n\n pub target_env: HashMap<String, String>,\n\n pub target_options: Vec<String>,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/coverage/libfuzzer_coverage.rs", "rank": 60, "score": 6.520710240200719 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::large_enum_variant)]\n\n#[cfg(any(target_os = \"linux\", target_os = \"windows\"))]\n\nuse crate::tasks::coverage;\n\nuse crate::tasks::{\n\n analysis, fuzz,\n\n heartbeat::{init_task_heartbeat, TaskHeartbeatClient},\n\n merge, regression, report,\n\n};\n\nuse anyhow::Result;\n\nuse onefuzz::machine_id::{get_machine_id, 
get_scaleset_name};\n\nuse onefuzz_telemetry::{\n\n self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey,\n\n Role,\n\n};\n\nuse reqwest::Url;\n\nuse serde::{self, Deserialize};\n\nuse std::{path::PathBuf, sync::Arc, time::Duration};\n", "file_path": "src/agent/onefuzz-agent/src/tasks/config.rs", "rank": 61, "score": 6.513862186062005 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n//\n\n// Tested with data logged to Application Insights from OneFuzz 1.0.0 through 2.1.0\n\n\n\n#[macro_use]\n\nextern crate serde;\n\n\n\nuse anyhow::Result;\n\nuse chrono::{DateTime, Duration, Utc};\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fs::File;\n\nuse std::io::{self, BufRead, BufReader};\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/utils/telemetry-stats/src/main.rs", "rank": 62, "score": 6.512747042466373 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::jitter::delay_with_jitter;\n\nuse anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse std::time::Duration;\n\n\n\n#[async_trait]\n", "file_path": "src/agent/onefuzz/src/utils.rs", "rank": 63, "score": 6.510517900328138 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::PathBuf;\n\nuse std::sync::{self, mpsc::Receiver as SyncReceiver};\n\nuse std::time::Duration;\n\n\n\nuse anyhow::Result;\n\nuse notify::{DebouncedEvent, Watcher};\n\nuse tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver};\n\nuse tokio::task::{self, JoinHandle};\n\n\n\npub struct DirectoryMonitor {\n\n dir: PathBuf,\n\n notify_events: UnboundedReceiver<DebouncedEvent>,\n\n watcher: notify::RecommendedWatcher,\n\n}\n\n\n\nimpl DirectoryMonitor {\n\n pub fn new(dir: impl Into<PathBuf>) -> Self {\n", "file_path": "src/agent/onefuzz/src/monitor.rs", "rank": 64, "score": 6.497787694573958 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::{jitter::random_delay, utils::CheckNotify};\n\nuse anyhow::Result;\n\nuse futures::Future;\n\nuse reqwest::Url;\n\nuse std::{\n\n collections::HashSet,\n\n sync::{Arc, Mutex},\n\n time::Duration,\n\n};\n\nuse storage_queue::QueueClient;\n\nuse tokio::{sync::Notify, task, task::JoinHandle, time::sleep};\n\n\n\nconst DEFAULT_HEARTBEAT_PERIOD: Duration = Duration::from_secs(60 * 5);\n\n\n\npub struct HeartbeatContext<TContext, T> {\n\n pub state: TContext,\n\n pub queue_client: QueueClient,\n", "file_path": "src/agent/onefuzz/src/heartbeat.rs", "rank": 65, "score": 6.497787694573958 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::{machine_id::get_machine_id, sha256::digest_file_blocking};\n\nuse anyhow::{format_err, Context, Result};\n\nuse onefuzz_telemetry::{InstanceTelemetryKey, MicrosoftTelemetryKey};\n\nuse std::path::{Path, PathBuf};\n\nuse std::{collections::HashMap, hash::Hash};\n\nuse strum::IntoEnumIterator;\n\nuse strum_macros::EnumIter;\n\nuse uuid::Uuid;\n\n\n\npub enum ExpandedValue<'a> {\n\n Path(String),\n\n Scalar(String),\n\n List(&'a [String]),\n\n Mapping(Box<dyn Fn(&Expand<'a>, &str) -> Result<Option<ExpandedValue<'a>>> + Send>),\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, EnumIter)]\n", "file_path": "src/agent/onefuzz/src/expand.rs", "rank": 66, "score": 6.493404054567643 }, { "content": "use 
crate::failure::save_failure;\n\nuse backtrace::Backtrace;\n\nuse std::{panic, sync::Once};\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/panic.rs", "rank": 67, "score": 6.490129994425242 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};\n\nuse onefuzz_telemetry::{\n\n Event::{\n\n new_report, new_unable_to_reproduce, new_unique_report, regression_report,\n\n regression_unable_to_reproduce,\n\n },\n\n EventData,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\nuse stacktrace_parser::CrashLog;\n\nuse std::path::{Path, PathBuf};\n\nuse uuid::Uuid;\n\n\n\n#[derive(Debug, Deserialize, Serialize, Default)]\n\npub struct CrashReport {\n\n pub input_sha256: String,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/report/crash_report.rs", "rank": 68, "score": 6.490017407875267 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::{\n\n collections::btree_map::Range,\n\n fs,\n\n ops::RangeBounds,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse anyhow::Result;\n\nuse log::error;\n\nuse win_util::{file, handle::Handle};\n\nuse winapi::um::{\n\n handleapi::INVALID_HANDLE_VALUE,\n\n winnt::{HANDLE, IMAGE_FILE_MACHINE_AMD64, IMAGE_FILE_MACHINE_I386},\n\n};\n\n\n\nuse crate::{\n\n breakpoint::{BreakpointCollection, ResolvedBreakpoint},\n", "file_path": "src/agent/debugger/src/module.rs", "rank": 69, "score": 6.485507370325466 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub mod linux;\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub mod pe_provider;\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub mod windows;\n\n\n\nuse std::collections::{btree_map, BTreeMap};\n\nuse std::convert::TryFrom;\n\n\n\nuse anyhow::Result;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::code::ModulePath;\n\nuse crate::debuginfo::DebugInfo;\n", "file_path": "src/agent/coverage/src/block.rs", "rank": 70, "score": 6.476478044839162 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::onefuzz::heartbeat::HeartbeatClient;\n\nuse crate::onefuzz::machine_id::{get_machine_id, get_machine_name};\n\nuse anyhow::Result;\n\nuse reqwest::Url;\n\nuse serde::{self, Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\n#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum HeartbeatData {\n\n MachineAlive,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n", "file_path": "src/agent/onefuzz-supervisor/src/heartbeat.rs", "rank": 71, "score": 6.476478044839162 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\nuse std::{\n\n path::{Path, PathBuf},\n\n process::{Child, ChildStderr, ChildStdout, Command, Stdio},\n\n thread::{self, JoinHandle},\n\n};\n\n\n\nuse anyhow::{format_err, Context as AnyhowContext, Result};\n\nuse downcast_rs::Downcast;\n\nuse onefuzz::process::{ExitStatus, Output};\n\nuse tokio::fs;\n\n\n\nuse crate::buffer::TailBuffer;\n\nuse crate::work::*;\n\n\n\n// Max length of captured output streams from worker child processes.\n\nconst MAX_TAIL_LEN: usize = 4096;\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n", "file_path": "src/agent/onefuzz-supervisor/src/worker.rs", "rank": 72, "score": 6.47453245337461 }, { "content": 
"// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse backoff::{self, future::retry_notify, ExponentialBackoff};\n\nuse onefuzz_telemetry::debug;\n\nuse reqwest::{Response, StatusCode};\n\nuse std::{\n\n sync::atomic::{AtomicUsize, Ordering},\n\n time::Duration,\n\n};\n\n\n\npub const DEFAULT_RETRY_PERIOD: Duration = Duration::from_secs(5);\n\npub const MAX_RETRY_ATTEMPTS: usize = 5;\n\n\n\npub enum RetryCheck {\n\n Retry,\n\n Fail,\n\n Succeed,\n\n}\n\n\n", "file_path": "src/agent/reqwest-retry/src/lib.rs", "rank": 73, "score": 6.467473826214995 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#[macro_use]\n\nextern crate anyhow;\n\n#[macro_use]\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate onefuzz_telemetry;\n\nextern crate onefuzz;\n\n\n\nuse anyhow::Result;\n\nuse clap::{App, ArgMatches, SubCommand};\n\nuse std::io::{stdout, Write};\n\n\n\nmod local;\n\nmod managed;\n\nmod tasks;\n\n\n\nconst LICENSE_CMD: &str = \"licenses\";\n\nconst LOCAL_CMD: &str = \"local\";\n\nconst MANAGED_CMD: &str = \"managed\";\n\n\n", "file_path": "src/agent/onefuzz-agent/src/main.rs", "rank": 74, "score": 6.467473826214995 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse process_control::{self, ChildExt, Timeout};\n\nuse std::path::Path;\n\nuse std::process::Command;\n\nuse std::time::Duration;\n\nuse std::{collections::HashMap, process::Stdio};\n\nuse tokio::{\n\n io::{AsyncBufReadExt, AsyncRead, BufReader},\n\n process::Child,\n\n sync::Notify,\n\n};\n\n\n\n// Chosen to be significantly below the 32k ApplicationInsights message size\n\nconst MAX_LOG_LINE_LENGTH: usize = 8192;\n\n\n\n/// Serializable representation of a process output.\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n", "file_path": "src/agent/onefuzz/src/process.rs", "rank": 75, "score": 6.466817653026028 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{auth::Secret, machine_id::get_scaleset_name};\n\nuse std::process::Stdio;\n\nuse tokio::{fs, io::AsyncWriteExt, process::Command};\n\n\n\n#[cfg(target_family = \"windows\")]\n\nuse std::{env, path::PathBuf};\n\n\n\n#[cfg(target_family = \"windows\")]\n\nuse tokio::sync::{OnceCell, SetError};\n\n\n\n#[cfg(target_family = \"unix\")]\n\nuse users::{get_user_by_name, os::unix::UserExt};\n\n\n\n#[cfg(target_family = \"unix\")]\n\nconst ONEFUZZ_SERVICE_USER: &str = \"onefuzz\";\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/commands.rs", "rank": 76, "score": 6.4591212161066975 }, { "content": " use crate::tasks::config::CommonConfig;\n\n use onefuzz::blob::BlobContainerUrl;\n\n use onefuzz::syncdir::SyncedDir;\n\n use reqwest::Url;\n\n use std::collections::HashMap;\n\n use std::env;\n\n use tempfile::tempdir;\n\n\n\n let crashes_temp = tempfile::tempdir()?;\n\n let crashes: &std::path::Path = crashes_temp.path();\n\n\n\n let inputs_temp = tempfile::tempdir()?;\n\n let inputs: &std::path::Path = inputs_temp.path();\n\n let input_file = inputs.join(\"seed.txt\");\n\n tokio::fs::write(input_file, \"test\").await?;\n\n\n\n let generator_options: Vec<String> = vec![\n\n \"-o\",\n\n \"{generated_inputs}/input-%n-%s\",\n\n \"-n\",\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/generator.rs", "rank": 77, "score": 
6.4591212161066975 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::single_match)]\n\n\n\nuse std::{io, num::NonZeroU64, path::Path};\n\n\n\nuse anyhow::{format_err, Result};\n\nuse log::{debug, error, trace};\n\nuse rand::{thread_rng, Rng};\n\nuse win_util::process;\n\nuse winapi::{\n\n shared::{\n\n minwindef::{DWORD, LPCVOID},\n\n winerror::ERROR_ACCESS_DENIED,\n\n },\n\n um::{\n\n processthreadsapi::{ResumeThread, SuspendThread},\n\n winbase::Wow64SuspendThread,\n\n winnt::HANDLE,\n", "file_path": "src/agent/debugger/src/target.rs", "rank": 78, "score": 6.4495402918430536 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::unreadable_literal)]\n\n\n\nuse std::mem::size_of;\n\n\n\nuse anyhow::{bail, Result};\n\nuse win_util::process;\n\nuse win_util::UNION; // Ideally this would be exported from winapi.\n\nuse winapi::{\n\n shared::{\n\n basetsd::UINT64,\n\n minwindef::{DWORD, LPCVOID},\n\n ntdef::LPWSTR,\n\n },\n\n um::winnt::{EXCEPTION_RECORD, HANDLE},\n\n STRUCT,\n\n};\n\n\n", "file_path": "src/agent/input-tester/src/test_result/asan.rs", "rank": 79, "score": 6.446878277317959 }, { "content": " let mut tasks = handles;\n\n loop {\n\n let (result, _, remaining_tasks) = futures::future::select_all(tasks).await;\n\n result??;\n\n\n\n if remaining_tasks.is_empty() {\n\n return Ok(());\n\n } else {\n\n tasks = remaining_tasks\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use anyhow::Result;\n\n use futures::*;\n\n use std::sync::Arc;\n\n use tokio::{spawn, sync::Notify, task::JoinHandle, time::sleep};\n", "file_path": "src/agent/onefuzz/src/utils.rs", "rank": 80, "score": 6.446878277317959 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::fs::File;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse anyhow::{format_err, Result};\n\nuse log::*;\n\nuse pdb::{FallibleIterator, SymbolData, PDB};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::SrcLine;\n\n\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]\n\npub struct PdbCache {\n\n offset_to_line: BTreeMap<usize, SrcLine>,\n\n symbol_to_lines: BTreeMap<String, Vec<SrcLine>>,\n\n path_to_symbols: BTreeMap<PathBuf, Vec<String>>,\n\n path_to_lines: BTreeMap<PathBuf, Vec<usize>>,\n", "file_path": "src/agent/srcview/src/pdbcache.rs", "rank": 81, "score": 6.44616452426882 }, { "content": "use storage_queue::{Message, QueueClient};\n\nuse tokio::fs;\n\nuse tokio::task::spawn_blocking;\n\nuse tokio_stream::wrappers::ReadDirStream;\n\nuse url::Url;\n\n\n\nuse crate::tasks::config::CommonConfig;\n\nuse crate::tasks::generic::input_poller::{CallbackImpl, InputPoller, Processor};\n\nuse crate::tasks::heartbeat::{HeartbeatSender, TaskHeartbeatClient};\n\n\n\nconst MAX_COVERAGE_RECORDING_ATTEMPTS: usize = 2;\n\nconst COVERAGE_FILE: &str = \"coverage.json\";\n\nconst SOURCE_COVERAGE_FILE: &str = \"source-coverage.json\";\n\nconst COBERTURA_COVERAGE_FILE: &str = \"cobertura-coverage.xml\";\n\nconst MODULE_CACHE_FILE: &str = \"module-cache.json\";\n\n\n\nconst DEFAULT_TARGET_TIMEOUT: Duration = Duration::from_secs(5);\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n", "file_path": "src/agent/onefuzz-agent/src/tasks/coverage/generic.rs", "rank": 82, "score": 6.44616452426882 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT 
License.\n\n\n\nuse crate::tasks::{\n\n config::CommonConfig,\n\n heartbeat::{HeartbeatSender, TaskHeartbeatClient},\n\n utils::{self, default_bool_true},\n\n};\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::{\n\n expand::Expand,\n\n fs::set_executable,\n\n input_tester::Tester,\n\n process::monitor_process,\n\n sha256,\n\n syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir},\n\n};\n\nuse onefuzz_telemetry::Event::new_result;\n\nuse serde::Deserialize;\n\nuse std::collections::HashMap;\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/generator.rs", "rank": 83, "score": 6.431705937564878 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::tasks::{\n\n heartbeat::{HeartbeatSender, TaskHeartbeatClient},\n\n report::crash_report::{parse_report_file, CrashTestResult, RegressionReport},\n\n};\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse onefuzz::syncdir::SyncedDir;\n\nuse reqwest::Url;\n\nuse std::path::PathBuf;\n\n\n\n/// Defines implementation-provided callbacks for all implementers of regression tasks.\n\n///\n\n/// Shared regression task behavior is implemented in this module.\n\n#[async_trait]\n", "file_path": "src/agent/onefuzz-agent/src/tasks/regression/common.rs", "rank": 84, "score": 6.431705937564878 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse fixedbitset::FixedBitSet;\n\nuse iced_x86::{Decoder, DecoderOptions, FlowControl, Instruction, OpKind};\n\n\n\nuse crate::pe::TryInsert;\n\n\n", "file_path": "src/agent/coverage/src/intel.rs", "rank": 85, "score": 6.429531917488082 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse onefuzz::{http::ResponseExt, jitter::delay_with_jitter};\n\nuse reqwest::{Client, Url};\n\nuse reqwest_retry::SendRetry;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::Duration;\n\nuse tokio::{fs, io};\n\n\n\npub async fn download_input(input_url: Url, dst: impl AsRef<Path>) -> Result<PathBuf> {\n\n let file_name = input_url.path_segments().unwrap().last().unwrap();\n\n let file_path = dst.as_ref().join(file_name);\n\n\n\n if input_url.scheme().to_lowercase() == \"file\" {\n\n let input_file_path = input_url\n\n .to_file_path()\n\n .map_err(|_| anyhow!(\"Invalid file Url\"))?;\n", "file_path": "src/agent/onefuzz-agent/src/tasks/utils.rs", "rank": 86, "score": 6.426128766092484 }, { "content": " },\n\n};\n\nuse onefuzz_telemetry::Event::{new_coverage, new_result};\n\nuse serde::Deserialize;\n\nuse std::{\n\n collections::HashMap,\n\n path::{Path, PathBuf},\n\n process::Stdio,\n\n time::Duration,\n\n};\n\nuse tempfile::tempdir;\n\nuse tokio::{\n\n process::{Child, Command},\n\n sync::Notify,\n\n};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct SupervisorConfig {\n\n pub inputs: SyncedDir,\n\n pub crashes: SyncedDir,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/supervisor.rs", "rank": 87, "score": 6.414873667791003 }, { "content": " info!(\"starting supervisor '{:?}'\", cmd);\n\n let child = cmd\n\n .spawn()\n\n .with_context(|| format!(\"supervisor failed to start: {:?}\", cmd))?;\n\n Ok(child)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::tasks::stats::afl::read_stats;\n\n use onefuzz::process::monitor_process;\n\n use onefuzz_telemetry::EventData;\n\n use std::collections::HashMap;\n\n use 
std::time::Instant;\n\n\n\n const MAX_FUZZ_TIME_SECONDS: u64 = 120;\n\n\n\n async fn has_stats(path: &PathBuf) -> bool {\n\n if let Ok(stats) = read_stats(path).await {\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/supervisor.rs", "rank": 88, "score": 6.413969942886982 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse crate::{\n\n local::{\n\n common::{build_local_context, DirectoryMonitorQueue, UiEvent},\n\n generic_crash_report::{build_report_config, build_shared_args as build_crash_args},\n\n generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args},\n\n },\n\n tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask},\n\n};\n\nuse anyhow::{Context, Result};\n\nuse clap::{App, SubCommand};\n\nuse flume::Sender;\n\nuse onefuzz::utils::try_wait_all_join_handles;\n\nuse std::collections::HashSet;\n\nuse tokio::task::spawn;\n\nuse uuid::Uuid;\n\n\n\npub async fn run(args: &clap::ArgMatches<'_>, event_sender: Option<Sender<UiEvent>>) -> Result<()> {\n", "file_path": "src/agent/onefuzz-agent/src/local/radamsa.rs", "rank": 89, "score": 6.406217170975802 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::fs::metadata;\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::{Context, Result};\n\nuse onefuzz::fs::onefuzz_root;\n\nuse tokio::fs;\n\n\n\npub async fn set_done_lock() -> Result<()> {\n\n let path = done_path()?;\n\n fs::write(&path, \"\")\n\n .await\n\n .with_context(|| format!(\"unable to write done lock: {}\", path.display()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/done.rs", "rank": 90, "score": 6.40427596232487 }, { "content": " pub count: u32,\n\n}\n\n\n\nimpl BlockCov {\n\n pub fn new(offset: u32) -> Self {\n\n Self { offset, count: 0 }\n\n }\n\n}\n\n\n\nmod array {\n\n use std::collections::BTreeMap;\n\n use std::fmt;\n\n\n\n use serde::de::{self, Deserializer, Visitor};\n\n use serde::ser::{SerializeSeq, Serializer};\n\n\n\n use super::BlockCov;\n\n\n\n type BlockCovMap = BTreeMap<u32, BlockCov>;\n\n\n", "file_path": "src/agent/coverage/src/block.rs", "rank": 91, "score": 6.40427596232487 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::useless_format)]\n\n#![allow(clippy::upper_case_acronyms)]\n\n\n\nuse std::{\n\n env,\n\n ffi::{OsStr, OsString},\n\n path::{Path, PathBuf},\n\n process::{Command, Stdio},\n\n str::FromStr,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::debug;\n\nuse win_util::process;\n\n\n\nuse crate::logging;\n", "file_path": "src/agent/input-tester/src/appverifier.rs", "rank": 92, "score": 6.40427596232487 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\n#![allow(clippy::uninit_vec)]\n\n\n\nuse std::{\n\n collections::{btree_map::Range, BTreeMap},\n\n ops::RangeBounds,\n\n};\n\n\n\nuse anyhow::Result;\n\nuse win_util::process;\n\nuse winapi::um::winnt::HANDLE;\n\n\n\nuse crate::debugger::{BreakpointId, BreakpointType};\n\n\n\npub(crate) enum ExtraInfo {\n\n Rva(u64),\n\n Function(String),\n\n}\n", "file_path": "src/agent/debugger/src/breakpoint.rs", "rank": 93, "score": 6.40427596232487 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::Result;\n\nuse onefuzz::input_tester::Tester;\n\nuse 
structopt::StructOpt;\n\n\n\n#[derive(Debug, PartialEq, StructOpt)]\n\n#[structopt(name = \"test-input\")]\n", "file_path": "src/agent/onefuzz/examples/test-input.rs", "rank": 94, "score": 6.402982476674523 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse rand::prelude::*;\n\nuse std::time::Duration;\n\nuse tokio::time::sleep;\n\n\n", "file_path": "src/agent/onefuzz/src/jitter.rs", "rank": 95, "score": 6.400827827969365 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{anyhow, Context, Result};\n\nuse reqwest::Url;\n\nuse serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize};\n\nuse std::time::Duration;\n\nuse uuid::Uuid;\n\n\n\npub const EMPTY_QUEUE_DELAY: Duration = Duration::from_secs(10);\n\npub mod azure_queue;\n\npub mod local_queue;\n\n\n\nuse azure_queue::{AzureQueueClient, AzureQueueMessage};\n\nuse local_queue::{ChannelQueueClient, FileQueueClient, LocalQueueMessage};\n\n\n\n#[derive(Debug, Clone)]\n\npub enum QueueClient {\n\n AzureQueue(AzureQueueClient),\n\n FileQueueClient(Box<FileQueueClient>),\n", "file_path": "src/agent/storage-queue/src/lib.rs", "rank": 96, "score": 6.390668934522001 }, { "content": "//! each input. To do this, the following must be in the `$PATH`:\n\n//!\n\n//! ### Linux\n\n//! - `python3` (3.6)\n\n//! - `gdb` (8.1)\n\n//!\n\n//! ### Windows\n\n//! - `powershell.exe` (5.1)\n\n//! - `cdb.exe` (10.0)\n\n//!\n\n//! Versions in parentheses have been tested.\n\n\n\nuse crate::tasks::heartbeat::*;\n\nuse crate::tasks::{config::CommonConfig, generic::input_poller::*};\n\nuse crate::tasks::{\n\n coverage::{recorder::CoverageRecorder, total::TotalCoverage},\n\n utils::default_bool_true,\n\n};\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n", "file_path": "src/agent/onefuzz-agent/src/tasks/coverage/libfuzzer_coverage.rs", "rank": 97, "score": 6.383185253215818 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse backoff::{future::retry_notify, ExponentialBackoff};\n\nuse queue_file::QueueFile;\n\nuse serde::Serialize;\n\nuse std::path::PathBuf;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::Duration;\n\n\n\npub const EMPTY_QUEUE_DELAY: Duration = Duration::from_secs(10);\n\npub const SEND_RETRY_DELAY: Duration = Duration::from_millis(500);\n\npub const RECEIVE_RETRY_DELAY: Duration = Duration::from_millis(500);\n\npub const MAX_SEND_ATTEMPTS: i32 = 5;\n\npub const MAX_RECEIVE_ATTEMPTS: i32 = 5;\n\npub const MAX_ELAPSED_TIME: Duration = Duration::from_secs(2 * 60);\n\n\n\npub struct LocalQueueMessage {\n\n pub data: Vec<u8>,\n", "file_path": "src/agent/storage-queue/src/local_queue.rs", "rank": 98, "score": 6.375653886961779 }, { "content": "// Copyright (c) Microsoft Corporation.\n\n// Licensed under the MIT License.\n\n\n\nuse std::path::PathBuf;\n\nuse std::{env, process};\n\n\n\nuse srcview::SrcView;\n\n\n", "file_path": "src/agent/srcview/examples/dump_paths.rs", "rank": 99, "score": 6.3657913060308005 } ]
Rust
gui/src/app/root_stack/oc_page/stats_grid.rs
ashleysmithgpu/LACT
0bfc6c5acdd800d33ce92527328a9477c215b180
use daemon::gpu_controller::GpuStats;
use gtk::*;

#[derive(Clone)]
pub struct StatsGrid {
    pub container: Grid,
    vram_usage_bar: LevelBar,
    vram_usage_label: Label,
    gpu_clock_label: Label,
    vram_clock_label: Label,
    gpu_voltage_label: Label,
    power_usage_label: Label,
    gpu_temperature_label: Label,
    gpu_usage_label: Label,
}

impl StatsGrid {
    pub fn new() -> Self {
        let container = Grid::new();
        container.set_column_homogeneous(true);
        container.set_row_spacing(7);

        container.attach(&Label::new(Some("VRAM Usage")), 0, 0, 1, 1);

        let vram_usage_overlay = Overlay::new();
        let vram_usage_bar = LevelBar::new();
        let vram_usage_label = Label::new(None);
        {
            vram_usage_bar.set_orientation(Orientation::Horizontal);
            vram_usage_bar.set_value(1.0);
            vram_usage_label.set_text("0/0 MiB");
            vram_usage_overlay.add(&vram_usage_bar);
            vram_usage_overlay.add_overlay(&vram_usage_label);
            container.attach(&vram_usage_overlay, 1, 0, 2, 1);
        }

        let gpu_clock_label = Label::new(None);
        {
            let gpu_clock_box = Box::new(Orientation::Horizontal, 5);
            gpu_clock_box.pack_start(&Label::new(Some("GPU Clock:")), false, false, 2);
            gpu_clock_label.set_markup("<b>0MHz</b>");
            gpu_clock_box.pack_start(&gpu_clock_label, false, false, 2);
            gpu_clock_box.set_halign(Align::Center);
            container.attach(&gpu_clock_box, 0, 1, 1, 1);
        }

        let vram_clock_label = Label::new(None);
        {
            let vram_clock_box = Box::new(Orientation::Horizontal, 5);
            vram_clock_box.pack_start(&Label::new(Some("VRAM Clock:")), false, false, 2);
            vram_clock_label.set_markup("<b>0MHz</b>");
            vram_clock_box.pack_start(&vram_clock_label, false, false, 2);
            vram_clock_box.set_halign(Align::Center);
            container.attach(&vram_clock_box, 1, 1, 1, 1);
        }

        let gpu_voltage_label = Label::new(None);
        {
            let gpu_voltage_box = Box::new(Orientation::Horizontal, 5);
            gpu_voltage_box.pack_start(&Label::new(Some("GPU Voltage:")), false, false, 2);
            gpu_voltage_label.set_markup("<b>0.000V</b>");
            gpu_voltage_box.pack_start(&gpu_voltage_label, false, false, 2);
            gpu_voltage_box.set_halign(Align::Center);
            container.attach(&gpu_voltage_box, 2, 1, 1, 1);
        }

        let power_usage_label = Label::new(None);
        {
            let power_usage_box = Box::new(Orientation::Horizontal, 5);
            power_usage_box.pack_start(&Label::new(Some("Power Usage:")), false, false, 2);
            power_usage_label.set_markup("<b>00/000W</b>");
            power_usage_box.pack_start(&power_usage_label, false, false, 2);
            power_usage_box.set_halign(Align::Center);
            container.attach(&power_usage_box, 0, 2, 1, 1);
        }

        let gpu_temperature_label = Label::new(None);
        {
            let gpu_temperature_box = Box::new(Orientation::Horizontal, 5);
            gpu_temperature_box.pack_start(&Label::new(Some("GPU Temperature:")), false, false, 2);
            gpu_temperature_box.pack_start(&gpu_temperature_label, false, false, 2);
            gpu_temperature_box.set_halign(Align::Center);
            container.attach(&gpu_temperature_box, 1, 2, 1, 1);
        }

        let gpu_usage_label = Label::new(None);
        {
            let gpu_usage_box = Box::new(Orientation::Horizontal, 5);
            gpu_usage_box.pack_start(&Label::new(Some("GPU Usage:")), false, false, 2);
            gpu_usage_box.pack_start(&gpu_usage_label, false, false, 2);
            gpu_usage_box.set_halign(Align::Center);
            container.attach(&gpu_usage_box, 2, 2, 1, 1);
        }

        Self {
            container,
            vram_usage_bar,
            vram_usage_label,
            gpu_clock_label,
            vram_clock_label,
            gpu_voltage_label,
            power_usage_label,
            gpu_temperature_label,
            gpu_usage_label,
        }
    }

    pub fn set_stats(&self, stats: &GpuStats) {
        self.vram_usage_bar.set_value(
            stats.mem_used.unwrap_or_else(|| 0) as f64
                / stats.mem_total.unwrap_or_else(|| 0) as f64,
        );
        self.vram_usage_label.set_text(&format!(
            "{}/{} MiB",
            stats.mem_used.unwrap_or_else(|| 0),
            stats.mem_total.unwrap_or_else(|| 0)
        ));
        self.gpu_clock_label.set_markup(&format!(
            "<b>{}MHz</b>",
            stats.gpu_freq.unwrap_or_else(|| 0)
        ));
        self.vram_clock_label.set_markup(&format!(
            "<b>{}MHz</b>",
            stats.mem_freq.unwrap_or_else(|| 0)
        ));
        self.gpu_voltage_label.set_markup(&format!(
            "<b>{}V</b>",
            stats.voltage.unwrap_or_else(|| 0) as f64 / 1000f64
        ));
        self.power_usage_label.set_markup(&format!(
            "<b>{}/{}W</b>",
            stats.power_avg.unwrap_or_else(|| 0),
            stats.power_cap.unwrap_or_else(|| 0)
        ));
        self.gpu_temperature_label
            .set_markup(&format!("<b>{}°C</b>", stats.gpu_temp.unwrap_or_default()));
        self.gpu_usage_label
            .set_markup(&format!("<b>{}%</b>", stats.gpu_usage.unwrap_or_default()));
    }
}

use daemon::gpu_controller::GpuStats;
use gtk::*;

#[derive(Clone)]
pub struct StatsGrid {
    pub container: Grid,
    vram_usage_bar: LevelBar,
    vram_usage_label: Label,
    gpu_clock_label: Label,
    vram_clock_label: Label,
    gpu_voltage_label: Label,
    power_usage_label: Label,
    gpu_temperature_label: Label,
    gpu_usage_label: Label,
}

impl StatsGrid {
    pub fn new() -> Self {
        let container = Grid::new();
        container.set_column_homogeneous(true);
        container.set_row_spacing(7);

        container.attach(&Label::new(Some("VRAM Usage")), 0, 0, 1, 1);

        let vram_usage_overlay = Overlay::new();
        let vram_usage_bar = LevelBar::new();
        let vram_usage_label = Label::new(None);
        {
            vram_usage_bar.set_orientation(Orientation::Horizontal);
            vram_usage_bar.set_value(1.0);
            vram_usage_label.set_text("0/0 MiB");
            vram_usage_overlay.add(&vram_usage_bar);
            vram_usage_overlay.add_overlay(&vram_usage_label);
            container.attach(&vram_usage_overlay, 1, 0, 2, 1);
        }

        let gpu_clock_label = Label::new(None);
        {
            let gpu_clock_box = Box::new(Orientation::Horizontal, 5);
            gpu_clock_box.pack_start(&Label::new(Some("GPU Clock:")), false, false, 2);
            gpu_clock_label.set_markup("<b>0MHz</b>");
            gpu_clock_box.pack_start(&gpu_clock_label, false, false, 2);
            gpu_clock_box.set_halign(Align::Center);
            container.attach(&gpu_clock_box, 0, 1, 1, 1);
        }

        let vram_clock_label = Label::new(None);
        {
            let vram_clock_box = Box::new(Orientation::Horizontal, 5);
            vram_clock_box.pack_start(&Label::new(Some("VRAM Clock:")), false, false, 2);
            vram_clock_label.set_markup("<b>0MHz</b>");
            vram_clock_box.pack_start(&vram_clock_label, false, false, 2);
            vram_clock_box.set_halign(Align::Center);
            container.attach(&vram_clock_box, 1, 1, 1, 1);
        }

        let gpu_voltage_label = Label::new(None);
        {
            let gpu_voltage_box = Box::new(Orientation::Horizontal, 5);
            gpu_voltage_box.pack_start(&Label::new(Some("GPU Voltage:")), false, false, 2);
            gpu_voltage_label.set_markup("<b>0.000V</b>");
            gpu_voltage_box.pack_start(&gpu_voltage_label, false, false, 2);
            gpu_voltage_box.set_halign(Align::Center);
            container.attach(&gpu_voltage_box, 2, 1, 1, 1);
        }

        let power_usage_label = Label::new(None);
        {
            let power_usage_box = Box::new(Orientation::Horizontal, 5);
            power_usage_box.pack_start(&Label::new(Some("Power Usage:")), false, false, 2);
            power_usage_label.set_markup("<b>00/000W</b>");
            power_usage_box.pack_start(&power_usage_label, false, false, 2);
            power_usage_box.set_halign(Align::Center);
            container.attach(&power_usage_box, 0, 2, 1, 1);
        }

        let gpu_temperature_label = Label::new(None);
        {
            let gpu_temperature_box = Box::new(Orientation::Horizontal, 5);
            gpu_temperature_box.pack_start(&Label::new(Some("GPU Temperature:")), false, false, 2);
            gpu_temperature_box.pack_start(&gpu_temperature_label, false, false, 2);
            gpu_temperature_box.set_halign(Align::Center);
            container.attach(&gpu_temperature_box, 1, 2, 1, 1);
        }

        let gpu_usage_label = Label::new(None);
        {
            let gpu_usage_box = Box::new(Orientation::Horizontal, 5);
            gpu_usage_box.pack_start(&Label::new(Some("GPU Usage:")), false, false, 2);
            gpu_usage_box.pack_start(&gpu_usage_label, false, false, 2);
            gpu_usage_box.set_halign(Align::Center);
            container.attach(&gpu_usage_box, 2, 2, 1, 1);
        }

        Self {
            container,
            vram_usage_bar,
            vram_usage_label,
            gpu_clock_label,
            vram_clock_label,
            gpu_voltage_label,
            power_usage_label,
            gpu_temperature_label,
            gpu_usage_label,
        }
    }

    pub fn set_stats(&self, stats: &GpuStats) {
        self.vram_usage_bar.set_value(
            stats.mem_used.unwrap_or_else(|| 0) as f64
                / stats.mem_total.unwrap_or_else(|| 0) as f64,
        );
        self.vram_usage_label.set_text(&format!(
            "{}/{} MiB",
            stats.mem_used.unwrap_or_else(|| 0),
            stats.mem_total.unwrap_or_else(|| 0)
        ));
        self.gpu_clock_label.set_markup(&format!(
            "<b>{}MHz</b>",
            stats.gpu_freq.unwrap_or_else(|| 0)
        ));
        self.vram_clock_label.set_markup(&format!(
            "<b>{}MHz</b>",
            stats.mem_freq.unwrap_or_else(|| 0)
        ));
        self.gpu_voltage_label.set_markup(&format!(
            "<b>{}V</b>",
            stats.voltage.unwrap_or_else(|| 0) as f64 / 1000f64
        ));
        self.power_usage_label.set_markup(&format!(
            "<b>{}/{}W</b>",
            stats.power_avg.unwrap_or_else(|| 0),

}
stats.power_cap.unwrap_or_else(|| 0) )); self.gpu_temperature_label .set_markup(&format!("<b>{}°C</b>", stats.gpu_temp.unwrap_or_default())); self.gpu_usage_label .set_markup(&format!("<b>{}%</b>", stats.gpu_usage.unwrap_or_default())); }
function_block-function_prefix_line
[ { "content": "fn print_stats(d: &DaemonConnection, gpu_id: u32) {\n\n let gpu_stats = d.get_gpu_stats(gpu_id).unwrap();\n\n println!(\n\n \"{} {}/{}{}\",\n\n \"VRAM Usage:\".green(),\n\n gpu_stats.mem_used.unwrap_or_default().to_string().bold(),\n\n gpu_stats.mem_total.unwrap_or_default().to_string().bold(),\n\n \"MiB\".bold(),\n\n );\n\n println!(\n\n \"{} {}{}\",\n\n \"Temperature:\".green(),\n\n gpu_stats.gpu_temp.unwrap_or_default().to_string().bold(),\n\n \"°C\".bold(),\n\n );\n\n println!(\n\n \"{} {}/{}{}\",\n\n \"Fan Speed:\".green(),\n\n gpu_stats.fan_speed.unwrap_or_default().to_string().bold(),\n\n gpu_stats\n", "file_path": "cli/src/main.rs", "rank": 0, "score": 58969.14169676955 }, { "content": "fn main() {\n\n env_logger::init();\n\n let d = Daemon::new(false);\n\n let mut signals = Signals::new(&[SIGTERM, SIGINT]).unwrap();\n\n\n\n thread::spawn(move || {\n\n for _ in signals.forever() {\n\n log::info!(\"Shutting down\");\n\n let d = DaemonConnection::new().unwrap();\n\n d.shutdown();\n\n }\n\n });\n\n\n\n d.listen();\n\n}\n", "file_path": "daemon/src/main.rs", "rank": 10, "score": 43204.67721141482 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let opt = Opt::from_args();\n\n\n\n let d = DaemonConnection::new().unwrap();\n\n log::trace!(\"connection established\");\n\n\n\n match opt {\n\n Opt::Gpus => {\n\n let gpus = d.get_gpus();\n\n println!(\"{:?}\", gpus);\n\n }\n\n Opt::Metrics { gpu_id } => {\n\n let mut gpu_ids: Vec<u32> = Vec::new();\n\n\n\n if let Some(gpu_id) = gpu_id {\n\n gpu_ids.push(gpu_id);\n\n } else {\n\n for (gpu_id, _) in d.get_gpus().unwrap() {\n", "file_path": "cli/src/main.rs", "rank": 11, "score": 43204.67721141482 }, { "content": "fn main() {\n\n env_logger::init();\n\n if gtk::init().is_err() {\n\n panic!(\"Cannot initialize GTK\");\n\n }\n\n\n\n let connection = connect_daemon();\n\n\n\n ask_for_online_update(&connection);\n\n\n\n let app = App::new(connection);\n\n\n\n app.run().unwrap();\n\n}\n\n\n", "file_path": "gui/src/main.rs", "rank": 12, "score": 43204.67721141482 }, { "content": "fn connect_daemon() -> DaemonConnection {\n\n match DaemonConnection::new() {\n\n Ok(connection) => {\n\n println!(\"Connection to daemon established\");\n\n connection\n\n }\n\n Err(e) => {\n\n println!(\"Error {:?} connecting to daemon\", e);\n\n println!(\"Starting unprivileged daemon instance\");\n\n\n\n thread::spawn(move || {\n\n let daemon = Daemon::new(true);\n\n daemon.listen();\n\n });\n\n\n\n let dialog = MessageDialog::new(\n\n None::<&gtk::Window>,\n\n DialogFlags::empty(),\n\n gtk::MessageType::Warning,\n\n gtk::ButtonsType::Ok,\n", "file_path": "gui/src/main.rs", "rank": 13, "score": 38461.1204844293 }, { "content": "fn print_config(d: &DaemonConnection) {\n\n let config = d.get_config().unwrap();\n\n\n\n println!(\n\n \"{} {:?}\",\n\n \"Online PCI DB updating:\".purple(),\n\n config.allow_online_update\n\n );\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 14, "score": 37340.182505078716 }, { "content": "fn disable_online_update(d: &DaemonConnection) {\n\n let mut config = d.get_config().unwrap();\n\n config.allow_online_update = Some(false);\n\n d.set_config(config).unwrap();\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 15, "score": 36388.527472080386 }, { "content": "fn enable_online_update(d: &DaemonConnection) {\n\n let mut config = d.get_config().unwrap();\n\n config.allow_online_update = Some(true);\n\n d.set_config(config).unwrap();\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 16, "score": 
36388.527472080386 }, { "content": "fn ask_for_online_update(connection: &DaemonConnection) {\n\n let mut config = connection.get_config().unwrap();\n\n\n\n if let None = config.allow_online_update {\n\n log::trace!(\"Online access permission not configured! Showing the dialog\");\n\n\n\n let diag = MessageDialog::new(\n\n None::<&Window>,\n\n DialogFlags::empty(),\n\n MessageType::Warning,\n\n ButtonsType::YesNo,\n\n \"Do you wish to use the online database for GPU identification?\",\n\n );\n\n match diag.run() {\n\n ResponseType::Yes => config.allow_online_update = Some(true),\n\n ResponseType::No => config.allow_online_update = Some(false),\n\n _ => unreachable!(),\n\n }\n\n diag.hide();\n\n\n\n connection.set_config(config).unwrap();\n\n }\n\n}\n\n\n", "file_path": "gui/src/main.rs", "rank": 17, "score": 35498.20590205867 }, { "content": "fn print_info(d: &DaemonConnection, gpu_id: u32) {\n\n let gpu_info = d.get_gpu_info(gpu_id).unwrap();\n\n println!(\n\n \"{} {}\",\n\n \"GPU Model:\".blue(),\n\n gpu_info.vendor_data.card_model.unwrap_or_default().bold()\n\n );\n\n println!(\n\n \"{} {}\",\n\n \"GPU Vendor:\".blue(),\n\n gpu_info.vendor_data.gpu_vendor.unwrap_or_default().bold()\n\n );\n\n println!(\"{} {}\", \"Driver in use:\".blue(), gpu_info.driver.bold());\n\n println!(\n\n \"{} {}\",\n\n \"VBIOS Version:\".blue(),\n\n gpu_info.vbios_version.bold()\n\n );\n\n println!(\n\n \"{} {}\",\n\n \"VRAM Size:\".blue(),\n\n gpu_info.vram_size.to_string().bold()\n\n );\n\n println!(\"{} {}\", \"Link Speed:\".blue(), gpu_info.link_speed.bold());\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 18, "score": 33043.48784660768 }, { "content": "fn print_fan_curve(d: &DaemonConnection, gpu_id: u32) {\n\n let fan_control = d.get_fan_control(gpu_id).unwrap();\n\n\n\n if fan_control.enabled {\n\n println!(\"{}\", \"Fan curve:\".yellow());\n\n\n\n for (temp, fan_speed) in fan_control.curve {\n\n println!(\n\n \"{}{}: {}{}\",\n\n temp.to_string().yellow(),\n\n \"C°\".yellow(),\n\n fan_speed.round().to_string().bold(),\n\n \"%\".bold()\n\n );\n\n }\n\n } else {\n\n println!(\"{}\", \"Automatic fan control used\".yellow());\n\n }\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 19, "score": 32259.29608034151 }, { "content": "use daemon::gpu_controller::ClocksTable;\n\nuse gtk::*;\n\n\n\npub struct ClocksSettings {\n\n pub gpu_clock: i64,\n\n pub vram_clock: i64,\n\n pub gpu_voltage: i64,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ClocksFrame {\n\n pub container: Frame,\n\n gpu_clock_adjustment: Adjustment,\n\n gpu_voltage_adjustment: Adjustment,\n\n vram_clock_adjustment: Adjustment,\n\n apply_button: Button,\n\n}\n\n\n\nimpl ClocksFrame {\n\n pub fn new() -> Self {\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 20, "score": 26763.8408470167 }, { "content": "\n\n root_grid.attach(&apply_button, 0, 3, 2, 1);\n\n\n\n container.add(&root_grid);\n\n }\n\n\n\n Self {\n\n container,\n\n gpu_clock_adjustment,\n\n gpu_voltage_adjustment,\n\n vram_clock_adjustment,\n\n apply_button,\n\n }\n\n }\n\n\n\n pub fn get_visibility(&self) -> bool {\n\n self.container.get_visible()\n\n }\n\n\n\n pub fn set_clocks(&self, clocks_table: &ClocksTable) {\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 21, "score": 26758.03798634381 }, { "content": " let container = Frame::new(None);\n\n\n\n container.set_margin_start(10);\n\n container.set_margin_end(10);\n\n\n\n container.set_shadow_type(ShadowType::None);\n\n\n\n container.set_label_widget(Some(&{\n\n let label 
= Label::new(None);\n\n label.set_markup(\"<span font_desc='11'><b>Maximum Clocks</b></span>\");\n\n label\n\n }));\n\n container.set_label_align(0.2, 0.0);\n\n\n\n let gpu_clock_adjustment = Adjustment::new(0.0, 0.0, 0.0, 1.0, 0.0, 0.0);\n\n\n\n let gpu_voltage_adjustment = Adjustment::new(1.0, 0.0, 0.0, 0.05, 0.0, 0.0);\n\n\n\n let vram_clock_adjustment = Adjustment::new(0.0, 0.0, 0.0, 1.0, 0.0, 0.0);\n\n\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 22, "score": 26755.902095244448 }, { "content": " self.gpu_clock_adjustment.set_value(*gpu_clockspeed as f64);\n\n\n\n self.gpu_voltage_adjustment\n\n .set_value(*gpu_voltage as f64 / 1000.0);\n\n\n\n let (vram_clockspeed, _) =\n\n clocks_table.mem_power_levels.iter().next_back().unwrap().1;\n\n\n\n self.vram_clock_adjustment\n\n .set_value(*vram_clockspeed as f64);\n\n }\n\n ClocksTable::New(clocks_table) => {\n\n self.gpu_clock_adjustment\n\n .set_lower(clocks_table.gpu_clocks_range.0 as f64);\n\n self.gpu_clock_adjustment\n\n .set_upper(clocks_table.gpu_clocks_range.1 as f64);\n\n\n\n /* self.gpu_voltage_adjustment\n\n .set_lower(clocks_table.voltage_range.0 as f64 / 1000.0);\n\n self.gpu_voltage_adjustment\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 23, "score": 26752.220486024587 }, { "content": " let vram_clock_scale =\n\n Scale::new(Orientation::Horizontal, Some(&vram_clock_adjustment));\n\n\n\n vram_clock_scale.set_value_pos(PositionType::Right);\n\n\n\n root_grid.attach(&vram_clock_scale, 1, 2, 1, 1);\n\n\n\n root_grid.attach_next_to(\n\n &Label::new(Some(\"VRAM Clock (MHz)\")),\n\n Some(&vram_clock_scale),\n\n PositionType::Left,\n\n 1,\n\n 1,\n\n );\n\n }\n\n\n\n let apply_button = Button::new();\n\n\n\n {\n\n apply_button.set_label(\"Reset\");\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 24, "score": 26752.120328972687 }, { "content": " let gpu_clock = self.gpu_clock_adjustment.get_value() as i64;\n\n\n\n let vram_clock = self.vram_clock_adjustment.get_value() as i64;\n\n\n\n let gpu_voltage = (self.gpu_voltage_adjustment.get_value() * 1000.0) as i64;\n\n\n\n ClocksSettings {\n\n gpu_clock,\n\n vram_clock,\n\n gpu_voltage,\n\n }\n\n }\n\n\n\n pub fn connect_clocks_reset<F: Fn() + 'static + Clone>(&self, f: F) {\n\n self.apply_button.connect_clicked(move |_| {\n\n f();\n\n });\n\n }\n\n\n\n pub fn connect_clocks_changed<F: Fn() + 'static + Clone>(&self, f: F) {\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 25, "score": 26752.09689956814 }, { "content": " .set_upper(clocks_table.voltage_range.1 as f64 / 1000.0);*/\n\n\n\n self.vram_clock_adjustment\n\n .set_lower(clocks_table.mem_clocks_range.0 as f64);\n\n self.vram_clock_adjustment\n\n .set_upper(clocks_table.mem_clocks_range.1 as f64);\n\n\n\n self.gpu_clock_adjustment\n\n .set_value(clocks_table.current_gpu_clocks.1 as f64);\n\n\n\n // self.gpu_voltage_adjustment\n\n // .set_value(*clocks_table.gpu_voltage as f64 / 1000.0);\n\n\n\n self.vram_clock_adjustment\n\n .set_value(clocks_table.current_max_mem_clock as f64);\n\n }\n\n }\n\n }\n\n\n\n pub fn get_settings(&self) -> ClocksSettings {\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 26, "score": 26752.084093904803 }, { "content": " let root_grid = Grid::new();\n\n\n\n root_grid.set_row_spacing(5);\n\n root_grid.set_column_spacing(10);\n\n\n\n {\n\n let gpu_clock_scale = Scale::new(Orientation::Horizontal, Some(&gpu_clock_adjustment));\n\n\n\n gpu_clock_scale.set_hexpand(true); // 
Affects the grid column and all scales\n\n\n\n gpu_clock_scale.set_value_pos(PositionType::Right);\n\n\n\n root_grid.attach(&gpu_clock_scale, 1, 0, 1, 1);\n\n\n\n root_grid.attach_next_to(\n\n &Label::new(Some(\"GPU Clock (MHz)\")),\n\n Some(&gpu_clock_scale),\n\n PositionType::Left,\n\n 1,\n\n 1,\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 27, "score": 26751.810440670663 }, { "content": " );\n\n\n\n let gpu_voltage_scale =\n\n Scale::new(Orientation::Horizontal, Some(&gpu_voltage_adjustment));\n\n\n\n gpu_voltage_scale.set_value_pos(PositionType::Right);\n\n\n\n gpu_voltage_scale.set_digits(3);\n\n gpu_voltage_scale.set_round_digits(3);\n\n\n\n root_grid.attach(&gpu_voltage_scale, 1, 1, 1, 1);\n\n\n\n root_grid.attach_next_to(\n\n &Label::new(Some(\"GPU Voltage (V)\")),\n\n Some(&gpu_voltage_scale),\n\n PositionType::Left,\n\n 1,\n\n 1,\n\n );\n\n\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 28, "score": 26751.049855653448 }, { "content": " {\n\n let f = f.clone();\n\n self.gpu_clock_adjustment.connect_value_changed(move |_| {\n\n f();\n\n });\n\n }\n\n {\n\n let f = f.clone();\n\n self.vram_clock_adjustment.connect_value_changed(move |_| {\n\n f();\n\n });\n\n }\n\n {\n\n self.gpu_voltage_adjustment.connect_value_changed(move |_| {\n\n f();\n\n });\n\n }\n\n }\n\n\n\n pub fn hide(&self) {\n\n self.container.set_visible(false);\n\n }\n\n\n\n pub fn show(&self) {\n\n self.container.set_visible(true);\n\n }\n\n}\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 29, "score": 26750.80041590789 }, { "content": " match clocks_table {\n\n ClocksTable::Old(clocks_table) => {\n\n self.gpu_clock_adjustment\n\n .set_lower(clocks_table.gpu_clocks_range.0 as f64);\n\n self.gpu_clock_adjustment\n\n .set_upper(clocks_table.gpu_clocks_range.1 as f64);\n\n\n\n self.gpu_voltage_adjustment\n\n .set_lower(clocks_table.voltage_range.0 as f64 / 1000.0);\n\n self.gpu_voltage_adjustment\n\n .set_upper(clocks_table.voltage_range.1 as f64 / 1000.0);\n\n\n\n self.vram_clock_adjustment\n\n .set_lower(clocks_table.mem_clocks_range.0 as f64);\n\n self.vram_clock_adjustment\n\n .set_upper(clocks_table.mem_clocks_range.1 as f64);\n\n\n\n let (gpu_clockspeed, gpu_voltage) =\n\n clocks_table.gpu_power_levels.iter().next_back().unwrap().1;\n\n\n", "file_path": "gui/src/app/root_stack/oc_page/clocks_frame.rs", "rank": 30, "score": 26750.40760344919 }, { "content": "use gtk::*;\n\n\n\n#[derive(Clone)]\n\npub struct PowerCapFrame {\n\n pub container: Frame,\n\n label: Label,\n\n adjustment: Adjustment,\n\n}\n\n\n\nimpl PowerCapFrame {\n\n pub fn new() -> Self {\n\n let container = Frame::new(None);\n\n\n\n container.set_shadow_type(ShadowType::None);\n\n\n\n container.set_label_widget(Some(&{\n\n let label = Label::new(None);\n\n label.set_markup(\"<span font_desc='11'><b>Power Usage Limit</b></span>\");\n\n label\n\n }));\n", "file_path": "gui/src/app/root_stack/oc_page/power_cap_frame.rs", "rank": 31, "score": 26.133725210647203 }, { "content": "use daemon::gpu_controller::VulkanInfo;\n\nuse gtk::*;\n\n\n\n#[derive(Clone)]\n\npub struct VulkanInfoFrame {\n\n pub container: Frame,\n\n device_name_label: Label,\n\n version_label: Label,\n\n features_box: Box,\n\n}\n\n\n\nimpl VulkanInfoFrame {\n\n pub fn new() -> Self {\n\n let container = Frame::new(None);\n\n\n\n container.set_label_widget(Some(&{\n\n let label = Label::new(None);\n\n label.set_markup(\"<span font_desc='11'><b>Vulkan Information</b></span>\");\n\n label\n\n }));\n", 
"file_path": "gui/src/app/root_stack/info_page/vulkan_info.rs", "rank": 32, "score": 24.094411007688073 }, { "content": " power_profile_frame: PowerProfileFrame,\n\n power_cap_frame: PowerCapFrame,\n\n clocks_frame: ClocksFrame,\n\n pub warning_frame: WarningFrame,\n\n}\n\n\n\nimpl OcPage {\n\n pub fn new() -> Self {\n\n let container = Box::new(Orientation::Vertical, 5);\n\n\n\n let warning_frame = WarningFrame::new();\n\n\n\n container.pack_start(&warning_frame.container, false, true, 5);\n\n\n\n let stats_grid = StatsGrid::new();\n\n\n\n container.pack_start(&stats_grid.container, false, true, 5);\n\n\n\n let power_cap_frame = PowerCapFrame::new();\n\n\n", "file_path": "gui/src/app/root_stack/oc_page.rs", "rank": 33, "score": 23.944081464882863 }, { "content": "use gtk::*;\n\n\n\n#[derive(Clone)]\n\npub struct ApplyRevealer {\n\n pub container: Revealer,\n\n apply_button: Button,\n\n}\n\n\n\nimpl ApplyRevealer {\n\n pub fn new() -> Self {\n\n let container = Revealer::new();\n\n\n\n container.set_transition_duration(150);\n\n\n\n let apply_button = Button::new();\n\n\n\n apply_button.set_label(\"Apply\");\n\n\n\n container.add(&apply_button);\n\n\n", "file_path": "gui/src/app/apply_revealer.rs", "rank": 34, "score": 23.74651064678552 }, { "content": "mod vulkan_info;\n\n\n\nuse daemon::gpu_controller::GpuInfo;\n\nuse gtk::*;\n\nuse vulkan_info::VulkanInfoFrame;\n\n\n\n#[derive(Clone)]\n\npub struct InformationPage {\n\n pub container: Grid,\n\n gpu_name_label: Label,\n\n gpu_manufacturer_label: Label,\n\n vbios_version_label: Label,\n\n driver_label: Label,\n\n vram_size_label: Label,\n\n link_speed_label: Label,\n\n vulkan_info_frame: VulkanInfoFrame,\n\n}\n\n\n\nimpl InformationPage {\n\n pub fn new() -> Self {\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 35, "score": 23.717529078879632 }, { "content": "use daemon::gpu_controller::PowerProfile;\n\nuse gtk::*;\n\nuse prelude::ComboBoxExtManual;\n\n\n\n#[derive(Clone)]\n\npub struct PowerProfileFrame {\n\n pub container: Frame,\n\n combo_box: ComboBoxText,\n\n description_label: Label,\n\n}\n\n\n\nimpl PowerProfileFrame {\n\n pub fn new() -> Self {\n\n let container = Frame::new(None);\n\n\n\n container.set_shadow_type(ShadowType::None);\n\n\n\n container.set_label_widget(Some(&{\n\n let label = Label::new(None);\n\n label.set_markup(\"<span font_desc='11'><b>Power Profile</b></span>\");\n", "file_path": "gui/src/app/root_stack/oc_page/power_profile_frame.rs", "rank": 36, "score": 23.427010187312117 }, { "content": "use gtk::prelude::{ComboBoxExtManual, ObjectExt};\n\nuse gtk::*;\n\nuse pango::EllipsizeMode;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Clone)]\n\npub struct Header {\n\n pub container: HeaderBar,\n\n gpu_selector: ComboBoxText,\n\n switcher: StackSwitcher,\n\n}\n\n\n\nimpl Header {\n\n pub fn new() -> Self {\n\n let container = HeaderBar::new();\n\n\n\n container.set_custom_title(Some(&Grid::new())); // Bad workaround to hide the title\n\n\n\n container.set_show_close_button(true);\n\n\n", "file_path": "gui/src/app/header.rs", "rank": 37, "score": 23.20613876023049 }, { "content": "use gtk::*;\n\n\n\n#[derive(Clone)]\n\npub struct WarningFrame {\n\n pub container: Frame,\n\n}\n\n\n\nimpl WarningFrame {\n\n pub fn new() -> Self {\n\n let container = Frame::new(Some(\"Overclocking information\"));\n\n\n\n container.set_label_align(0.3, 0.5);\n\n\n\n let warning_label = Label::new(None);\n\n\n\n warning_label.set_line_wrap(true);\n\n warning_label.set_markup(\"Overclocking support is not enabled! 
To enable overclocking support, you need to add <b>amdgpu.ppfeaturemask=0xffffffff</b> to your kernel boot options. Look for the documentation of your distro.\");\n\n warning_label.set_selectable(true);\n\n\n\n container.add(&warning_label);\n", "file_path": "gui/src/app/root_stack/oc_page/warning_frame.rs", "rank": 38, "score": 22.166487551321765 }, { "content": "mod clocks_frame;\n\nmod power_cap_frame;\n\nmod power_profile_frame;\n\nmod stats_grid;\n\nmod warning_frame;\n\n\n\nuse clocks_frame::ClocksSettings;\n\nuse daemon::gpu_controller::{GpuInfo, GpuStats, PowerProfile};\n\nuse gtk::*;\n\n\n\nuse clocks_frame::ClocksFrame;\n\nuse power_cap_frame::PowerCapFrame;\n\nuse power_profile_frame::PowerProfileFrame;\n\nuse stats_grid::StatsGrid;\n\nuse warning_frame::WarningFrame;\n\n\n\n#[derive(Clone)]\n\npub struct OcPage {\n\n pub container: Box,\n\n stats_grid: StatsGrid,\n", "file_path": "gui/src/app/root_stack/oc_page.rs", "rank": 39, "score": 22.102882705980363 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse gtk::*;\n\n\n\n#[derive(Clone)]\n\npub struct FanCurveFrame {\n\n pub container: Frame,\n\n adjustment_1: Adjustment,\n\n adjustment_2: Adjustment,\n\n adjustment_3: Adjustment,\n\n adjustment_4: Adjustment,\n\n adjustment_5: Adjustment,\n\n}\n\n\n\nimpl FanCurveFrame {\n\n pub fn new() -> Self {\n\n let container = Frame::new(Some(\"Fan Curve\"));\n\n\n\n container.set_margin_start(10);\n\n container.set_margin_end(10);\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 40, "score": 20.603378759391795 }, { "content": "mod fan_curve_frame;\n\n\n\nuse daemon::gpu_controller::{FanControlInfo, GpuStats};\n\nuse gtk::prelude::*;\n\nuse gtk::*;\n\nuse std::collections::BTreeMap;\n\n\n\nuse fan_curve_frame::FanCurveFrame;\n\n\n\npub struct ThermalsSettings {\n\n pub automatic_fan_control_enabled: bool,\n\n pub curve: BTreeMap<i64, f64>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ThermalsPage {\n\n pub container: Box,\n\n temp_label: Label,\n\n fan_speed_label: Label,\n\n fan_control_enabled_switch: Switch,\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 41, "score": 20.072616824666778 }, { "content": "mod info_page;\n\nmod oc_page;\n\nmod thermals_page;\n\n\n\nuse gtk::*;\n\n\n\nuse info_page::InformationPage;\n\nuse oc_page::OcPage;\n\nuse thermals_page::ThermalsPage;\n\n\n\n#[derive(Clone)]\n\npub struct RootStack {\n\n pub container: Stack,\n\n pub info_page: InformationPage,\n\n pub thermals_page: ThermalsPage,\n\n pub oc_page: OcPage,\n\n}\n\n\n\nimpl RootStack {\n\n pub fn new() -> Self {\n", "file_path": "gui/src/app/root_stack.rs", "rank": 42, "score": 18.52799091289263 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub struct GpuConfig {\n\n pub fan_control_enabled: bool,\n\n pub fan_curve: BTreeMap<i64, f64>,\n\n pub power_cap: i64,\n\n pub power_profile: PowerProfile,\n\n pub gpu_max_clock: i64,\n\n pub gpu_max_voltage: Option<i64>,\n\n pub vram_max_clock: i64,\n\n}\n\n\n\nimpl GpuConfig {\n\n pub fn new() -> Self {\n\n let mut fan_curve: BTreeMap<i64, f64> = BTreeMap::new();\n\n fan_curve.insert(20, 0f64);\n\n fan_curve.insert(40, 0f64);\n\n fan_curve.insert(60, 50f64);\n\n fan_curve.insert(80, 80f64);\n", "file_path": "daemon/src/config.rs", "rank": 43, "score": 18.478607859006416 }, { "content": " container.pack_start(&power_cap_frame.container, false, true, 0);\n\n\n\n let power_profile_frame = PowerProfileFrame::new();\n\n\n\n 
container.pack_start(&power_profile_frame.container, false, true, 0);\n\n\n\n let clocks_frame = ClocksFrame::new();\n\n\n\n container.pack_start(&clocks_frame.container, false, true, 0);\n\n\n\n Self {\n\n container,\n\n stats_grid,\n\n power_profile_frame,\n\n clocks_frame,\n\n warning_frame,\n\n power_cap_frame,\n\n }\n\n }\n\n\n", "file_path": "gui/src/app/root_stack/oc_page.rs", "rank": 44, "score": 17.689922063179807 }, { "content": " pub fn set_stats(&self, stats: &GpuStats) {\n\n self.stats_grid.set_stats(stats);\n\n }\n\n\n\n pub fn connect_clocks_reset<F: Fn() + 'static + Clone>(&self, f: F) {\n\n self.clocks_frame.connect_clocks_reset(move || {\n\n f();\n\n });\n\n }\n\n\n\n pub fn connect_settings_changed<F: Fn() + 'static + Clone>(&self, f: F) {\n\n {\n\n let f = f.clone();\n\n self.power_profile_frame\n\n .connect_power_profile_changed(move || {\n\n f();\n\n });\n\n }\n\n {\n\n let f = f.clone();\n", "file_path": "gui/src/app/root_stack/oc_page.rs", "rank": 45, "score": 17.13473775740977 }, { "content": " fan_curve_frame: FanCurveFrame,\n\n}\n\n\n\nimpl ThermalsPage {\n\n pub fn new() -> Self {\n\n let container = Box::new(Orientation::Vertical, 5);\n\n\n\n let grid = Grid::new();\n\n\n\n grid.set_margin_start(5);\n\n grid.set_margin_end(5);\n\n grid.set_margin_bottom(5);\n\n grid.set_margin_top(5);\n\n\n\n grid.set_column_homogeneous(true);\n\n\n\n grid.set_row_spacing(7);\n\n grid.set_column_spacing(5);\n\n\n\n grid.attach(\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 46, "score": 16.859796715551365 }, { "content": "\n\n // Temperature threshold labels\n\n {\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"Temperature °C\"));\n\n label.set_hexpand(true);\n\n label\n\n },\n\n 2,\n\n 7,\n\n 5,\n\n 1,\n\n );\n\n\n\n root_grid.attach(&Label::new(Some(\"20\")), 2, 6, 1, 1);\n\n root_grid.attach(&Label::new(Some(\"40\")), 3, 6, 1, 1);\n\n root_grid.attach(&Label::new(Some(\"60\")), 4, 6, 1, 1);\n\n root_grid.attach(&Label::new(Some(\"80\")), 5, 6, 1, 1);\n\n root_grid.attach(&Label::new(Some(\"100\")), 6, 6, 1, 1);\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 47, "score": 16.620250616790997 }, { "content": "use header::Header;\n\nuse root_stack::RootStack;\n\n\n\n#[derive(Clone)]\n\npub struct App {\n\n pub window: Window,\n\n pub header: Header,\n\n root_stack: RootStack,\n\n apply_revealer: ApplyRevealer,\n\n daemon_connection: DaemonConnection,\n\n}\n\n\n\nimpl App {\n\n pub fn new(daemon_connection: DaemonConnection) -> Self {\n\n let window = Window::new(WindowType::Toplevel);\n\n\n\n let header = Header::new();\n\n\n\n window.set_titlebar(Some(&header.container));\n\n window.set_title(\"LACT\");\n", "file_path": "gui/src/app.rs", "rank": 48, "score": 16.27943406658425 }, { "content": " combo_box.connect_changed(move |combobox| match combobox.get_active().unwrap() {\n\n 0 => description_label\n\n .set_text(\"Automatically adjust GPU and VRAM clocks. 
(Default)\"),\n\n 1 => description_label\n\n .set_text(\"Always use the highest clockspeeds for GPU and VRAM.\"),\n\n 2 => description_label\n\n .set_text(\"Always use the lowest clockspeeds for GPU and VRAM.\"),\n\n _ => unreachable!(),\n\n });\n\n }\n\n\n\n container.add(&root_box);\n\n Self {\n\n container,\n\n combo_box,\n\n description_label,\n\n }\n\n }\n\n\n\n pub fn set_active_profile(&self, profile: &PowerProfile) {\n", "file_path": "gui/src/app/root_stack/oc_page/power_profile_frame.rs", "rank": 49, "score": 15.761472440171094 }, { "content": " pub voltage_range: (i64, i64), //IN MILLIVOLTS\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\npub struct ClocksTableNew {\n\n pub current_gpu_clocks: (i64, i64),\n\n pub current_max_mem_clock: i64,\n\n // pub vddc_curve: [(i64, i64); 3],\n\n pub gpu_clocks_range: (i64, i64),\n\n pub mem_clocks_range: (i64, i64),\n\n // pub voltage_range: (i64, i64), //IN MILLIVOLTS\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct GpuStats {\n\n pub mem_used: Option<u64>,\n\n pub mem_total: Option<u64>,\n\n pub mem_freq: Option<i64>,\n\n pub gpu_freq: Option<i64>,\n\n pub gpu_temp: Option<i64>,\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 50, "score": 15.618953665500374 }, { "content": " container.set_margin_bottom(10);\n\n container.set_margin_top(10);\n\n\n\n container.set_label_align(0.35, 0.5);\n\n\n\n // container.set_shadow_type(ShadowType::None);\n\n //\n\n let root_grid = Grid::new();\n\n\n\n // PWM Percentage Labels\n\n {\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"PWM %\"));\n\n label.set_angle(90.0);\n\n label.set_vexpand(true); // This expands the entire top section of the grid, including the scales\n\n label\n\n },\n\n 0,\n\n 0,\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 51, "score": 15.251233946665314 }, { "content": "\n\n container.attach(&vbios_version_label, 2, 2, 3, 1);\n\n\n\n container.attach(\n\n &{\n\n let label = Label::new(Some(\"Driver in use:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 3,\n\n 2,\n\n 1,\n\n );\n\n\n\n let driver_label = Label::new(None);\n\n driver_label.set_halign(Align::Start);\n\n\n\n container.attach(&driver_label, 2, 3, 3, 1);\n\n\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 52, "score": 14.475468728680063 }, { "content": " &{\n\n let label = Label::new(Some(\"Temperature:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 0,\n\n 1,\n\n 1,\n\n );\n\n\n\n let temp_label = Label::new(None);\n\n temp_label.set_halign(Align::Start);\n\n\n\n grid.attach(&temp_label, 2, 0, 1, 1);\n\n\n\n grid.attach(\n\n &{\n\n let label = Label::new(Some(\"Fan speed:\"));\n\n label.set_halign(Align::End);\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 53, "score": 14.43439254543977 }, { "content": " container.set_label_align(0.5, 0.5);\n\n\n\n container.set_shadow_type(ShadowType::None);\n\n\n\n let grid = Grid::new();\n\n\n\n grid.set_margin_start(5);\n\n grid.set_margin_end(5);\n\n grid.set_margin_bottom(5);\n\n grid.set_margin_top(5);\n\n\n\n grid.set_column_homogeneous(true);\n\n\n\n grid.set_row_spacing(7);\n\n grid.set_column_spacing(5);\n\n\n\n grid.attach(\n\n &{\n\n let label = Label::new(Some(\"Device name:\"));\n\n label.set_halign(Align::End);\n", "file_path": "gui/src/app/root_stack/info_page/vulkan_info.rs", "rank": 54, "score": 14.319804045005679 }, { "content": "use crate::config::Config;\n\nuse 
crate::gpu_controller::{FanControlInfo, GpuStats};\n\nuse crate::gpu_controller::{GpuInfo, PowerProfile};\n\nuse crate::Daemon;\n\nuse crate::DaemonError;\n\nuse crate::{Action, DaemonResponse, SOCK_PATH};\n\nuse std::collections::{BTreeMap, HashMap};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct DaemonConnection {}\n\n\n\npub const BUFFER_SIZE: usize = 4096;\n\n\n\nimpl DaemonConnection {\n\n pub fn new() -> Result<Self, DaemonError> {\n\n let addr = nix::sys::socket::SockAddr::Unix(\n\n nix::sys::socket::UnixAddr::new_abstract(SOCK_PATH.as_bytes()).unwrap(),\n\n );\n\n let socket = nix::sys::socket::socket(\n\n nix::sys::socket::AddressFamily::Unix,\n", "file_path": "daemon/src/daemon_connection.rs", "rank": 55, "score": 14.264044530096388 }, { "content": "\n\n Self {\n\n container,\n\n temp_label,\n\n fan_speed_label,\n\n fan_control_enabled_switch,\n\n fan_curve_frame,\n\n }\n\n }\n\n\n\n pub fn set_thermals_info(&self, stats: &GpuStats) {\n\n match stats.gpu_temp {\n\n Some(temp) => self.temp_label.set_markup(&format!(\"<b>{}°C</b>\", temp)),\n\n None => self.temp_label.set_text(\"Sensor not found\"),\n\n }\n\n\n\n match stats.fan_speed {\n\n Some(fan_speed) => self.fan_speed_label.set_markup(&format!(\n\n \"<b>{} RPM ({}%)</b>\",\n\n fan_speed,\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 56, "score": 14.24103458644191 }, { "content": " let scale = Scale::new(Orientation::Horizontal, Some(&adjustment));\n\n\n\n scale.set_draw_value(false);\n\n\n\n root_box.pack_start(&scale, true, true, 5);\n\n\n\n container.add(&root_box);\n\n\n\n Self {\n\n container,\n\n label,\n\n adjustment,\n\n }\n\n }\n\n\n\n pub fn set_data(&self, power_cap: Option<i64>, power_cap_max: Option<i64>) {\n\n if let Some(power_cap_max) = power_cap_max {\n\n self.adjustment.set_upper(power_cap_max as f64);\n\n } else {\n\n self.container.set_visible(false);\n", "file_path": "gui/src/app/root_stack/oc_page/power_cap_frame.rs", "rank": 57, "score": 14.077015369073749 }, { "content": " label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 5,\n\n 2,\n\n 1,\n\n );\n\n\n\n let link_speed_label = Label::new(None);\n\n link_speed_label.set_halign(Align::Start);\n\n\n\n container.attach(&link_speed_label, 2, 5, 3, 1);\n\n\n\n let vulkan_info_frame = VulkanInfoFrame::new();\n\n container.attach(&vulkan_info_frame.container, 0, 6, 5, 1);\n\n\n\n Self {\n\n container,\n\n gpu_name_label,\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 58, "score": 13.950933483166814 }, { "content": " let container = Grid::new();\n\n\n\n container.set_margin_start(5);\n\n container.set_margin_end(5);\n\n container.set_margin_bottom(5);\n\n container.set_margin_top(5);\n\n\n\n container.set_column_homogeneous(true);\n\n\n\n container.set_row_spacing(7);\n\n container.set_column_spacing(5);\n\n\n\n container.attach(\n\n &{\n\n let label = Label::new(Some(\"GPU Model:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 0,\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 59, "score": 13.919764046328904 }, { "content": " features_box.set_halign(Align::Center);\n\n\n\n features_scrolled_window.add(&features_box);\n\n\n\n features_expander.add(&features_scrolled_window);\n\n\n\n container.add(&grid);\n\n\n\n Self {\n\n container,\n\n device_name_label,\n\n version_label,\n\n features_box,\n\n }\n\n }\n\n\n\n pub fn set_info(&self, vulkan_info: &VulkanInfo) {\n\n log::trace!(\"Setting vulkan info: {:?}\", vulkan_info);\n\n\n\n self.device_name_label\n", "file_path": 
"gui/src/app/root_stack/info_page/vulkan_info.rs", "rank": 60, "score": 13.640795561109973 }, { "content": " Ok(_) => {\n\n self.config.power_profile = profile;\n\n Ok(())\n\n }\n\n Err(_) => Err(GpuControllerError::NotSupported),\n\n }\n\n }\n\n\n\n fn get_clocks_table(&self) -> Result<ClocksTable, GpuControllerError> {\n\n match fs::read_to_string(self.hw_path.join(\"pp_od_clk_voltage\")) {\n\n Ok(table) => Self::parse_clocks_table(&table),\n\n Err(_) => Err(GpuControllerError::NotSupported),\n\n }\n\n }\n\n\n\n fn parse_clocks_table(table: &str) -> Result<ClocksTable, GpuControllerError> {\n\n if table.contains(\"CURVE\") {\n\n Ok(ClocksTable::New(Self::parse_clocks_table_new(table)?))\n\n } else {\n\n Ok(ClocksTable::Old(Self::parse_clocks_table_old(table)?))\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 61, "score": 13.42788282076353 }, { "content": " }\n\n if let Some(power_cap) = power_cap {\n\n self.adjustment.set_value(power_cap as f64);\n\n } else {\n\n self.container.set_visible(false);\n\n }\n\n }\n\n\n\n pub fn get_cap(&self) -> Option<i64> {\n\n // Using match gives a warning that floats shouldn't be used in patterns\n\n let cap = self.adjustment.get_value();\n\n if cap == 0.0 {\n\n None\n\n } else {\n\n Some(cap as i64)\n\n }\n\n }\n\n\n\n pub fn connect_cap_changed<F: Fn() + 'static>(&self, f: F) {\n\n self.adjustment.connect_value_changed(move |_| {\n\n f();\n\n });\n\n }\n\n}\n", "file_path": "gui/src/app/root_stack/oc_page/power_cap_frame.rs", "rank": 62, "score": 13.133954553713739 }, { "content": " Self {\n\n container,\n\n apply_button,\n\n }\n\n }\n\n\n\n pub fn show(&self) {\n\n self.container.set_reveal_child(true);\n\n }\n\n\n\n pub fn hide(&self) {\n\n self.container.set_reveal_child(false);\n\n }\n\n\n\n pub fn connect_apply_button_clicked<F: Fn() + 'static>(&self, f: F) {\n\n self.apply_button.connect_clicked(move |_| {\n\n f();\n\n });\n\n }\n\n}\n", "file_path": "gui/src/app/apply_revealer.rs", "rank": 63, "score": 12.997120325763175 }, { "content": " label\n\n }));\n\n container.set_label_align(0.2, 0.0);\n\n\n\n let root_box = Box::new(Orientation::Horizontal, 5);\n\n\n\n let combo_box = ComboBoxText::new();\n\n\n\n combo_box.append(Some(\"0\"), \"Automatic\");\n\n combo_box.append(Some(\"1\"), \"Highest clocks\");\n\n combo_box.append(Some(\"2\"), \"Lowest clocks\");\n\n\n\n root_box.pack_start(&combo_box, false, true, 5);\n\n\n\n let description_label = Label::new(Some(\"A description is supposed to be here\"));\n\n\n\n root_box.pack_start(&description_label, false, true, 5);\n\n\n\n {\n\n let description_label = description_label.clone();\n", "file_path": "gui/src/app/root_stack/oc_page/power_profile_frame.rs", "rank": 64, "score": 12.525744417884496 }, { "content": " },\n\n 1,\n\n 3,\n\n 1,\n\n 1,\n\n );\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"50\"));\n\n label.set_angle(90.0);\n\n label\n\n },\n\n 1,\n\n 2,\n\n 1,\n\n 1,\n\n );\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"75\"));\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 65, "score": 12.41150827947497 }, { "content": " 1,\n\n 5,\n\n );\n\n\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"0\"));\n\n label.set_angle(90.0);\n\n label\n\n },\n\n 1,\n\n 4,\n\n 1,\n\n 1,\n\n );\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"25\"));\n\n label.set_angle(90.0);\n\n label\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 66, "score": 12.252102108082825 }, { 
"content": " container.attach(\n\n &{\n\n let label = Label::new(Some(\"VRAM Size:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 4,\n\n 2,\n\n 1,\n\n );\n\n\n\n let vram_size_label = Label::new(None);\n\n vram_size_label.set_halign(Align::Start);\n\n\n\n container.attach(&vram_size_label, 2, 4, 3, 1);\n\n\n\n container.attach(\n\n &{\n\n let label = Label::new(Some(\"Link speed:\"));\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 67, "score": 12.037400443692059 }, { "content": " let adj = adjustments[i];\n\n\n\n root_grid.attach(\n\n &{\n\n let scale = Scale::new(Orientation::Vertical, Some(adj));\n\n scale.set_draw_value(false);\n\n scale.set_inverted(true);\n\n scale\n\n },\n\n i as i32 + 2,\n\n 0,\n\n 1,\n\n 5,\n\n );\n\n }\n\n }\n\n\n\n container.add(&root_grid);\n\n\n\n Self {\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 68, "score": 11.975690788508032 }, { "content": " match self {\n\n PowerProfile::Auto => \"auto\".to_string(),\n\n PowerProfile::High => \"high\".to_string(),\n\n PowerProfile::Low => \"low\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum ClocksTable {\n\n Old(ClocksTableOld),\n\n New(ClocksTableNew),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\npub struct ClocksTableOld {\n\n pub gpu_power_levels: BTreeMap<u32, (i64, i64)>, //<power level, (clockspeed, voltage)>\n\n pub mem_power_levels: BTreeMap<u32, (i64, i64)>,\n\n pub gpu_clocks_range: (i64, i64),\n\n pub mem_clocks_range: (i64, i64),\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 69, "score": 11.833321322705785 }, { "content": "\n\nimpl Default for PowerProfile {\n\n fn default() -> Self {\n\n PowerProfile::Auto\n\n }\n\n}\n\n\n\nimpl PowerProfile {\n\n pub fn from_str(profile: &str) -> Result<Self, GpuControllerError> {\n\n match profile {\n\n \"auto\" | \"Automatic\" => Ok(PowerProfile::Auto),\n\n \"high\" | \"Highest Clocks\" => Ok(PowerProfile::High),\n\n \"low\" | \"Lowest Clocks\" => Ok(PowerProfile::Low),\n\n _ => Err(GpuControllerError::ParseError(\n\n \"unrecognized GPU power profile\".to_string(),\n\n )),\n\n }\n\n }\n\n\n\n pub fn to_string(&self) -> String {\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 70, "score": 11.807750225175726 }, { "content": " .max_fan_speed\n\n .unwrap_or_default()\n\n .to_string()\n\n .bold(),\n\n \"RPM\".bold(),\n\n );\n\n println!(\n\n \"{} {}{}\",\n\n \"GPU Clock:\".green(),\n\n gpu_stats.gpu_freq.unwrap_or_default().to_string().bold(),\n\n \"MHz\".bold(),\n\n );\n\n println!(\n\n \"{} {}{}\",\n\n \"GPU Voltage:\".green(),\n\n (gpu_stats.voltage.unwrap_or_default() as f64 / 1000.0)\n\n .to_string()\n\n .bold(),\n\n \"V\".bold(),\n\n );\n", "file_path": "cli/src/main.rs", "rank": 71, "score": 11.801886963971441 }, { "content": " Example output:\n\n\n\n ```\n\n VRAM Usage: 545/4096MiB\n\n Temperature: 46°C\n\n Fan Speed: 785/3200RPM\n\n GPU Clock: 783MHz\n\n GPU Voltage: 0.975V\n\n VRAM Clock: 1750MHz\n\n Power Usage: 38/155W\n\n ```\n\n \n\n- Showing the current fan curve: \n\n\n\n `lact-cli curve status`\n\n \n\n Example output:\n\n\n\n ```\n\n Fan curve:\n\n 20C°: 0%\n\n 40C°: 0%\n\n 60C°: 50%\n\n 80C°: 88%\n\n 100C°: 100%\n", "file_path": "README.md", "rank": 72, "score": 11.669816375993562 }, { "content": " label\n\n },\n\n 0,\n\n 0,\n\n 2,\n\n 1,\n\n );\n\n\n\n let device_name_label = Label::new(None);\n\n device_name_label.set_halign(Align::Start);\n\n\n\n grid.attach(&device_name_label, 2, 0, 
3, 1);\n\n\n\n grid.attach(\n\n &{\n\n let label = Label::new(Some(\"Version:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n", "file_path": "gui/src/app/root_stack/info_page/vulkan_info.rs", "rank": 73, "score": 11.628739430510306 }, { "content": " label\n\n },\n\n 0,\n\n 1,\n\n 1,\n\n 1,\n\n );\n\n\n\n let fan_speed_label = Label::new(None);\n\n fan_speed_label.set_halign(Align::Start);\n\n\n\n grid.attach(&fan_speed_label, 2, 1, 1, 1);\n\n\n\n grid.attach(\n\n &{\n\n let label = Label::new(Some(\"Automatic fan control:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 74, "score": 11.517058697675772 }, { "content": " 2,\n\n 1,\n\n );\n\n\n\n let gpu_name_label = Label::new(None);\n\n gpu_name_label.set_halign(Align::Start);\n\n\n\n container.attach(&gpu_name_label, 2, 0, 3, 1);\n\n\n\n container.attach(\n\n &{\n\n let label = Label::new(Some(\"GPU Manufacturer:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 1,\n\n 2,\n\n 1,\n\n );\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 75, "score": 11.46252039941132 }, { "content": "\n\n let gpu_manufacturer_label = Label::new(None);\n\n gpu_manufacturer_label.set_halign(Align::Start);\n\n\n\n container.attach(&gpu_manufacturer_label, 2, 1, 3, 1);\n\n\n\n container.attach(\n\n &{\n\n let label = Label::new(Some(\"VBIOS Version:\"));\n\n label.set_halign(Align::End);\n\n label\n\n },\n\n 0,\n\n 2,\n\n 2,\n\n 1,\n\n );\n\n\n\n let vbios_version_label = Label::new(None);\n\n vbios_version_label.set_halign(Align::Start);\n", "file_path": "gui/src/app/root_stack/info_page.rs", "rank": 76, "score": 11.372157278887329 }, { "content": "\n\n Self { container }\n\n }\n\n\n\n pub fn show(&self) {\n\n self.container.set_visible(true);\n\n }\n\n\n\n pub fn hide(&self) {\n\n self.container.set_visible(false);\n\n }\n\n}\n", "file_path": "gui/src/app/root_stack/oc_page/warning_frame.rs", "rank": 77, "score": 11.287059874136048 }, { "content": " self.config.gpu_max_voltage = voltage;\n\n }\n\n ClocksTable::New(_) => {\n\n let s_line = format!(\"s 1 {}\\n\", clockspeed);\n\n\n\n fs::write(self.hw_path.join(\"pp_od_clk_voltage\"), s_line)?;\n\n\n\n if let Some(voltage) = voltage {\n\n let vc_line = format!(\"vc 2 {} {}\\n\", clockspeed, voltage);\n\n\n\n fs::write(self.hw_path.join(\"pp_od_clk_voltage\"), vc_line)?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn set_vram_max_clockspeed(&mut self, clockspeed: i64) -> Result<(), GpuControllerError> {\n\n match self.get_clocks_table()? 
{\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 78, "score": 11.16042598146044 }, { "content": " label.set_angle(90.0);\n\n label\n\n },\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n );\n\n root_grid.attach(\n\n &{\n\n let label = Label::new(Some(\"100\"));\n\n label.set_angle(90.0);\n\n label\n\n },\n\n 1,\n\n 0,\n\n 1,\n\n 1,\n\n );\n\n }\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 79, "score": 11.119796371781131 }, { "content": " 1,\n\n 2,\n\n 1,\n\n );\n\n\n\n let version_label = Label::new(None);\n\n version_label.set_halign(Align::Start);\n\n\n\n grid.attach(&version_label, 2, 1, 3, 1);\n\n\n\n let features_expander = Expander::new(Some(\"Feature support\"));\n\n\n\n grid.attach(&features_expander, 0, 2, 5, 1);\n\n\n\n let features_scrolled_window = ScrolledWindow::new(NONE_ADJUSTMENT, NONE_ADJUSTMENT);\n\n\n\n features_scrolled_window.set_vexpand(true);\n\n\n\n let features_box = Box::new(Orientation::Vertical, 5);\n\n\n", "file_path": "gui/src/app/root_stack/info_page/vulkan_info.rs", "rank": 80, "score": 10.92330704717465 }, { "content": "}\n\n\n\nimpl Config {\n\n pub fn new(config_path: &PathBuf) -> Self {\n\n let gpu_configs: HashMap<u32, (GpuIdentifier, GpuConfig)> = HashMap::new();\n\n\n\n Config {\n\n gpu_configs,\n\n allow_online_update: None,\n\n config_path: config_path.clone(),\n\n group: String::from(\"wheel\"),\n\n }\n\n }\n\n\n\n pub fn read_from_file(path: &PathBuf) -> Result<Self, ConfigError> {\n\n let json = fs::read_to_string(path)?;\n\n\n\n Ok(serde_json::from_str::<Config>(&json)?)\n\n }\n\n\n", "file_path": "daemon/src/config.rs", "rank": 81, "score": 10.850865159674067 }, { "content": " fan_curve.insert(100, 100f64);\n\n\n\n GpuConfig {\n\n fan_curve,\n\n fan_control_enabled: false,\n\n power_cap: -1,\n\n power_profile: PowerProfile::Auto,\n\n gpu_max_clock: 0,\n\n gpu_max_voltage: None,\n\n vram_max_clock: 0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub struct Config {\n\n pub gpu_configs: HashMap<u32, (GpuIdentifier, GpuConfig)>,\n\n pub allow_online_update: Option<bool>,\n\n pub config_path: PathBuf,\n\n pub group: String,\n", "file_path": "daemon/src/config.rs", "rank": 82, "score": 10.746060039201296 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct HWMon {\n\n hwmon_path: PathBuf,\n\n fan_control: Arc<AtomicBool>,\n\n fan_curve: Arc<RwLock<BTreeMap<i64, f64>>>,\n\n}\n\n\n\nimpl HWMon {\n\n pub fn new(\n\n hwmon_path: &PathBuf,\n\n fan_control_enabled: bool,\n\n fan_curve: BTreeMap<i64, f64>,\n\n power_cap: Option<i64>,\n\n ) -> HWMon {\n\n let mut mon = HWMon {\n\n hwmon_path: hwmon_path.clone(),\n\n fan_control: Arc::new(AtomicBool::new(false)),\n\n fan_curve: Arc::new(RwLock::new(fan_curve)),\n", "file_path": "daemon/src/hw_mon.rs", "rank": 83, "score": 10.61753993110211 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::collections::{BTreeMap, HashMap};\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path::PathBuf;\n\n\n\nuse crate::gpu_controller::PowerProfile;\n\n\n\n#[derive(Debug)]\n\npub enum ConfigError {\n\n IoError(io::Error),\n\n ParseError(serde_json::Error),\n\n}\n\n\n\nimpl From<io::Error> for ConfigError {\n\n fn from(error: io::Error) -> Self {\n\n ConfigError::IoError(error)\n\n }\n\n}\n\n\n", "file_path": "daemon/src/config.rs", "rank": 84, "score": 10.472912368510817 }, { "content": " let container = Stack::new();\n\n\n\n let info_page = InformationPage::new();\n\n\n\n container.add_titled(&info_page.container, 
\"info_page\", \"Information\");\n\n\n\n let oc_page = OcPage::new();\n\n\n\n container.add_titled(&oc_page.container, \"oc_page\", \"OC\");\n\n\n\n let thermals_page = ThermalsPage::new();\n\n\n\n container.add_titled(&thermals_page.container, \"thermals_page\", \"Thermals\");\n\n\n\n Self {\n\n container,\n\n info_page,\n\n thermals_page,\n\n oc_page,\n\n }\n\n }\n\n}\n", "file_path": "gui/src/app/root_stack.rs", "rank": 85, "score": 10.217785348172583 }, { "content": " container.set_label_align(0.2, 0.0);\n\n\n\n let root_box = Box::new(Orientation::Horizontal, 0);\n\n\n\n let label = Label::new(None);\n\n\n\n root_box.pack_start(&label, false, true, 5);\n\n\n\n let adjustment = Adjustment::new(0.0, 0.0, 0.0, 1.0, 10.0, 0.0);\n\n {\n\n let label = label.clone();\n\n adjustment.connect_value_changed(move |adj| {\n\n label.set_markup(&format!(\n\n \"{}/{} W\",\n\n adj.get_value().round(),\n\n adj.get_upper()\n\n ));\n\n });\n\n }\n\n\n", "file_path": "gui/src/app/root_stack/oc_page/power_cap_frame.rs", "rank": 86, "score": 10.21665077392388 }, { "content": "mod apply_revealer;\n\nmod header;\n\nmod root_stack;\n\n\n\nextern crate gtk;\n\n\n\nuse std::sync::Arc;\n\nuse std::thread;\n\nuse std::time::Duration;\n\nuse std::{\n\n fs,\n\n sync::atomic::{AtomicU32, Ordering},\n\n};\n\n\n\nuse apply_revealer::ApplyRevealer;\n\nuse daemon::daemon_connection::DaemonConnection;\n\nuse daemon::gpu_controller::GpuStats;\n\nuse daemon::DaemonError;\n\nuse gtk::*;\n\n\n", "file_path": "gui/src/app.rs", "rank": 87, "score": 9.91234415954798 }, { "content": " container,\n\n adjustment_1,\n\n adjustment_2,\n\n adjustment_3,\n\n adjustment_4,\n\n adjustment_5,\n\n }\n\n }\n\n\n\n pub fn set_curve(&self, curve: &BTreeMap<i64, f64>) {\n\n self.adjustment_1.set_value(*curve.get(&20).unwrap());\n\n self.adjustment_2.set_value(*curve.get(&40).unwrap());\n\n self.adjustment_3.set_value(*curve.get(&60).unwrap());\n\n self.adjustment_4.set_value(*curve.get(&80).unwrap());\n\n self.adjustment_5.set_value(*curve.get(&100).unwrap());\n\n }\n\n\n\n pub fn get_curve(&self) -> BTreeMap<i64, f64> {\n\n let mut curve = BTreeMap::new();\n\n\n", "file_path": "gui/src/app/root_stack/thermals_page/fan_curve_frame.rs", "rank": 88, "score": 9.860141818907017 }, { "content": " pub power_avg: Option<i64>,\n\n pub power_cap: Option<i64>,\n\n pub power_cap_max: Option<i64>,\n\n pub fan_speed: Option<i64>,\n\n pub max_fan_speed: Option<i64>,\n\n pub voltage: Option<i64>,\n\n pub gpu_usage: Option<u8>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct FanControlInfo {\n\n pub enabled: bool,\n\n pub curve: BTreeMap<i64, f64>,\n\n}\n\n#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\npub struct VulkanInfo {\n\n pub device_name: String,\n\n pub api_version: String,\n\n pub features: HashMap<String, bool>,\n\n}\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 89, "score": 9.820795452681601 }, { "content": " let gpu_selector = ComboBoxText::new();\n\n container.pack_start(&gpu_selector);\n\n\n\n let switcher = StackSwitcher::new();\n\n container.pack_start(&switcher);\n\n\n\n Header {\n\n container,\n\n gpu_selector,\n\n switcher,\n\n }\n\n }\n\n\n\n pub fn set_switcher_stack(&self, stack: &Stack) {\n\n self.switcher.set_stack(Some(stack));\n\n }\n\n\n\n pub fn set_gpus(&self, gpus: HashMap<u32, Option<String>>) {\n\n for (id, name) in &gpus {\n\n self.gpu_selector\n", "file_path": "gui/src/app/header.rs", "rank": 90, "score": 9.557552544067033 }, { "content": "impl From<serde_json::Error> for 
ConfigError {\n\n fn from(error: serde_json::Error) -> Self {\n\n ConfigError::ParseError(error)\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Hash, Eq)]\n\npub struct GpuIdentifier {\n\n pub pci_id: String,\n\n pub card_model: Option<String>,\n\n pub gpu_model: Option<String>,\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl PartialEq for GpuIdentifier {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.pci_id == other.pci_id\n\n && self.gpu_model == other.gpu_model\n\n && self.card_model == other.card_model\n\n }\n", "file_path": "daemon/src/config.rs", "rank": 91, "score": 9.520532260075745 }, { "content": "pub struct GpuController {\n\n pub hw_path: PathBuf,\n\n hw_mon: Option<HWMon>,\n\n gpu_info: GpuInfo,\n\n config: GpuConfig,\n\n}\n\n\n\nimpl GpuController {\n\n pub fn new(hw_path: PathBuf, config: GpuConfig, pci_db: &Option<PciDatabase>) -> Self {\n\n let mut controller = GpuController {\n\n hw_path: hw_path.clone(),\n\n hw_mon: None,\n\n config: GpuConfig::new(),\n\n gpu_info: GpuInfo::default(),\n\n };\n\n\n\n controller.gpu_info = controller.get_info_initial(pci_db);\n\n\n\n controller.load_config(&config);\n\n\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 92, "score": 9.51277120035069 }, { "content": " 2,\n\n 1,\n\n 1,\n\n );\n\n\n\n let fan_control_enabled_switch = Switch::new();\n\n\n\n fan_control_enabled_switch.set_active(true);\n\n fan_control_enabled_switch.set_halign(Align::Start);\n\n\n\n grid.attach(&fan_control_enabled_switch, 2, 2, 1, 1);\n\n\n\n container.pack_start(&grid, false, false, 5);\n\n\n\n let fan_curve_frame = FanCurveFrame::new();\n\n\n\n container.pack_start(&fan_curve_frame.container, true, true, 5);\n\n\n\n // Show/hide fan curve when the switch is toggled\n\n {\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 93, "score": 9.465536053224502 }, { "content": " ClocksTable::Old(clocks_table) => {\n\n let (profile, voltage) = {\n\n let power_level = clocks_table.mem_power_levels.iter().next_back().unwrap();\n\n (power_level.0, power_level.1 .1)\n\n };\n\n\n\n let line = format!(\"m {} {} {}\\n\", profile, clockspeed, voltage);\n\n\n\n log::info!(\"Writing {} to pp_od_clk_voltage\", line);\n\n\n\n fs::write(self.hw_path.join(\"pp_od_clk_voltage\"), line)?;\n\n\n\n // self.config\n\n // .gpu_power_states\n\n // .insert(*profile, (clockspeed, voltage.unwrap()));\n\n }\n\n ClocksTable::New(_) => {\n\n let s_line = format!(\"m 1 {}\\n\", clockspeed);\n\n\n\n fs::write(self.hw_path.join(\"pp_od_clk_voltage\"), s_line)?;\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 94, "score": 9.329705174731107 }, { "content": " link_speed,\n\n link_width,\n\n vulkan_info,\n\n pci_slot,\n\n power_profile: None,\n\n clocks_table: None,\n\n power_cap: None,\n\n power_cap_max: None,\n\n }\n\n }\n\n\n\n pub fn get_stats(&self) -> Result<GpuStats, HWMonError> {\n\n let mem_total = match fs::read_to_string(self.hw_path.join(\"mem_info_vram_total\")) {\n\n Ok(a) => Some(a.trim().parse::<u64>().unwrap() / 1024 / 1024),\n\n Err(_) => None,\n\n };\n\n\n\n let mem_used = match fs::read_to_string(self.hw_path.join(\"mem_info_vram_used\")) {\n\n Ok(a) => Some(a.trim().parse::<u64>().unwrap() / 1024 / 1024),\n\n Err(_) => None,\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 95, "score": 9.309818433620801 }, { "content": " println!(\n\n \"{} {}{}\",\n\n \"VRAM Clock:\".green(),\n\n gpu_stats.mem_freq.unwrap_or_default().to_string().bold(),\n\n \"MHz\".bold(),\n\n );\n\n println!(\n\n \"{} {}/{}{}\",\n\n \"Power 
Usage:\".green(),\n\n gpu_stats.power_avg.unwrap_or_default().to_string().bold(),\n\n gpu_stats.power_cap.unwrap_or_default().to_string().bold(),\n\n \"W\".bold(),\n\n );\n\n}\n", "file_path": "cli/src/main.rs", "rank": 96, "score": 8.969784876945347 }, { "content": " (fan_speed as f64 / stats.max_fan_speed.unwrap() as f64 * 100.0).round()\n\n )),\n\n None => self.fan_speed_label.set_text(\"No fan detected\"),\n\n }\n\n }\n\n\n\n pub fn set_ventilation_info(&self, fan_control_info: FanControlInfo) {\n\n log::info!(\"Setting fan control info {:?}\", fan_control_info);\n\n\n\n self.fan_control_enabled_switch.set_visible(true);\n\n\n\n self.fan_control_enabled_switch\n\n .set_active(!fan_control_info.enabled);\n\n\n\n if !fan_control_info.enabled {\n\n self.fan_curve_frame.hide();\n\n } else {\n\n self.fan_curve_frame.show();\n\n }\n\n\n", "file_path": "gui/src/app/root_stack/thermals_page.rs", "rank": 97, "score": 8.761813781982944 }, { "content": " };\n\n\n\n log::trace!(\"Parsing clock levels\");\n\n\n\n // If `next()` is used on the main iterator directly, it will consume the `OD_MCLK:` aswell,\n\n // which means the outer loop won't recognize that the next lines are of a different clock type.\n\n // Thus, it is better to count how many lines were of the clock levels and then substract that amount from the main iterator.\n\n let mut i = 0;\n\n let mut lines = lines_iter.clone();\n\n\n\n while let Some(line) = lines.next() {\n\n let line = line.trim();\n\n log::trace!(\"Parsing power level line {}\", line);\n\n\n\n // Probably shouldn't unwrap, will fail on empty lines in clocks table\n\n if let Some(_) = line.chars().next().unwrap().to_digit(10) {\n\n let (num, clock, voltage) =\n\n GpuController::parse_clock_voltage_line(line)?;\n\n\n\n log::trace!(\"Power level {}: {}MHz {}mV\", num, clock, voltage);\n", "file_path": "daemon/src/gpu_controller.rs", "rank": 98, "score": 8.614702169583746 }, { "content": "use colored::*;\n\nuse daemon::daemon_connection::DaemonConnection;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "cli/src/main.rs", "rank": 99, "score": 8.453057195876337 } ]
Rust
src/lib.rs
aldanor/pod-typeinfo
9118046ce2a8e9b4e6cc523d18d7eace8fdaf0f1
#![cfg_attr(feature = "unstable", feature(plugin))] #![cfg_attr(feature = "unstable", plugin(clippy))] #[derive(Clone, PartialEq, Debug)] pub enum Type { Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64, Char, Bool, Array(Box<Type>, usize), Compound(Vec<Field>, usize), } impl Type { pub fn size(&self) -> usize { match *self { Type::Int8 | Type::UInt8 | Type::Bool => 1, Type::Int16 | Type::UInt16 => 2, Type::Int32 | Type::UInt32 | Type::Float32 | Type::Char => 4, Type::Int64 | Type::UInt64 | Type::Float64 => 8, Type::Array(ref ty, num) => ty.size() * num, Type::Compound(_, size) => size, } } pub fn is_scalar(&self) -> bool { !self.is_array() && !self.is_compound() } pub fn is_array(&self) -> bool { if let Type::Array(_, _) = *self { true } else { false } } pub fn is_compound(&self) -> bool { if let Type::Compound(_, _) = *self { true } else { false } } } #[derive(Clone, PartialEq, Debug)] pub struct Field { pub ty: Type, pub name: String, pub offset: usize, } impl Field { pub fn new<S: Into<String>>(ty: &Type, name: S, offset: usize) -> Field { Field { ty: ty.clone(), name: name.into(), offset: offset } } } pub trait TypeInfo: Copy { fn type_info() -> Type; } macro_rules! impl_scalar { ($t:ty, $i:ident) => ( impl $crate::TypeInfo for $t { #[inline(always)] fn type_info() -> $crate::Type { $crate::Type::$i } } ) } impl_scalar!(i8, Int8); impl_scalar!(i16, Int16); impl_scalar!(i32, Int32); impl_scalar!(i64, Int64); impl_scalar!(u8, UInt8); impl_scalar!(u16, UInt16); impl_scalar!(u32, UInt32); impl_scalar!(u64, UInt64); impl_scalar!(f32, Float32); impl_scalar!(f64, Float64); impl_scalar!(char, Char); impl_scalar!(bool, Bool); #[cfg(target_pointer_width = "32")] impl_scalar!(isize, Int32); #[cfg(target_pointer_width = "64")] impl_scalar!(isize, Int64); #[cfg(target_pointer_width = "32")] impl_scalar!(usize, UInt32); #[cfg(target_pointer_width = "64")] impl_scalar!(usize, UInt64); macro_rules! impl_array { ($($n:expr),*$(,)*) => { $( impl<T: $crate::TypeInfo> $crate::TypeInfo for [T; $n] { #[inline(always)] fn type_info() -> $crate::Type { $crate::Type::Array( Box::new(<T as $crate::TypeInfo>::type_info()), $n ) } } )* }; } impl_array!( 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, ); #[macro_export] macro_rules! 
def { ($($(#[$attr:meta])* struct $s:ident { $($i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* struct $s { $($i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); ($($(#[$attr:meta])* pub struct $s:ident { $($i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* pub struct $s { $($i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); ($($(#[$attr:meta])* pub struct $s:ident { $(pub $i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* pub struct $s { $(pub $i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); (@impl $s:ident { $($i:ident: $t:ty),+ }) => ( impl $crate::TypeInfo for $s { fn type_info() -> $crate::Type { let base = 0usize as *const $s; $crate::Type::Compound(vec![$( $crate::Field::new( &<$t as $crate::TypeInfo>::type_info(), stringify!($i), unsafe { &((*base).$i) as *const $t as usize} ) ),+], ::std::mem::size_of::<$s>()) } } ); }
#![cfg_attr(feature = "unstable", feature(plugin))] #![cfg_attr(feature = "unstable", plugin(clippy))] #[derive(Clone, PartialEq, Debug)] pub enum Type { Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64, Char, Bool, Array(Box<Type>, usize), Compound(Vec<Field>, usize), } impl Type { pub fn size(&self) -> usize { match *self { Type::Int8 | Type::UInt8 | Type::Bool => 1, Type::Int16 | Type::UInt16 => 2, Type::Int32 | Type::UInt32 | Type::Float32 | Type::Char => 4, Type::Int64 | Type::UInt64 | Type::Float64 => 8, Type::Array(ref ty, num) => ty.size() * num, Type::Compound(_, size) => size, } } pub fn is_scalar(&self) -> bool { !self.is_array() && !self.is_compound() } pub fn is_array(&self) -> bool { if let Type::Array(_, _) = *self { true } else { false } } pub fn is_compound(&self) -> bool { if let Type::Compound(_, _) = *self { true } else { false } } } #[derive(Clone, PartialEq, Debug)] pub struct Field { pub ty: Type, pub name: String, pub offset: usize, } impl Field {
} pub trait TypeInfo: Copy { fn type_info() -> Type; } macro_rules! impl_scalar { ($t:ty, $i:ident) => ( impl $crate::TypeInfo for $t { #[inline(always)] fn type_info() -> $crate::Type { $crate::Type::$i } } ) } impl_scalar!(i8, Int8); impl_scalar!(i16, Int16); impl_scalar!(i32, Int32); impl_scalar!(i64, Int64); impl_scalar!(u8, UInt8); impl_scalar!(u16, UInt16); impl_scalar!(u32, UInt32); impl_scalar!(u64, UInt64); impl_scalar!(f32, Float32); impl_scalar!(f64, Float64); impl_scalar!(char, Char); impl_scalar!(bool, Bool); #[cfg(target_pointer_width = "32")] impl_scalar!(isize, Int32); #[cfg(target_pointer_width = "64")] impl_scalar!(isize, Int64); #[cfg(target_pointer_width = "32")] impl_scalar!(usize, UInt32); #[cfg(target_pointer_width = "64")] impl_scalar!(usize, UInt64); macro_rules! impl_array { ($($n:expr),*$(,)*) => { $( impl<T: $crate::TypeInfo> $crate::TypeInfo for [T; $n] { #[inline(always)] fn type_info() -> $crate::Type { $crate::Type::Array( Box::new(<T as $crate::TypeInfo>::type_info()), $n ) } } )* }; } impl_array!( 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, ); #[macro_export] macro_rules! def { ($($(#[$attr:meta])* struct $s:ident { $($i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* struct $s { $($i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); ($($(#[$attr:meta])* pub struct $s:ident { $($i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* pub struct $s { $($i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); ($($(#[$attr:meta])* pub struct $s:ident { $(pub $i:ident: $t:ty),+$(,)* })*) => ( $( #[allow(dead_code)] #[derive(Clone, Copy)] $(#[$attr])* pub struct $s { $(pub $i: $t),+ } def!(@impl $s { $($i: $t),+ } ); )* ); (@impl $s:ident { $($i:ident: $t:ty),+ }) => ( impl $crate::TypeInfo for $s { fn type_info() -> $crate::Type { let base = 0usize as *const $s; $crate::Type::Compound(vec![$( $crate::Field::new( &<$t as $crate::TypeInfo>::type_info(), stringify!($i), unsafe { &((*base).$i) as *const $t as usize} ) ),+], ::std::mem::size_of::<$s>()) } } ); }
pub fn new<S: Into<String>>(ty: &Type, name: S, offset: usize) -> Field {
    Field { ty: ty.clone(), name: name.into(), offset: offset }
}
function_block-full_function
[ { "content": "#[test]\n\n#[allow(unused_variables, unused_imports)]\n\nfn test_pub_structs_fields() {\n\n use module::{A, B};\n\n use module::multiple::{E, F, G, H};\n\n let b = B { x: 1, y: 2 };\n\n}\n", "file_path": "tests/test.rs", "rank": 0, "score": 65230.86850490266 }, { "content": "fn main() {\n\n use m::{Foo, Bar}; //~ ERROR struct `Foo` is private\n\n let f = Foo { a: 1 }; //~ ERROR field `a` of struct `m::Foo` is private\n\n let b = Bar { a: 1 }; //~ ERROR field `a` of struct `m::Bar` is private\n\n}\n", "file_path": "tests/compile-fail/non-pub-structs-fields.rs", "rank": 1, "score": 59457.96976269319 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\nmod m {\n\n def! {\n\n struct Foo {\n\n a: i32,\n\n }\n\n }\n\n\n\n def! {\n\n pub struct Bar {\n\n a: i32,\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/compile-fail/non-pub-structs-fields.rs", "rank": 2, "score": 39741.922062804355 }, { "content": "#[test]\n\nfn test_scalar_types() {\n\n fn check_scalar_type<T: TypeInfo>(ty: Type) {\n\n assert_eq!(<T as TypeInfo>::type_info(), ty);\n\n assert_eq!(ty.size(), mem::size_of::<T>());\n\n assert!(ty.is_scalar() && !ty.is_array() && !ty.is_compound());\n\n }\n\n\n\n check_scalar_type::<i8>(Int8);\n\n check_scalar_type::<i16>(Int16);\n\n check_scalar_type::<i32>(Int32);\n\n check_scalar_type::<i64>(Int64);\n\n check_scalar_type::<u8>(UInt8);\n\n check_scalar_type::<u16>(UInt16);\n\n check_scalar_type::<u32>(UInt32);\n\n check_scalar_type::<u64>(UInt64);\n\n check_scalar_type::<f32>(Float32);\n\n check_scalar_type::<f64>(Float64);\n\n check_scalar_type::<bool>(Bool);\n\n check_scalar_type::<char>(Char);\n\n\n\n if mem::size_of::<usize>() == 4 {\n\n check_scalar_type::<isize>(Int32);\n\n check_scalar_type::<usize>(UInt32);\n\n } else {\n\n check_scalar_type::<isize>(Int64);\n\n check_scalar_type::<usize>(UInt64);\n\n }\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 3, "score": 39040.98427437687 }, { "content": "#[test]\n\nfn test_compound_types() {\n\n def![struct X { a: i32, }];\n\n let ty = X::type_info();\n\n assert_eq!(ty, Compound(vec![\n\n Field::new(&Int32, \"a\", 0)\n\n ], mem::size_of::<X>()));\n\n assert_eq!(ty.size(), mem::size_of::<X>());\n\n assert!(ty.is_compound() && !ty.is_scalar() && !ty.is_array());\n\n\n\n def![struct Y { a: u64, x: [X; 2] }];\n\n let ty = Y::type_info();\n\n assert_eq!(ty, Compound(vec![\n\n Field::new(&UInt64, \"a\", 0),\n\n Field::new(&Array(Box::new(X::type_info()), 2), \"x\", 8),\n\n ], mem::size_of::<Y>()));\n\n assert_eq!(ty.size(), mem::size_of::<Y>());\n\n assert!(ty.is_compound() && !ty.is_scalar() && !ty.is_array());\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 4, "score": 39040.98427437687 }, { "content": "#[test]\n\nfn test_array_types() {\n\n let ty = <[u16; 42] as TypeInfo>::type_info();\n\n assert_eq!(ty, Array(Box::new(UInt16), 42));\n\n assert_eq!(ty.size(), 2 * 42);\n\n assert!(ty.is_array() && !ty.is_scalar() && !ty.is_compound());\n\n\n\n let ty = <[[i8; 2]; 3] as TypeInfo>::type_info();\n\n assert_eq!(ty, Array(Box::new(Array(Box::new(Int8), 2)), 3));\n\n assert_eq!(ty.size(), 1 * 2 * 3);\n\n assert!(ty.is_array() && !ty.is_scalar() && !ty.is_compound());\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 5, "score": 39040.98427437687 }, { "content": "#[test]\n\nfn test_struct_attributes() {\n\n def![struct X { a: i8, b: u64 }];\n\n def![#[repr(packed)] struct Y { a: i8, b: u64 }];\n\n assert!(X::type_info().size() > Y::type_info().size());\n\n}\n\n\n\n#[cfg(test)]\n\nmod module {\n\n def! 
{\n\n pub struct A {\n\n x: i32,\n\n y: i32\n\n }\n\n }\n\n def! {\n\n pub struct B {\n\n pub x: i32,\n\n pub y: i32\n\n }\n\n }\n", "file_path": "tests/test.rs", "rank": 6, "score": 38590.70864802445 }, { "content": "#[test]\n\nfn compile_test() {\n\n run_mode(\"compile-fail\");\n\n}\n", "file_path": "tests/compile-test.rs", "rank": 8, "score": 24010.728167067424 }, { "content": "#[test]\n\nfn test_compound_copy_clone() {\n\n def![struct X { a: char }];\n\n let x = X { a: '0' };\n\n let y = x;\n\n assert_eq!(x.a, y.a);\n\n assert_eq!(x.clone().a, y.clone().a);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 9, "score": 23182.89635683979 }, { "content": "fn run_mode(mode: &'static str) {\n\n let mut config = compiletest::default_config();\n\n\n\n let cfg_mode = mode.parse().ok().expect(\"Invalid mode\");\n\n\n\n config.target_rustcflags = Some(\"-L target/debug/ -L target/debug/deps/\".to_owned());\n\n if let Ok(name) = var::<&str>(\"TESTNAME\") {\n\n let s : String = name.to_owned();\n\n config.filter = Some(s)\n\n }\n\n config.mode = cfg_mode;\n\n config.src_base = PathBuf::from(format!(\"tests/{}\", mode));\n\n\n\n compiletest::run_tests(&config);\n\n}\n\n\n", "file_path": "tests/compile-test.rs", "rank": 10, "score": 17362.892760348695 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\ndef! {\n\n struct Foo {} //~ ERROR no rules expected the token `}`\n\n}\n", "file_path": "tests/compile-fail/empty-struct.rs", "rank": 11, "score": 14579.980480957027 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\ndef! {\n\n struct Foo; //~ ERROR no rules expected the token `;`\n\n}\n", "file_path": "tests/compile-fail/unit-struct.rs", "rank": 12, "score": 14579.980480957027 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\ndef! {\n\n struct Foo {\n\n a: i32,\n\n pub b: i32, //~ ERROR no rules expected the token `b`\n\n }\n\n}\n", "file_path": "tests/compile-fail/mixed-field-qualifiers.rs", "rank": 13, "score": 14159.178751427606 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\ndef! {\n\n struct Foo {\n\n a: i32,\n\n b: i32,\n\n }\n\n\n\n pub struct Bar { //~ ERROR no rules expected the token `pub`\n\n a: i32,\n\n b: i32,\n\n }\n\n}\n", "file_path": "tests/compile-fail/mixed-struct-qualifiers.rs", "rank": 14, "score": 13735.41063191077 }, { "content": "# typeinfo\n\n\n\n[![Build Status](https://travis-ci.org/aldanor/typeinfo.svg?branch=master)](https://travis-ci.org/aldanor/typeinfo)\n\n[![Build Status](https://ci.appveyor.com/api/projects/status/uh34kafh5qs458ue/branch/master?svg=true)](https://ci.appveyor.com/project/aldanor/typeinfo)\n\n\n\n[Documentation](http://ivansmirnov.io/typeinfo)\n\n\n\nThe `typeinfo` crate provides access to type information for POD (*plain old data*)\n\ntypes at runtime.\n\n\n\n## Examples\n\n\n\nDefining reflectable struct types only requires wrapping the struct definition in\n\nthe [`def!`](http://ivansmirnov.io/typeinfo/typeinfo/macro.def!.html) macro:\n\n\n\n```rust\n\n#[use_macro]\n\nextern crate typeinfo;\n\nuse typeinfo::TypeInfo;\n\n\n\ndef! 
{\n\n #[derive(Debug)]\n\n pub struct Color { r: u16, g: u16, b: u16, }\n\n\n\n #[derive(Debug)]\n\n #[repr(packed)]\n\n pub struct Palette {\n\n monochrome: bool,\n\n colors: [Color; 16]\n\n }\n\n}\n\n\n\nfn main() {\n\n println!(\"{:#?}\", Palette::type_info());\n\n}\n\n```\n\n\n\nOutput (whitespace formatted):\n\n\n\n```rust\n\nCompound([\n\n Field { ty: Bool, name: \"monochrome\", offset: 0 },\n\n Field {\n\n ty: Array(\n\n Compound([\n\n Field { ty: UInt16, name: \"r\", offset: 0 },\n\n Field { ty: UInt16, name: \"g\", offset: 2 },\n\n Field { ty: UInt16, name: \"b\", offset: 4 }\n\n ], 6),\n\n 16),\n\n name: \"colors\",\n\n offset: 1\n\n }\n\n], 97)\n\n```\n\n\n\n## License\n\n\n\n`typeinfo` is primarily distributed under the terms of both the MIT license and\n\nthe Apache License (Version 2.0), with portions covered by various BSD-like\n\nlicenses.\n\n\n\nSee [LICENSE-APACHE](LICENSE-APACHE), and [LICENSE-MIT](LICENSE-MIT) for details.\n", "file_path": "README.md", "rank": 19, "score": 14.8180888872492 }, { "content": "#[macro_use]\n\nextern crate typeinfo;\n\n\n\nuse std::mem;\n\n\n\nuse typeinfo::Type::*;\n\nuse typeinfo::{Type, TypeInfo, Field};\n\n\n\n#[test]\n", "file_path": "tests/test.rs", "rank": 28, "score": 4.21639300655623 }, { "content": "\n\n pub mod multiple {\n\n def! {\n\n struct C { x: i32 }\n\n struct D { x: i32 }\n\n }\n\n def! {\n\n pub struct E { x: i32 }\n\n pub struct F { x: i32 }\n\n }\n\n def! {\n\n pub struct G { pub x: i32 }\n\n pub struct H { pub x: i32 }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 30, "score": 2.6658362937314948 }, { "content": "#![cfg(feature = \"unstable\")]\n\n\n\nextern crate compiletest_rs as compiletest;\n\n\n\nuse std::path::PathBuf;\n\nuse std::env::var;\n\n\n", "file_path": "tests/compile-test.rs", "rank": 31, "score": 1.9828042244405701 } ]
Rust
src/main.rs
sonald/redis-cli-rs
5de760421f4d052d4a28001d1274ab90688d0fd3
use structopt::StructOpt; use tokio::prelude::*; use tokio::net::TcpStream; use rustyline::{Editor, error::ReadlineError}; use std::error::Error; use log::*; use std::io::Write; mod redis; use self::redis::*; #[derive(Debug, StructOpt)] struct Opt { #[structopt(short, long)] pub debug: bool, #[structopt(short, long, default_value = "127.0.0.1")] pub hostname: String, #[structopt(short("P"), long, default_value = "6379")] pub port: u16, #[structopt(short, long)] pub pipe: bool, pub cmds: Vec<String>, } type Result<T> = std::result::Result<T, Box<dyn Error>>; async fn read_redis_output(cli: &mut TcpStream) -> Result<Vec<u8>> { let mut res = vec![]; let mut buf = [0u8; 64]; loop { let n = cli.read(&mut buf[..]).await?; res.extend(&buf[..n]); if n < 64 { break } } Ok(res) } async fn consume_all_output(cli: &mut TcpStream) -> Result<()> { let res = read_redis_output(cli).await?; let mut start = 0; while let Some((value, left)) = RedisValue::deserialize(&res[start..]) { info!("{}", value); start += left; } Ok(()) } async fn stream(args: Vec<String>, pipe: bool, cli: &mut TcpStream) -> Result<()> { let cmd = args[0].clone(); let data = if pipe { args.into_iter().map(|a| a + "\r\n").collect::<String>().into_bytes() } else { let value = RedisValue::from_vec(args); value.to_wire()? }; cli.write(data.as_slice()).await?; match cmd.as_str() { "monitor" | "subscribe" => loop { consume_all_output(cli).await? }, _ => consume_all_output(cli).await } } async fn interactive<S: AsRef<str>>(prompt: S, cli: &mut TcpStream) -> Result<()> { let mut rl = Editor::<()>::new(); loop { let readline = rl.readline(prompt.as_ref()); match readline { Ok(line) => { rl.add_history_entry(line.as_str()); let args = line.split_whitespace().map(|s| s.to_owned()).collect::<Vec<String>>(); let cmd = args[0].clone(); let value = RedisValue::from_vec(args); cli.write(value.to_wire()?.as_slice()).await?; match cmd.as_str() { "monitor" | "subscribe" => loop { consume_all_output(cli).await? }, _ => { let res = read_redis_output(cli).await?; print!("{}", RedisValue::deserialize(&res).expect("").0); } } }, Err(ReadlineError::Interrupted) => { info!("CTRL-C"); break }, Err(ReadlineError::Eof) => { info!("CTRL-D"); break }, Err(err) => { return Err(Box::new(err)) } } } Ok(()) } async fn run(args: Opt) -> Result<()> { let mut cli = TcpStream::connect((args.hostname.as_str(), args.port)).await?; let prompt = format!("{}:{}> ", args.hostname,args.port); if args.cmds.len() == 0 && !args.pipe { interactive(prompt, &mut cli).await } else { let cmds = if args.pipe { let mut buf = String::new(); tokio::io::stdin().read_to_string(&mut buf).await?; buf.split('\n').map(|s| s.to_owned()).collect::<Vec<String>>() } else { args.cmds }; stream(cmds, args.pipe, &mut cli).await } } #[tokio::main] async fn main() { unsafe { signal_hook::register(signal_hook::SIGINT, || { println!("quit"); std::process::exit(0); }).expect("hook sigint failed"); } let start = std::time::Instant::now(); env_logger::builder().format(move |buf, log| { let current = start.elapsed().as_secs_f32(); writeln!(buf, "{:.04} {} - {}", current, log.level(), log.args()) }).init(); let args = Opt::from_args(); info!("start"); if let Err(err) = run(args).await { error!("error: {}", err); } }
use structopt::StructOpt; use tokio::prelude::*; use tokio::net::TcpStream; use rustyline::{Editor, error::ReadlineError}; use std::error::Error; use log::*; use std::io::Write; mod redis; use self::redis::*; #[derive(Debug, StructOpt)] struct Opt { #[structopt(short, long)] pub debug: bool, #[structopt(short, long, default_value = "127.0.0.1")] pub hostname: String, #[structopt(short("P"), long, default_value = "6379")] pub port: u16, #[structopt(short, long)] pub pipe: bool, pub cmds: Vec<String>, } type Result<T> = std::result::Result<T, Box<dyn Error>>; async fn read_redis_output(cli: &mut TcpStream) -> Result<Vec<u8>> { let mut res = vec![]; let mut buf = [0u8; 64]; loop { let n = cli.read(&mut buf[..]).await?; res.extend(&buf[..n]); if n < 64 { break } } Ok(res) } async fn consume_all_output(cli: &mut TcpStream) -> Result<()> { let res = read_redis_output(cli).await?; let mut start = 0; while let Some((value, left)) = RedisValue::deserialize(&res[start..]) { info!("{}", value); start += left; } Ok(()) } async fn stream(args: Vec<String>, pipe: bool, cli: &mut TcpStream) -> Result<()> { let cmd = args[0].clone(); let data = if pipe { args.into_iter().map(|a| a + "\r\n").collect::<String>().into_bytes() } else { let value = RedisValue::from_vec(args); value.to_wire()? }; cli.write(data.as_slice()).await?; match cmd.as_str() { "monitor" | "subscribe" => loop { consume_all_output(cli).await? }, _ => consume_all_output(cli).await } } async fn interactive<S: AsRef<str>>(prompt: S, cli: &mut TcpStream) -> Result<()> { let mut rl = Editor::<()>::new(); loop { let readline = rl.readline(prompt.as_ref()); match readline { Ok(line) => { rl.add_history_entry(line.as_str()); let args = line.split_whitespace().map(|s| s.to_owned()).collect::<Vec<String>>(); let cmd = args[0].clone(); let value = RedisValue::from_vec(args); cli.write(value.to_wire()?.as_slice()).await?; match cmd.as_str() {
_ => { let res = read_redis_output(cli).await?; print!("{}", RedisValue::deserialize(&res).expect("").0); } } }, Err(ReadlineError::Interrupted) => { info!("CTRL-C"); break }, Err(ReadlineError::Eof) => { info!("CTRL-D"); break }, Err(err) => { return Err(Box::new(err)) } } } Ok(()) } async fn run(args: Opt) -> Result<()> { let mut cli = TcpStream::connect((args.hostname.as_str(), args.port)).await?; let prompt = format!("{}:{}> ", args.hostname,args.port); if args.cmds.len() == 0 && !args.pipe { interactive(prompt, &mut cli).await } else { let cmds = if args.pipe { let mut buf = String::new(); tokio::io::stdin().read_to_string(&mut buf).await?; buf.split('\n').map(|s| s.to_owned()).collect::<Vec<String>>() } else { args.cmds }; stream(cmds, args.pipe, &mut cli).await } } #[tokio::main] async fn main() { unsafe { signal_hook::register(signal_hook::SIGINT, || { println!("quit"); std::process::exit(0); }).expect("hook sigint failed"); } let start = std::time::Instant::now(); env_logger::builder().format(move |buf, log| { let current = start.elapsed().as_secs_f32(); writeln!(buf, "{:.04} {} - {}", current, log.level(), log.args()) }).init(); let args = Opt::from_args(); info!("start"); if let Err(err) = run(args).await { error!("error: {}", err); } }
"monitor" | "subscribe" => loop { consume_all_output(cli).await? },
function_block-random_span
[ { "content": "# cedis\n\n\n\na simple redis-cli replacement written in pure rust\n", "file_path": "README.md", "rank": 2, "score": 13893.999718397194 }, { "content": "use std::fmt;\n\nuse std::error::Error;\n\nuse bytes::Bytes;\n\n\n\n#[derive(Debug)]\n\npub enum RedisValue {\n\n Str(String),\n\n Bulk(String),\n\n Array(Vec<RedisValue>),\n\n Int(i64),\n\n Nil,\n\n Error(String),\n\n}\n\n\n\nimpl fmt::Display for RedisValue {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n RedisValue::Str(s) => write!(f, \"{}\", s),\n\n RedisValue::Bulk(s) => write!(f, \"{:?}\", s),\n\n RedisValue::Int(i) => write!(f, \"(integer) {}\", i),\n", "file_path": "src/redis.rs", "rank": 3, "score": 11706.541123216713 }, { "content": " }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n let mut ts = s.iter();\n\n match_value(&mut ts).map(|v| (v, s.as_ref().len() - ts.size_hint().0))\n\n }\n\n\n\n pub fn to_wire(&self) -> Result<Vec<u8>, Box<dyn Error>> {\n\n use std::io::Write;\n\n macro_rules! write_cmd {\n\n ($res:ident, $e:expr) => (Write::write(&mut $res, $e)?);\n\n }\n\n\n\n let mut res = vec![];\n\n match self {\n\n RedisValue::Str(s) => {\n\n write_cmd!(res, b\"+\");\n", "file_path": "src/redis.rs", "rank": 4, "score": 11705.163807373674 }, { "content": " if let Some(ch) = ts.next() {\n\n match ch {\n\n b'-' => {\n\n Some(RedisValue::Error(match_string(ts)))\n\n },\n\n b'+' => {\n\n Some(RedisValue::Str(match_string(ts)))\n\n },\n\n b'$' => {\n\n let mut n = match_string(ts).parse::<i32>().unwrap_or(0);\n\n if n == -1 {\n\n Some(RedisValue::Nil)\n\n } else {\n\n let mut buf = vec![];\n\n while n > 0 {\n\n let ch = ts.next().expect(\"invlaid resp\");\n\n buf.push(*ch);\n\n n -= 1;\n\n }\n\n\n", "file_path": "src/redis.rs", "rank": 5, "score": 11704.572905013856 }, { "content": " /// deserialize a RedisValue from `s`, and return value with consumed bytes\n\n pub fn deserialize(s: &[u8]) -> Option<(RedisValue, usize)> {\n\n if s.as_ref().len() == 0 {\n\n return None\n\n }\n\n\n\n fn match_string<'a>(ts: &mut impl DoubleEndedIterator<Item = &'a u8>) -> String {\n\n let mut buf = vec![];\n\n while let Some(ch) = ts.next() {\n\n if *ch == b'\\r' {\n\n break\n\n }\n\n\n\n buf.push(*ch);\n\n }\n\n ts.next(); // eat \\n\n\n String::from_utf8_lossy(&buf).to_string()\n\n }\n\n\n\n fn match_value<'a>(ts: &mut impl DoubleEndedIterator<Item = &'a u8>) -> Option<RedisValue> {\n", "file_path": "src/redis.rs", "rank": 6, "score": 11704.446805952932 }, { "content": " Set(String, String),\n\n Pipeline(Vec<Command>)\n\n}\n\n\n\nimpl From<Command> for RedisValue {\n\n fn from(cmd: Command) -> RedisValue {\n\n match cmd {\n\n Command::Get(key) => RedisValue::from_vec(vec![key]),\n\n Command::Set(key, val) => RedisValue::from_vec(vec![key, val]),\n\n _ => {unimplemented!();}\n\n }\n\n }\n\n}\n\n\n\nimpl RedisValue {\n\n pub fn from_vec(v: Vec<String>) -> RedisValue {\n\n match v.len() {\n\n 0 => RedisValue::Nil,\n\n _ => RedisValue::Array(v.into_iter().map(RedisValue::Bulk).collect())\n\n }\n", "file_path": "src/redis.rs", "rank": 7, "score": 11703.498367120566 }, { "content": " RedisValue::Nil => write!(f, \"(nil)\"),\n\n RedisValue::Error(s) => write!(f, \"(error) {}\", s),\n\n RedisValue::Array(v) => {\n\n if v.len() == 0 {\n\n write!(f, \"(empty array)\")\n\n } else {\n\n for i in 1..=v.len() {\n\n write!(f, \"{}{}) {}\", if i > 1 { \"\\n\" } else { \"\" },\n\n i, v[i-1])?;\n\n }\n\n Ok(())\n\n }\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Command {\n\n Get(String),\n", "file_path": 
"src/redis.rs", "rank": 8, "score": 11703.29458821215 }, { "content": " if n == -1 {\n\n Some(RedisValue::Nil)\n\n } else {\n\n let mut buf = vec![];\n\n while n > 0 {\n\n let ch = ts.next().expect(\"invlaid resp\");\n\n buf.push(*ch);\n\n n -= 1;\n\n }\n\n\n\n ts.next();\n\n ts.next();\n\n\n\n Some(RedisValue::Bulk(String::from_utf8_lossy(&buf).to_string()))\n\n }\n\n },\n\n b':' => {\n\n Some(RedisValue::Int(match_string(ts).parse::<i64>().unwrap_or(0)))\n\n },\n\n b'*' => {\n", "file_path": "src/redis.rs", "rank": 9, "score": 11702.800946627392 }, { "content": " ts.next();\n\n ts.next();\n\n\n\n Some(RedisValue::Bulk(String::from_utf8_lossy(&buf).to_string()))\n\n }\n\n },\n\n b':' => {\n\n Some(RedisValue::Int(match_string(ts).parse::<i64>().unwrap_or(0)))\n\n },\n\n b'*' => {\n\n let n = match_string(ts).parse::<usize>().unwrap_or(0);\n\n let res = (0..n).fold(vec![], |mut v, _| {\n\n let value = match_value(ts).expect(\"invalid resp\");\n\n v.push(value);\n\n v\n\n });\n\n\n\n Some(RedisValue::Array(res))\n\n },\n\n _ => panic!(\"invalid redis resp\"),\n", "file_path": "src/redis.rs", "rank": 10, "score": 11702.35650596731 }, { "content": " }\n\n\n\n pub fn is_valid<S: AsRef<[u8]>>(s: S) -> bool {\n\n if s.as_ref().len() == 0 { return false }\n\n\n\n fn match_string<'a>(ts: &mut impl DoubleEndedIterator<Item = &'a u8>) -> bool {\n\n ts.find(|&&c| c == '\\n').is_some()\n\n }\n\n\n\n fn match_value<'a>(ts: &mut impl DoubleEndedIterator<Item = &'a u8>) -> bool {\n\n if let Some(ch) = ts.next() {\n\n match ch {\n\n b'-' => {\n\n match_string(ts)\n\n },\n\n b'+' => {\n\n match_string(ts)\n\n },\n\n b'$' => {\n\n let mut n = match_string(ts).parse::<i32>().unwrap_or(0);\n", "file_path": "src/redis.rs", "rank": 11, "score": 11702.22113370738 }, { "content": " let n = match_string(ts).parse::<usize>().unwrap_or(0);\n\n let res = (0..n).fold(vec![], |mut v, _| {\n\n let value = match_value(ts).expect(\"invalid resp\");\n\n v.push(value);\n\n v\n\n });\n\n\n\n Some(RedisValue::Array(res))\n\n },\n\n _ => panic!(\"invalid redis resp\"),\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n let mut ts = s.iter();\n\n match_value(&mut ts).map(|v| (v, s.as_ref().len() - ts.size_hint().0))\n\n }\n\n\n", "file_path": "src/redis.rs", "rank": 12, "score": 11701.319718012377 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_to_wire_error() {\n\n let r = RedisValue::Error(\"hello\".to_string());\n\n assert_eq!(&r.to_wire().expect(\"\"), b\"-hello\\r\\n\");\n\n }\n\n #[test]\n\n fn test_to_wire_nil() {\n\n let r = RedisValue::Nil;\n\n assert_eq!(&r.to_wire().expect(\"\"), b\"$-1\\r\\n\");\n\n }\n\n #[test]\n\n fn test_to_wire_str() {\n\n let r = RedisValue::Str(\"hello\".to_string());\n\n assert_eq!(&r.to_wire().expect(\"\"), b\"+hello\\r\\n\");\n\n }\n", "file_path": "src/redis.rs", "rank": 13, "score": 11700.8810968167 }, { "content": " RedisValue::Error(s) => {\n\n write_cmd!(res, b\"-\");\n\n write_cmd!(res, s.as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n },\n\n RedisValue::Array(v) => {\n\n write_cmd!(res, b\"*\");\n\n write_cmd!(res, format!(\"{}\", v.len()).as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n for d in v {\n\n write_cmd!(res, &d.to_wire()?);\n\n }\n\n },\n\n }\n\n\n\n Ok(res)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/redis.rs", "rank": 14, "score": 11700.676270771322 }, { "content": " #[test]\n\n fn test_to_wire_bulk() {\n\n let r = RedisValue::Bulk(\"hello\".to_string());\n\n assert_eq!(&r.to_wire().expect(\"\"), 
b\"$5\\r\\nhello\\r\\n\");\n\n }\n\n #[test]\n\n fn test_to_wire_int() {\n\n let r = RedisValue::Int(34);\n\n assert_eq!(&r.to_wire().expect(\"\"), b\":34\\r\\n\");\n\n }\n\n #[test]\n\n fn test_to_wire_array() {\n\n let mut v = vec![];\n\n v.push(RedisValue::Error(\"hello\".to_string()));\n\n v.push(RedisValue::Nil);\n\n v.push(RedisValue::Str(\"hello\".to_string()));\n\n v.push(RedisValue::Bulk(\"hello\".to_string()));\n\n v.push(RedisValue::Int(34));\n\n let r = RedisValue::Array(v);\n\n let r2 = b\"*5\\r\\n-hello\\r\\n$-1\\r\\n+hello\\r\\n$5\\r\\nhello\\r\\n:34\\r\\n\".iter().map(|&c| c).collect::<Vec<u8>>();\n", "file_path": "src/redis.rs", "rank": 15, "score": 11700.530159881708 }, { "content": " assert_eq!(r.to_wire().expect(\"\"), r2);\n\n }\n\n #[test]\n\n fn test_deserialize() {\n\n let data = \"*5\\r\\n-hello\\r\\n$-1\\r\\n+hello\\r\\n$5\\r\\nhello\\r\\n:34\\r\\n\";\n\n let value = RedisValue::deserialize(data).expect(\"\");\n\n println!(\"{}\", value.0);\n\n assert_eq!(value.0.to_wire().expect(\"\"), data.as_bytes());\n\n }\n\n #[test]\n\n fn test_deserialize2() {\n\n let data = \"*5\\r\\n-hello\\r\\n$-1\\r\\n+hello\\r\\n$5\\r\\nhello\\r\\n:34\\r\\n+another value\";\n\n let value = RedisValue::deserialize(data).expect(\"\");\n\n println!(\"{}, {}\", value.0, value.1);\n\n assert_eq!(value.0.to_wire().expect(\"\"), &data.as_bytes()[..value.1]);\n\n }\n\n}\n", "file_path": "src/redis.rs", "rank": 16, "score": 11698.64029227289 }, { "content": " write_cmd!(res, s.as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n },\n\n RedisValue::Bulk(s) => {\n\n write_cmd!(res, b\"$\");\n\n write_cmd!(res, format!(\"{}\", s.len()).as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n write_cmd!(res, s.as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n },\n\n RedisValue::Int(i) => {\n\n write_cmd!(res, b\":\");\n\n write_cmd!(res, format!(\"{}\", i).as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n },\n\n RedisValue::Nil => {\n\n write_cmd!(res, b\"$\");\n\n write_cmd!(res, format!(\"{}\", -1).as_bytes());\n\n write_cmd!(res, b\"\\r\\n\");\n\n },\n", "file_path": "src/redis.rs", "rank": 17, "score": 11698.460879328537 } ]
Rust
runtime_v4/src/runtime.rs
MatchaChoco010/game_loop_async_runtime
4ccd2e4c642cdf793f83c8764d8b408fbfe17b05
use std::cell::RefCell; use std::collections::HashMap; use std::fmt::Debug; use std::future::Future; use std::hash::Hash; use std::pin::Pin; use std::rc::Rc; use std::sync::{Arc, Mutex}; use std::task::{Context, Poll, Waker}; use futures::task::ArcWake; struct Task { future: Pin<Box<dyn Future<Output = ()> + 'static>>, } impl Task { fn new(f: impl Future<Output = ()> + 'static) -> Self { Self { future: Box::pin(f), } } fn poll(&mut self, mut ctx: Context) -> Poll<()> { match Future::poll(self.future.as_mut(), &mut ctx) { Poll::Pending => Poll::Pending, Poll::Ready(()) => Poll::Ready(()), } } } #[derive(Clone)] struct WakeFlag { waked: Arc<Mutex<bool>>, } impl WakeFlag { fn new() -> Self { Self { waked: Arc::new(Mutex::new(false)), } } fn wake(&self) { *self.waked.lock().unwrap() = true; } fn is_waked(&self) -> bool { *self.waked.lock().unwrap() } } #[derive(Clone)] struct WakeFlagWaker { flag: WakeFlag, } impl WakeFlagWaker { fn waker(flag: WakeFlag) -> Waker { futures::task::waker(Arc::new(Self { flag })) } } impl ArcWake for WakeFlagWaker { fn wake_by_ref(arc_self: &Arc<Self>) { arc_self.flag.wake(); } } pub enum RuntimeIsDone { Done, NotDone, } #[derive(Clone)] pub struct Runtime<T: Eq + Hash + Clone + Debug> { frame_counter: u64, tasks: Rc<RefCell<HashMap<T, Vec<Task>>>>, wait_tasks: Rc<RefCell<HashMap<T, Vec<Task>>>>, activated_phase: Rc<RefCell<HashMap<u16, T>>>, } impl<T: Eq + Hash + Clone + Debug> Runtime<T> { pub fn new() -> Self { Self { frame_counter: 0, tasks: Rc::new(RefCell::new(HashMap::new())), wait_tasks: Rc::new(RefCell::new(HashMap::new())), activated_phase: Rc::new(RefCell::new(HashMap::new())), } } pub fn spawn(&self, phase: T, f: impl Future<Output = ()> + 'static) { let mut tasks = self.tasks.borrow_mut(); let ts = tasks.entry(phase).or_insert(vec![]); ts.push(Task::new(f)); } pub fn update(&mut self) -> RuntimeIsDone { let activated_phase = self.activated_phase.borrow(); let mut phases = activated_phase.iter().collect::<Vec<_>>(); phases.sort_by_key(|(&order, _phase)| order); let phases = phases.into_iter().map(|(_order, phase)| phase); for phase in phases { 'current_frame: loop { let task = self .tasks .borrow_mut() .entry(phase.clone()) .or_insert(vec![]) .pop(); match task { None => break 'current_frame, Some(mut task) => { let flag = WakeFlag::new(); let waker = WakeFlagWaker::waker(flag.clone()); match task.poll(Context::from_waker(&waker)) { Poll::Ready(()) => (), Poll::Pending => { if flag.is_waked() { let mut tasks = self.tasks.borrow_mut(); let ts = tasks.entry(phase.clone()).or_insert(vec![]); ts.push(task); } else { let mut wait_tasks = self.wait_tasks.borrow_mut(); let wts = wait_tasks.entry(phase.clone()).or_insert(vec![]); wts.push(task); } } } } } } } { let mut done_flag = true; let wait_tasks = self.wait_tasks.borrow(); for (_p, tasks) in wait_tasks.iter() { if !tasks.is_empty() { done_flag = false; } } if done_flag { return RuntimeIsDone::Done; } } self.frame_counter += 1; std::mem::swap(&mut self.wait_tasks, &mut self.tasks); RuntimeIsDone::NotDone } pub fn frame_counter(&self) -> u64 { self.frame_counter } pub fn activate_phase(&mut self, phase: T, order: u16) { let mut activated_phase = self.activated_phase.borrow_mut(); if let Some(p) = activated_phase.get(&order) { panic!(format!( "Another PHASE has already been registered in this order: {:?}", p )); } activated_phase.insert(order, phase); } }
use std::cell::RefCell; use std::collections::HashMap; use std::fmt::Debug; use std::future::Future; use std::hash::Hash; use std::pin::Pin; use std::rc::Rc; use std::sync::{Arc, Mutex}; use std::task::{Context, Poll, Waker}; use futures::task::ArcWake; struct Task { future: Pin<Box<dyn Future<Output = ()> + 'static>>, } impl Task { fn new(f: impl Future<Output = ()> + 'static) -> Self { Self { future: Box::pin(f), } } fn poll(&mut self, mut ctx: Context) -> Poll<()> { match Future::poll(self.future.as_mut(), &mut ctx) { Poll::Pending => Poll::Pending, Poll::Ready(()) => Poll::Ready(()), } } } #[derive(Clone)] struct WakeFlag { waked: Arc<Mutex<bool>>, } impl WakeFlag { fn new() -> Self { Self { waked: Arc::new(Mutex::new(false)), } } fn wake(&self) { *self.waked.lock().unwrap() = true; } fn is_waked(&self) -> bool { *self.waked.lock().unwrap() } } #[derive(Clone)] struct WakeFlagWaker { flag: WakeFlag, } impl WakeFlagWaker { fn waker(flag: WakeFlag) -> Waker { futures::task::waker(Arc::new(Self { flag })) } } impl ArcWake for WakeFlagWaker { fn wake_by_ref(arc_self: &Arc<Self>) { arc_self.flag.wake(); } } pub enum RuntimeIsDone { Done,
c<RefCell<HashMap<T, Vec<Task>>>>, activated_phase: Rc<RefCell<HashMap<u16, T>>>, } impl<T: Eq + Hash + Clone + Debug> Runtime<T> { pub fn new() -> Self { Self { frame_counter: 0, tasks: Rc::new(RefCell::new(HashMap::new())), wait_tasks: Rc::new(RefCell::new(HashMap::new())), activated_phase: Rc::new(RefCell::new(HashMap::new())), } } pub fn spawn(&self, phase: T, f: impl Future<Output = ()> + 'static) { let mut tasks = self.tasks.borrow_mut(); let ts = tasks.entry(phase).or_insert(vec![]); ts.push(Task::new(f)); } pub fn update(&mut self) -> RuntimeIsDone { let activated_phase = self.activated_phase.borrow(); let mut phases = activated_phase.iter().collect::<Vec<_>>(); phases.sort_by_key(|(&order, _phase)| order); let phases = phases.into_iter().map(|(_order, phase)| phase); for phase in phases { 'current_frame: loop { let task = self .tasks .borrow_mut() .entry(phase.clone()) .or_insert(vec![]) .pop(); match task { None => break 'current_frame, Some(mut task) => { let flag = WakeFlag::new(); let waker = WakeFlagWaker::waker(flag.clone()); match task.poll(Context::from_waker(&waker)) { Poll::Ready(()) => (), Poll::Pending => { if flag.is_waked() { let mut tasks = self.tasks.borrow_mut(); let ts = tasks.entry(phase.clone()).or_insert(vec![]); ts.push(task); } else { let mut wait_tasks = self.wait_tasks.borrow_mut(); let wts = wait_tasks.entry(phase.clone()).or_insert(vec![]); wts.push(task); } } } } } } } { let mut done_flag = true; let wait_tasks = self.wait_tasks.borrow(); for (_p, tasks) in wait_tasks.iter() { if !tasks.is_empty() { done_flag = false; } } if done_flag { return RuntimeIsDone::Done; } } self.frame_counter += 1; std::mem::swap(&mut self.wait_tasks, &mut self.tasks); RuntimeIsDone::NotDone } pub fn frame_counter(&self) -> u64 { self.frame_counter } pub fn activate_phase(&mut self, phase: T, order: u16) { let mut activated_phase = self.activated_phase.borrow_mut(); if let Some(p) = activated_phase.get(&order) { panic!(format!( "Another PHASE has already been registered in this order: {:?}", p )); } activated_phase.insert(order, phase); } }
    NotDone,
}

#[derive(Clone)]
pub struct Runtime<T: Eq + Hash + Clone + Debug> {
    frame_counter: u64,
    tasks: Rc<RefCell<HashMap<T, Vec<Task>>>>,
    wait_tasks: R
random
[ { "content": "#[derive(Clone)]\n\nstruct WakeFlagWaker {\n\n flag: WakeFlag,\n\n}\n\nimpl WakeFlagWaker {\n\n fn waker(flag: WakeFlag) -> Waker {\n\n futures::task::waker(Arc::new(Self { flag }))\n\n }\n\n}\n\nimpl ArcWake for WakeFlagWaker {\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n arc_self.flag.wake();\n\n }\n\n}\n\n\n\n/// 非同期タスクがすべて終了したかどうかのenum。\n\npub enum RuntimeIsDone {\n\n Done,\n\n NotDone,\n\n}\n\n\n", "file_path": "runtime_v6/src/runtime.rs", "rank": 1, "score": 144005.8515091071 }, { "content": "#[derive(Clone)]\n\nstruct WakeFlagWaker {\n\n flag: WakeFlag,\n\n}\n\nimpl WakeFlagWaker {\n\n fn waker(flag: WakeFlag) -> Waker {\n\n futures::task::waker(Arc::new(Self { flag }))\n\n }\n\n}\n\nimpl ArcWake for WakeFlagWaker {\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n arc_self.flag.wake();\n\n }\n\n}\n\n\n\npub enum RuntimeIsDone {\n\n Done,\n\n NotDone,\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "runtime_v3/src/runtime.rs", "rank": 2, "score": 144005.8515091071 }, { "content": "#[derive(Clone)]\n\nstruct WakeFlagWaker {\n\n flag: WakeFlag,\n\n}\n\nimpl WakeFlagWaker {\n\n fn waker(flag: WakeFlag) -> Waker {\n\n futures::task::waker(Arc::new(Self { flag }))\n\n }\n\n}\n\nimpl ArcWake for WakeFlagWaker {\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n arc_self.flag.wake();\n\n }\n\n}\n\n\n\n/// 非同期タスクがすべて終了したかどうかのenum。\n\npub enum RuntimeIsDone {\n\n Done,\n\n NotDone,\n\n}\n\n\n", "file_path": "runtime_v5/src/runtime.rs", "rank": 3, "score": 144005.8515091071 }, { "content": "#[derive(Clone)]\n\nstruct WakeFlag {\n\n waked: Arc<Mutex<bool>>,\n\n}\n\nimpl WakeFlag {\n\n fn new() -> Self {\n\n Self {\n\n waked: Arc::new(Mutex::new(false)),\n\n }\n\n }\n\n\n\n fn wake(&self) {\n\n *self.waked.lock().unwrap() = true;\n\n }\n\n\n\n fn is_waked(&self) -> bool {\n\n *self.waked.lock().unwrap()\n\n }\n\n}\n\n\n", "file_path": "runtime_v3/src/runtime.rs", "rank": 5, "score": 137226.56843271398 }, { "content": "#[derive(Clone)]\n\nstruct WakeFlag {\n\n waked: Arc<Mutex<bool>>,\n\n}\n\nimpl WakeFlag {\n\n fn new() -> Self {\n\n Self {\n\n waked: Arc::new(Mutex::new(false)),\n\n }\n\n }\n\n\n\n fn wake(&self) {\n\n *self.waked.lock().unwrap() = true;\n\n }\n\n\n\n fn is_waked(&self) -> bool {\n\n *self.waked.lock().unwrap()\n\n }\n\n}\n\n\n", "file_path": "runtime_v5/src/runtime.rs", "rank": 6, "score": 137226.56843271398 }, { "content": "#[derive(Clone)]\n\nstruct WakeFlag {\n\n waked: Arc<Mutex<bool>>,\n\n}\n\nimpl WakeFlag {\n\n fn new() -> Self {\n\n Self {\n\n waked: Arc::new(Mutex::new(false)),\n\n }\n\n }\n\n\n\n fn wake(&self) {\n\n *self.waked.lock().unwrap() = true;\n\n }\n\n\n\n fn is_waked(&self) -> bool {\n\n *self.waked.lock().unwrap()\n\n }\n\n}\n\n\n", "file_path": "runtime_v6/src/runtime.rs", "rank": 7, "score": 137226.56843271398 }, { "content": "fn process_tasks(mut tasks: Vec<Task>) -> Vec<Task> {\n\n let mut wait_tasks = vec![];\n\n\n\n 'current_frame: loop {\n\n let task = tasks.pop();\n\n\n\n match task {\n\n // tasksが空だった場合は次のphaseへ\n\n None => break 'current_frame,\n\n Some(mut task) => {\n\n let flag = WakeFlag::new();\n\n let waker = WakeFlagWaker::waker(flag.clone());\n\n\n\n match task.poll(Context::from_waker(&waker)) {\n\n Poll::Ready(()) => (),\n\n Poll::Pending => {\n\n // タスクがwake済みだったらtasksにpush\n\n // そうでなかったらwait_tasksにpushする\n\n if flag.is_waked() {\n\n tasks.push(task);\n", "file_path": "runtime_v5/src/runtime.rs", "rank": 8, "score": 136231.93762807973 }, { "content": "fn process_tasks(mut tasks: Vec<Task>) -> Vec<Task> {\n\n let mut 
wait_tasks = vec![];\n\n\n\n 'current_frame: loop {\n\n let task = tasks.pop();\n\n\n\n match task {\n\n // tasksが空だった場合は次のphaseへ\n\n None => break 'current_frame,\n\n Some(mut task) => {\n\n let flag = WakeFlag::new();\n\n let waker = WakeFlagWaker::waker(flag.clone());\n\n\n\n match task.poll(Context::from_waker(&waker)) {\n\n Poll::Ready(()) => (),\n\n Poll::Pending => {\n\n // タスクがwake済みだったらtasksにpush\n\n // そうでなかったらwait_tasksにpushする\n\n if flag.is_waked() {\n\n tasks.push(task);\n", "file_path": "runtime_v6/src/runtime.rs", "rank": 9, "score": 136231.93762807973 }, { "content": "/// 次のフレームまで待機するFutureを返す関数。\n\npub fn next_frame() -> WaitNextFrameFuture {\n\n WaitNextFrameFuture::new()\n\n}\n", "file_path": "runtime_v5/src/wait_next_frame_future.rs", "rank": 10, "score": 115581.084711563 }, { "content": "/// 次のフレームまで待機するFutureを返す関数。\n\npub fn next_frame() -> WaitNextFrameFuture {\n\n WaitNextFrameFuture::new()\n\n}\n", "file_path": "runtime_v6/src/wait_next_frame_future.rs", "rank": 11, "score": 115581.08471156299 }, { "content": "/// 次のフレームまで待機するFutureを返す関数。\n\npub fn next_frame() -> WaitNextFrameFuture {\n\n WaitNextFrameFuture::new()\n\n}\n", "file_path": "runtime_v4/src/wait_next_frame_future.rs", "rank": 12, "score": 115581.084711563 }, { "content": "pub fn next_frame() -> WaitNextFrameFuture {\n\n WaitNextFrameFuture::new()\n\n}\n", "file_path": "runtime_v3/src/wait_next_frame_future.rs", "rank": 13, "score": 115577.28283237062 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n\nenum Phase {\n\n PreTask,\n\n Task,\n\n PostTask,\n\n}\n\n\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 14, "score": 108189.3315274777 }, { "content": "pub fn next_frame() -> WaitNextFrame {\n\n WaitNextFrame::new()\n\n}\n", "file_path": "runtime_v1/src/wait_next_frame_future.rs", "rank": 15, "score": 107932.66527436292 }, { "content": "pub fn next_frame() -> WaitNextFrame {\n\n WaitNextFrame::new()\n\n}\n", "file_path": "runtime_v2/src/wait_next_frame_future.rs", "rank": 16, "score": 107932.66527436292 }, { "content": "struct Task {\n\n future: Pin<Box<dyn Future<Output = ()> + 'static>>,\n\n}\n\nimpl Task {\n\n fn new(f: impl Future<Output = ()> + 'static) -> Self {\n\n Self {\n\n future: Box::pin(f),\n\n }\n\n }\n\n\n\n fn poll(&mut self, mut ctx: Context) -> Poll<()> {\n\n match Future::poll(self.future.as_mut(), &mut ctx) {\n\n Poll::Pending => Poll::Pending,\n\n Poll::Ready(()) => Poll::Ready(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime_v3/src/runtime.rs", "rank": 17, "score": 106702.54112625335 }, { "content": "struct Task {\n\n future: Pin<Box<dyn Future<Output = ()> + Send + 'static>>,\n\n}\n\nimpl Task {\n\n fn new(f: impl Future<Output = ()> + Send + 'static) -> Self {\n\n Self {\n\n future: Box::pin(f),\n\n }\n\n }\n\n\n\n fn poll(&mut self, mut ctx: Context) -> Poll<()> {\n\n match Future::poll(self.future.as_mut(), &mut ctx) {\n\n Poll::Pending => Poll::Pending,\n\n Poll::Ready(()) => Poll::Ready(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime_v5/src/runtime.rs", "rank": 18, "score": 106702.54112625335 }, { "content": "struct Task {\n\n future: Pin<Box<dyn Future<Output = ()> + Send + 'static>>,\n\n}\n\nimpl Task {\n\n fn new(f: impl Future<Output = ()> + Send + 'static) -> Self {\n\n Self {\n\n future: Box::pin(f),\n\n }\n\n }\n\n\n\n fn poll(&mut self, mut ctx: Context) -> Poll<()> {\n\n match Future::poll(self.future.as_mut(), &mut ctx) {\n\n Poll::Pending => Poll::Pending,\n\n Poll::Ready(()) => Poll::Ready(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"runtime_v6/src/runtime.rs", "rank": 20, "score": 106702.54112625335 }, { "content": "type Task = Pin<Box<dyn Future<Output = ()> + 'static>>;\n\n\n\npub struct Runtime {\n\n frame_counter: u64,\n\n current_pool_index: u8,\n\n task_pool_0: Vec<Task>,\n\n task_pool_1: Vec<Task>,\n\n}\n\nimpl Runtime {\n\n pub fn new() -> Self {\n\n Self {\n\n frame_counter: 0,\n\n current_pool_index: 0,\n\n task_pool_0: vec![],\n\n task_pool_1: vec![],\n\n }\n\n }\n\n\n\n pub fn spawn(&mut self, f: impl Future<Output = ()> + 'static) {\n\n if self.current_pool_index == 0 {\n", "file_path": "runtime_v2/src/runtime.rs", "rank": 21, "score": 105200.1100079846 }, { "content": "type Task = Pin<Box<dyn Future<Output = ()> + 'static>>;\n\n\n\npub struct Runtime {\n\n frame_counter: u64,\n\n current_pool_index: u8,\n\n task_pool_0: Vec<Task>,\n\n task_pool_1: Vec<Task>,\n\n}\n\nimpl Runtime {\n\n pub fn new() -> Self {\n\n Self {\n\n frame_counter: 0,\n\n current_pool_index: 0,\n\n task_pool_0: vec![],\n\n task_pool_1: vec![],\n\n }\n\n }\n\n\n\n pub fn spawn(&mut self, f: impl Future<Output = ()> + 'static) {\n\n if self.current_pool_index == 0 {\n", "file_path": "runtime_v1/src/runtime.rs", "rank": 22, "score": 105200.1100079846 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n runtime.activate_phase(Phase::PreTask, 0);\n\n runtime.activate_phase(Phase::Task, 10);\n\n runtime.activate_phase(Phase::PostTask, 20);\n\n\n\n runtime.spawn(Phase::PreTask, pre_task());\n\n runtime.spawn(Phase::Task, task_1());\n\n runtime.spawn(Phase::Task, task_2());\n\n runtime.spawn(Phase::Task, task_3());\n\n runtime.spawn(Phase::PostTask, post_task_1());\n\n runtime.spawn(Phase::PostTask, post_task_2());\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 23, "score": 104975.21476045523 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n runtime.spawn(task_1());\n\n runtime.spawn(task_2());\n\n runtime.spawn(task_3());\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n}\n", "file_path": "use_v3_task_order/src/main.rs", "rank": 24, "score": 104975.21476045523 }, { "content": "pub trait World: 'static {\n\n type Command;\n\n fn process_command(&mut self, cmd: Self::Command);\n\n}\n", "file_path": "runtime_v6/src/world.rs", "rank": 25, "score": 95164.02751152182 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n\nenum Phase {\n\n QueueCommand,\n\n Flush,\n\n}\n\n\n\nasync fn queue_command(w: Rc<RefCell<impl Write>>) {\n\n {\n\n let mut w = w.borrow_mut();\n\n execute!(w, EnterAlternateScreen).unwrap();\n\n execute!(w, Hide).unwrap();\n\n }\n\n\n\n let count_up = count_up(w.clone());\n\n let tween_1 = linear(w.clone());\n\n let tween_2 = ease_in_quadratic(w.clone());\n\n let tween_3 = ease_out_quadratic(w.clone());\n\n join!(count_up, tween_1, tween_2, tween_3);\n\n\n\n for _ in 0..150 {\n\n next_frame().await;\n", "file_path": "use_v4_tween/src/main.rs", "rank": 26, "score": 79211.8028616626 }, { "content": "#[derive(PartialEq, 
Eq, Hash, Clone, Debug)]\n\nenum Phase {\n\n Input,\n\n Update,\n\n LateUpdate,\n\n Render,\n\n}\n\n\n", "file_path": "use_v5_cli_game/src/main.rs", "rank": 27, "score": 77321.03124175034 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n\nenum Phase {\n\n Input,\n\n UpdateSubmitCommand,\n\n UpdateProcessCommand,\n\n LateUpdate,\n\n Render,\n\n}\n\n\n", "file_path": "use_v5_cli_game_2/src/main.rs", "rank": 28, "score": 77321.03124175034 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n\nenum Phase {\n\n PreTask,\n\n Task,\n\n PostTask,\n\n}\n\n\n", "file_path": "use_v5_thread_id/src/main.rs", "rank": 29, "score": 77321.03124175034 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n enable_raw_mode().unwrap();\n\n\n\n runtime.spawn(async {\n\n print_key_event().await;\n\n });\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n\n\n disable_raw_mode().unwrap();\n\n}\n", "file_path": "use_v3_keyevent_2/src/main.rs", "rank": 30, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n enable_raw_mode().unwrap();\n\n\n\n runtime.spawn(async {\n\n print_key_event().await;\n\n });\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n\n\n disable_raw_mode().unwrap();\n\n}\n", "file_path": "use_v2_keyevent/src/main.rs", "rank": 31, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n runtime.spawn(async {\n\n let screen = HideCursor::from(AlternateScreen::from(stdout()));\n\n let screen = Rc::new(RefCell::new(screen));\n\n\n\n let tween1 = count_up(screen.clone());\n\n let tween2 = linear(screen.clone());\n\n let tween3 = ease_in_cubic(screen.clone());\n\n let tween4 = ease_out_cubic(screen.clone());\n\n join!(tween1, tween2, tween3, tween4);\n\n\n\n let tween2 = linear(screen.clone());\n\n tween2.await;\n\n\n\n let tween3 = ease_in_cubic(screen.clone());\n\n let tween4 = ease_out_cubic(screen);\n\n join!(tween3, tween4);\n\n\n\n for _ in 0..120 {\n\n next_frame().await;\n\n }\n\n });\n\n runtime.run();\n\n}\n", "file_path": "use_v1_tween/src/main.rs", "rank": 32, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n let stdout = Rc::new(RefCell::new(stdout()));\n\n\n\n runtime.spawn(async move {\n\n {\n\n let mut w = stdout.borrow_mut();\n\n execute!(w, EnterAlternateScreen).unwrap();\n\n execute!(w, Hide).unwrap();\n\n }\n\n\n\n let count_up = count_up(stdout.clone());\n\n let tween_1 = linear(stdout.clone());\n\n let tween_2 = ease_in_quadratic(stdout.clone());\n\n let tween_3 = ease_out_quadratic(stdout.clone());\n\n join!(count_up, tween_1, tween_2, tween_3);\n\n\n\n for _ in 0..150 {\n\n next_frame().await;\n\n }\n", "file_path": "use_v3_tween/src/main.rs", "rank": 33, "score": 75986.90809781833 }, { "content": "fn 
main() {\n\n let mut runtime = Runtime::new();\n\n let stdout = Rc::new(RefCell::new(stdout()));\n\n\n\n runtime.spawn(async move {\n\n {\n\n let mut w = stdout.borrow_mut();\n\n execute!(w, EnterAlternateScreen).unwrap();\n\n execute!(w, Hide).unwrap();\n\n }\n\n\n\n let count_up = count_up(stdout.clone());\n\n let tween_1 = linear(stdout.clone());\n\n let tween_2 = ease_in_quadratic(stdout.clone());\n\n let tween_3 = ease_out_quadratic(stdout.clone());\n\n join!(count_up, tween_1, tween_2, tween_3);\n\n\n\n for _ in 0..150 {\n\n next_frame().await;\n\n }\n", "file_path": "use_v2_tween/src/main.rs", "rank": 34, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n enable_raw_mode().unwrap();\n\n\n\n runtime.spawn(async {\n\n print_key_event().await;\n\n });\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n\n\n disable_raw_mode().unwrap();\n\n}\n", "file_path": "use_v3_keyevent/src/main.rs", "rank": 35, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n let stdout = Rc::new(RefCell::new(stdout()));\n\n\n\n runtime.activate_phase(Phase::QueueCommand, 0);\n\n runtime.activate_phase(Phase::Flush, 10);\n\n\n\n runtime.spawn(Phase::QueueCommand, queue_command(stdout.clone()));\n\n runtime.spawn(Phase::Flush, flush(stdout.clone()));\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(0, 16_666_666);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n}\n", "file_path": "use_v4_tween/src/main.rs", "rank": 36, "score": 75986.90809781833 }, { "content": "fn main() {\n\n let world = Arc::new(RwLock::new(World::new()));\n\n let (sender, receiver) = channel();\n\n let stdout = Arc::new(Mutex::new(stdout()));\n\n\n\n let mut runtime = Runtime::new();\n\n\n\n runtime.activate_phase(Phase::Input, 0);\n\n runtime.activate_phase(Phase::UpdateSubmitCommand, 10);\n\n runtime.activate_phase(Phase::UpdateProcessCommand, 11);\n\n runtime.activate_phase(Phase::LateUpdate, 30);\n\n runtime.activate_phase(Phase::Render, 40);\n\n\n\n runtime.spawn(Phase::Input, input_system(Arc::clone(&world)));\n\n\n\n runtime.spawn(\n\n Phase::UpdateSubmitCommand,\n\n player_system(Arc::clone(&world), sender.clone()),\n\n );\n\n runtime.spawn(\n", "file_path": "use_v5_cli_game_2/src/main.rs", "rank": 37, "score": 74106.91447472789 }, { "content": "fn main() {\n\n let mut runtime = Runtime::new();\n\n\n\n runtime.activate_phase(Phase::PreTask, 0);\n\n runtime.activate_phase(Phase::Task, 10);\n\n runtime.activate_phase(Phase::PostTask, 20);\n\n\n\n runtime.spawn(Phase::PreTask, pre_task());\n\n runtime.spawn(Phase::Task, task_1());\n\n runtime.spawn(Phase::Task, task_2());\n\n runtime.spawn(Phase::Task, task_3());\n\n runtime.spawn(Phase::PostTask, post_task_1());\n\n runtime.spawn(Phase::PostTask, post_task_2());\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let 
frame_duration = Duration::new(1, 0);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n", "file_path": "use_v5_thread_id/src/main.rs", "rank": 38, "score": 74106.91447472789 }, { "content": "fn main() {\n\n let world = Arc::new(Mutex::new(World::new()));\n\n let stdout = Arc::new(Mutex::new(stdout()));\n\n\n\n let mut runtime = Runtime::new();\n\n\n\n runtime.activate_phase(Phase::Input, 0);\n\n runtime.activate_phase(Phase::Update, 10);\n\n runtime.activate_phase(Phase::LateUpdate, 30);\n\n runtime.activate_phase(Phase::Render, 40);\n\n\n\n runtime.spawn(Phase::Input, input_system(world.clone()));\n\n runtime.spawn(Phase::Update, player_system(world.clone()));\n\n runtime.spawn(Phase::Update, enemy_system(world.clone()));\n\n runtime.spawn(Phase::LateUpdate, late_update_system(world.clone()));\n\n runtime.spawn(Phase::Render, render_system(world.clone(), stdout));\n\n\n\n enable_raw_mode().unwrap();\n\n\n\n 'update_loop: loop {\n", "file_path": "use_v5_cli_game/src/main.rs", "rank": 39, "score": 74106.91447472789 }, { "content": "fn main() {\n\n let world = GameWorld::new();\n\n\n\n let mut runtime = Runtime::new(world);\n\n\n\n runtime.activate_phase(Phase::Input, 0);\n\n runtime.activate_phase(Phase::Update, 10);\n\n runtime.activate_phase(Phase::LateUpdate, 20);\n\n runtime.activate_phase(Phase::Render, 30);\n\n\n\n runtime.add_async_system(Phase::Input, input_system);\n\n runtime.add_async_system(Phase::Update, player_system);\n\n runtime.add_async_system(Phase::Update, enemy_system);\n\n runtime.add_async_system(Phase::LateUpdate, late_update_system);\n\n runtime.add_async_system(Phase::Render, render_system);\n\n\n\n enable_raw_mode().unwrap();\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n", "file_path": "use_v6_cli_game/src/main.rs", "rank": 40, "score": 74106.91447472789 }, { "content": "fn easing_ease_in_cubic(t: f32) -> f32 {\n\n t * t * t\n\n}\n\n\n\nasync fn ease_in_cubic(screen: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_in_cubic(t);\n\n\n\n let width = terminal_size().unwrap().0;\n\n let bar_width = ((width - 3) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 3 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 41, "score": 63880.257019838755 }, { "content": "fn easing_ease_out_cubic(t: f32) -> f32 {\n\n let t = t - 1.0;\n\n t * t * t + 1.0\n\n}\n\nasync fn ease_out_cubic(screen: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_out_cubic(t);\n\n\n\n let width = terminal_size().unwrap().0;\n\n let bar_width = ((width - 3) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 3 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 42, "score": 63880.257019838755 }, { "content": "use std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse runtime_v3::{next_frame, Runtime, RuntimeIsDone};\n\n\n\nasync fn task_1() {\n\n for i in 0..5 {\n\n println!(\"Task 1: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn task_2() {\n\n for i in 0..5 {\n\n println!(\"Task 2: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn task_3() {\n\n for i in 0..5 {\n\n 
println!(\"Task 3: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v3_task_order/src/main.rs", "rank": 43, "score": 58234.73212868555 }, { "content": "use std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse runtime_v4::{next_frame, Runtime, RuntimeIsDone};\n\n\n\nasync fn pre_task() {\n\n for i in 0..5 {\n\n println!(\"-------- Frame {} start --------\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn task_1() {\n\n for i in 0..5 {\n\n println!(\"Task 1: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 44, "score": 58234.50721538222 }, { "content": "\n\nasync fn task_2() {\n\n for i in 0..5 {\n\n println!(\"Task 2: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn task_3() {\n\n for i in 0..5 {\n\n println!(\"Task 3: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn post_task_1() {\n\n for i in 0..5 {\n\n println!(\"Post Task 1: {}\", i);\n\n next_frame().await;\n\n }\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 45, "score": 58228.949340816376 }, { "content": "}\n\n\n\nasync fn post_task_2() {\n\n for i in 0..5 {\n\n println!(\"Post Task 2: {}\", i);\n\n next_frame().await;\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 46, "score": 58228.26240703188 }, { "content": " RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n}\n", "file_path": "use_v4_task_order/src/main.rs", "rank": 47, "score": 58228.22643047698 }, { "content": "#[derive(Debug, PartialEq, Eq, Hash, Clone)]\n\nenum Phase {\n\n Input,\n\n Update,\n\n}\n\n\n", "file_path": "runtime_v6/src/main.rs", "rank": 48, "score": 53999.10736415154 }, { "content": "struct MyWorld {\n\n field: f64,\n\n}\n\nimpl World for MyWorld {\n\n type Command = MyWorldCommand;\n\n fn process_command(&mut self, cmd: Self::Command) {\n\n match cmd {\n\n MyWorldCommand::Add(f) => self.field += f,\n\n MyWorldCommand::Sub(f) => self.field -= f,\n\n }\n\n }\n\n}\n\n\n\nasync fn hey(\n\n world: Read<MyWorld>,\n\n sender: Sender<MyWorldCommand>,\n\n _runtime: Runtime<Phase, MyWorld>,\n\n) {\n\n let f = world.field;\n\n println!(\"{}\", f);\n", "file_path": "runtime_v6/src/main.rs", "rank": 49, "score": 53018.452698495224 }, { "content": "enum MyWorldCommand {\n\n Add(f64),\n\n Sub(f64),\n\n}\n\n\n", "file_path": "runtime_v6/src/main.rs", "rank": 50, "score": 52888.699753788016 }, { "content": "fn main() {\n\n let world = MyWorld { field: 0.0 };\n\n\n\n let mut runtime = Runtime::<Phase, MyWorld>::new(world);\n\n\n\n runtime.activate_phase(Phase::Input, 0);\n\n runtime.activate_phase(Phase::Update, 10);\n\n\n\n runtime.add_async_system(Phase::Input, hey);\n\n runtime.add_async_system(Phase::Update, update);\n\n\n\n 'update_loop: loop {\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n }\n\n}\n", "file_path": "runtime_v6/src/main.rs", "rank": 51, "score": 50762.68835869462 }, { "content": "use std::pin::Pin;\n\nuse std::task::Context;\n\nuse std::{future::Future, task::Poll};\n\n\n\npub struct WaitNextFrameFuture {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrameFuture {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrameFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, 
_cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime_v3/src/wait_next_frame_future.rs", "rank": 52, "score": 30943.170735879634 }, { "content": "use std::pin::Pin;\n\nuse std::task::Context;\n\nuse std::{future::Future, task::Poll};\n\n\n\npub struct WaitNextFrameFuture {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrameFuture {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrameFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n\n/// 次のフレームまで待機するFutureを返す関数。\n", "file_path": "runtime_v4/src/wait_next_frame_future.rs", "rank": 53, "score": 30942.123471976818 }, { "content": "use std::pin::Pin;\n\nuse std::task::Context;\n\nuse std::{future::Future, task::Poll};\n\n\n\npub struct WaitNextFrameFuture {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrameFuture {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrameFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n\n/// 次のフレームまで待機するFutureを返す関数。\n", "file_path": "runtime_v6/src/wait_next_frame_future.rs", "rank": 54, "score": 30942.123471976818 }, { "content": "use std::pin::Pin;\n\nuse std::task::Context;\n\nuse std::{future::Future, task::Poll};\n\n\n\npub struct WaitNextFrameFuture {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrameFuture {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrameFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n\n/// 次のフレームまで待機するFutureを返す関数。\n", "file_path": "runtime_v5/src/wait_next_frame_future.rs", "rank": 55, "score": 30942.123471976818 }, { "content": "use std::{\n\n future::Future,\n\n pin::Pin,\n\n task::{Context, Poll},\n\n};\n\n\n\npub struct WaitNextFrame {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrame {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrame {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.get_mut().polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime_v1/src/wait_next_frame_future.rs", "rank": 56, "score": 30941.62323171085 }, { "content": "use std::{\n\n future::Future,\n\n pin::Pin,\n\n task::{Context, Poll},\n\n};\n\n\n\npub struct WaitNextFrame {\n\n polled: bool,\n\n}\n\nimpl WaitNextFrame {\n\n fn new() -> Self {\n\n Self { polled: false }\n\n }\n\n}\n\nimpl Future for WaitNextFrame {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.polled {\n\n Poll::Ready(())\n\n } else {\n\n self.get_mut().polled = true;\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime_v2/src/wait_next_frame_future.rs", "rank": 57, "score": 30941.62323171085 }, { "content": "use std::cell::RefCell;\n\nuse std::io::{stdout, Write};\n\nuse 
std::rc::Rc;\n\nuse std::thread::sleep;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse crossterm::{\n\n cursor::{Hide, MoveTo, Show},\n\n execute, queue,\n\n style::Print,\n\n terminal::{size, EnterAlternateScreen, LeaveAlternateScreen},\n\n};\n\nuse futures::join;\n\n\n\nuse runtime_v4::{next_frame, Runtime, RuntimeIsDone};\n\n\n\nasync fn count_up(w: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n {\n\n let mut w = w.borrow_mut();\n", "file_path": "use_v4_tween/src/main.rs", "rank": 58, "score": 27247.434569357658 }, { "content": "use std::cell::RefCell;\n\nuse std::io::{stdout, Write};\n\nuse std::rc::Rc;\n\n\n\nuse futures::join;\n\nuse termion::{\n\n cursor::{Goto, HideCursor},\n\n screen::AlternateScreen,\n\n terminal_size,\n\n};\n\n\n\nuse runtime_v1::{next_frame, Runtime};\n\n\n\nasync fn count_up(screen: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n {\n\n let mut screen = screen.borrow_mut();\n\n write!(screen, \"{}COUNT UP: {:3}/299\", Goto(2, 5), i).unwrap();\n\n screen.flush().unwrap();\n\n }\n", "file_path": "use_v1_tween/src/main.rs", "rank": 59, "score": 27245.663390209564 }, { "content": "\n\n {\n\n let mut w = stdout.borrow_mut();\n\n execute!(w, Show).unwrap();\n\n execute!(w, LeaveAlternateScreen).unwrap();\n\n }\n\n });\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(0, 16_666_666);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n}\n", "file_path": "use_v3_tween/src/main.rs", "rank": 60, "score": 27245.541095486627 }, { "content": "\n\n {\n\n let mut w = stdout.borrow_mut();\n\n execute!(w, Show).unwrap();\n\n execute!(w, LeaveAlternateScreen).unwrap();\n\n }\n\n });\n\n\n\n 'update_loop: loop {\n\n let frame_start = Instant::now();\n\n let frame_duration = Duration::new(0, 16_666_666);\n\n\n\n match runtime.update() {\n\n RuntimeIsDone::Done => break 'update_loop,\n\n RuntimeIsDone::NotDone => (),\n\n }\n\n\n\n let now = Instant::now();\n\n let duration = now.duration_since(frame_start);\n\n if duration < frame_duration {\n\n sleep(frame_duration - duration);\n\n }\n\n }\n\n}\n", "file_path": "use_v2_tween/src/main.rs", "rank": 61, "score": 27245.541095486627 }, { "content": "use std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crossterm::{\n\n event::{Event, EventStream, KeyCode, KeyEvent, KeyModifiers},\n\n terminal::{disable_raw_mode, enable_raw_mode},\n\n};\n\nuse futures::StreamExt;\n\n\n\nuse runtime_v2::{Runtime, RuntimeIsDone};\n\n\n\n// runtime_v2はawaitすると問答無用で次のフレームに処理が回ってしまう。\n\n// そのためawaitでキーイベントのストリームを待つと1フレームに1つのキーしか処理できない。\n\nasync fn print_key_event() {\n\n let mut reader = EventStream::new();\n\n\n\n while let Some(evt) = reader.next().await {\n\n match evt {\n", "file_path": "use_v2_keyevent/src/main.rs", "rank": 62, "score": 27245.017087173463 }, { "content": "use std::cell::RefCell;\n\nuse std::io::{stdout, Write};\n\nuse std::rc::Rc;\n\nuse std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crossterm::{\n\n cursor::{Hide, MoveTo, Show},\n\n execute, queue,\n\n style::Print,\n\n terminal::{size, EnterAlternateScreen, LeaveAlternateScreen},\n\n};\n\nuse futures::join;\n\n\n\nuse runtime_v2::{next_frame, Runtime, RuntimeIsDone};\n\n\n\nasync fn count_up(w: Rc<RefCell<impl Write>>) {\n\n for 
i in 0..300 {\n", "file_path": "use_v2_tween/src/main.rs", "rank": 63, "score": 27244.960939067852 }, { "content": "use std::cell::RefCell;\n\nuse std::io::{stdout, Write};\n\nuse std::rc::Rc;\n\nuse std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crossterm::{\n\n cursor::{Hide, MoveTo, Show},\n\n execute, queue,\n\n style::Print,\n\n terminal::{size, EnterAlternateScreen, LeaveAlternateScreen},\n\n};\n\nuse futures::join;\n\n\n\nuse runtime_v3::{next_frame, Runtime, RuntimeIsDone};\n\n\n\nasync fn count_up(w: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n", "file_path": "use_v3_tween/src/main.rs", "rank": 64, "score": 27244.960939067852 }, { "content": "use std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crossterm::{\n\n event::{Event, EventStream, KeyCode, KeyEvent, KeyModifiers},\n\n terminal::{disable_raw_mode, enable_raw_mode},\n\n};\n\nuse futures::StreamExt;\n\n\n\nuse runtime_v3::{Runtime, RuntimeIsDone};\n\n\n\n// runtime_v3ではawaitでフレームをまたがない処理もできるようになったので、\n\n// 同フレーム中に複数のイベントを処理できる。はず。\n\n// が、駄目だった。\n\n//\n\n// 調査してみたところpollしてから次のイベントをpollするのが必ず1ミリ秒弱ほどかかるようだ。\n\n// たとえキーが複数入力されていて次のキーが即座に取得できそうな場面でも、\n\n// 何故か1ミリ秒弱待たないと次のキーが取得できないようだ。\n", "file_path": "use_v3_keyevent/src/main.rs", "rank": 65, "score": 27243.465430045067 }, { "content": "async fn print_key_event() {\n\n let mut reader = EventStream::new();\n\n while let Some(evt) = reader.next().await {\n\n match evt {\n\n Ok(event) => match event {\n\n Event::Key(key_event) => {\n\n println!(\"{:?}\", key_event);\n\n match key_event {\n\n KeyEvent {\n\n code: KeyCode::Char('c'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => break,\n\n _ => (),\n\n }\n\n }\n\n _ => (),\n\n },\n\n Err(err) => {\n\n println!(\"{:?}\", err);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "use_v3_keyevent/src/main.rs", "rank": 66, "score": 27242.589515268115 }, { "content": "use std::{\n\n thread::sleep,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crossterm::{\n\n event::{KeyCode, KeyEvent, KeyModifiers},\n\n terminal::{disable_raw_mode, enable_raw_mode},\n\n};\n\nuse futures::StreamExt;\n\n\n\nuse runtime_v3::{Runtime, RuntimeIsDone};\n\n\n\nmod key_event_stream;\n\nuse key_event_stream::KeyEventStream;\n\n\n\n// 改めてKeyEventStreamを自作して実装する。\n\n//\n\n// runtime_v2ではawaitするたびに問答無用で後続タスクが次フレームに送られるので\n\n// 1フレームに1つのキーしか処理することができない。\n", "file_path": "use_v3_keyevent_2/src/main.rs", "rank": 67, "score": 27241.45999361613 }, { "content": " }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n execute!(w, Show).unwrap();\n\n execute!(w, LeaveAlternateScreen).unwrap();\n\n }\n\n}\n\n\n\nasync fn flush(w: Rc<RefCell<impl Write>>) {\n\n loop {\n\n {\n\n let mut w = w.borrow_mut();\n\n w.flush().unwrap();\n\n }\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v4_tween/src/main.rs", "rank": 68, "score": 27241.22822040207 }, { "content": " {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 5)).unwrap();\n\n queue!(w, Print(format!(\"COUNT UP: {:3}/299\", i))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn linear(w: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n", "file_path": "use_v2_tween/src/main.rs", "rank": 69, "score": 27240.38724931258 }, { "content": " {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 5)).unwrap();\n\n 
queue!(w, Print(format!(\"COUNT UP: {:3}/299\", i))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn linear(w: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n", "file_path": "use_v3_tween/src/main.rs", "rank": 70, "score": 27240.38724931258 }, { "content": " progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 7), Print(\"Linear:\")).unwrap();\n\n queue!(w, MoveTo(0, 8), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn ease_in_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_in_quadratic(t: f32) -> f32 {\n\n t * t\n\n }\n\n\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n", "file_path": "use_v4_tween/src/main.rs", "rank": 71, "score": 27240.088687650845 }, { "content": " progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 7), Print(\"Linear:\")).unwrap();\n\n queue!(w, MoveTo(0, 8), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn ease_in_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_in_quadratic(t: f32) -> f32 {\n\n t * t\n", "file_path": "use_v2_tween/src/main.rs", "rank": 72, "score": 27239.800302119846 }, { "content": " progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 7), Print(\"Linear:\")).unwrap();\n\n queue!(w, MoveTo(0, 8), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn ease_in_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_in_quadratic(t: f32) -> f32 {\n\n t * t\n", "file_path": "use_v3_tween/src/main.rs", "rank": 73, "score": 27239.800302119846 }, { "content": "// runtime_v3はawaitしても、皇族タスクが即座に実行可能な場合には次フレームに送らず\n\n// 同フレーム中で後続タスクを続けて処理をする。\n\n// 今回はruntime_v3を使っているので、後続処理が即座に実行可能な場合、\n\n// つまりキーイベントストリームにキーイベントが溜まっていた場合には、\n\n// 同フレーム中で処理を回せるため、1フレームで複数のキーイベントを処理できる。\n\nasync fn print_key_event() {\n\n let mut stream = KeyEventStream::new();\n\n while let Some(evt) = stream.next().await {\n\n println!(\"{:?}\", evt);\n\n match evt {\n\n KeyEvent {\n\n code: KeyCode::Char('c'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => break,\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "use_v3_keyevent_2/src/main.rs", "rank": 74, "score": 27239.271966821463 }, { "content": "\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn linear(screen: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n\n\n let width = terminal_size().unwrap().0;\n\n let bar_width = ((width - 3) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 3 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 75, "score": 27239.18854294379 }, { "content": " }\n\n}\n\n\n\nasync fn ease_out_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_out_quadratic(t: f32) -> f32 {\n\n -t * (t - 2.0)\n\n }\n\n\n\n for i in 0..300 
{\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_out_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n", "file_path": "use_v4_tween/src/main.rs", "rank": 76, "score": 27239.137611682483 }, { "content": " queue!(w, MoveTo(2, 5)).unwrap();\n\n queue!(w, Print(format!(\"COUNT UP: {:3}/299\", i))).unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn linear(w: Rc<RefCell<impl Write>>) {\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n", "file_path": "use_v4_tween/src/main.rs", "rank": 77, "score": 27239.038918737868 }, { "content": " let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 13), Print(\"EaseOutQuadratic:\")).unwrap();\n\n queue!(w, MoveTo(0, 14), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v2_tween/src/main.rs", "rank": 78, "score": 27238.015184058902 }, { "content": " let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 13), Print(\"EaseOutQuadratic:\")).unwrap();\n\n queue!(w, MoveTo(0, 14), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v3_tween/src/main.rs", "rank": 79, "score": 27238.015184058902 }, { "content": " Ok(event) => match event {\n\n Event::Key(key_event) => {\n\n println!(\"{:?}\", key_event);\n\n match key_event {\n\n KeyEvent {\n\n code: KeyCode::Char('c'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => break,\n\n _ => (),\n\n }\n\n }\n\n _ => (),\n\n },\n\n Err(err) => {\n\n println!(\"{:?}\", err);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "use_v2_keyevent/src/main.rs", "rank": 80, "score": 27238.008024852646 }, { "content": " }\n\n\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_in_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 10), Print(\"EaseInQuadratic:\")).unwrap();\n", "file_path": "use_v2_tween/src/main.rs", "rank": 81, "score": 27237.995011087918 }, { "content": " }\n\n\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_in_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n 
let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 10), Print(\"EaseInQuadratic:\")).unwrap();\n", "file_path": "use_v3_tween/src/main.rs", "rank": 82, "score": 27237.995011087918 }, { "content": " {\n\n let mut screen = screen.borrow_mut();\n\n write!(screen, \"{}Linear:\", Goto(2, 7)).unwrap();\n\n write!(screen, \"{}[{}] \", Goto(2, 8), progress_bar).unwrap();\n\n screen.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 83, "score": 27237.94536608429 }, { "content": " {\n\n let mut screen = screen.borrow_mut();\n\n write!(screen, \"{}EaseInCubic:\", Goto(2, 10)).unwrap();\n\n write!(screen, \"{}[{}] \", Goto(2, 11), progress_bar).unwrap();\n\n screen.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 84, "score": 27237.91633371171 }, { "content": " {\n\n let mut screen = screen.borrow_mut();\n\n write!(screen, \"{}EaseOutCubic:\", Goto(2, 13)).unwrap();\n\n write!(screen, \"{}[{}] \", Goto(2, 14), progress_bar).unwrap();\n\n screen.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n", "file_path": "use_v1_tween/src/main.rs", "rank": 85, "score": 27237.91633371171 }, { "content": " let t = easing_ease_in_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n\n let mut progress_bar = \"\".to_string();\n\n for _ in 0..bar_width {\n\n progress_bar.push('#');\n\n }\n\n for _ in 0..(width - 2 - bar_width) {\n\n progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 10), Print(\"EaseInQuadratic:\")).unwrap();\n\n queue!(w, MoveTo(0, 11), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n }\n\n\n\n next_frame().await;\n", "file_path": "use_v4_tween/src/main.rs", "rank": 86, "score": 27237.841344868415 }, { "content": " progress_bar.push(' ');\n\n }\n\n\n\n {\n\n let mut w = w.borrow_mut();\n\n queue!(w, MoveTo(2, 13), Print(\"EaseOutQuadratic:\")).unwrap();\n\n queue!(w, MoveTo(0, 14), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Debug)]\n", "file_path": "use_v4_tween/src/main.rs", "rank": 87, "score": 27237.751606033762 }, { "content": " queue!(w, MoveTo(0, 11), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn ease_out_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_out_quadratic(t: f32) -> f32 {\n\n -t * (t - 2.0)\n\n }\n\n\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_out_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n", "file_path": "use_v3_tween/src/main.rs", "rank": 88, "score": 27236.837663387083 }, { "content": " queue!(w, MoveTo(0, 11), Print(format!(\"[{}]\", progress_bar))).unwrap();\n\n w.flush().unwrap();\n\n }\n\n\n\n next_frame().await;\n\n }\n\n}\n\n\n\nasync fn ease_out_quadratic(w: Rc<RefCell<impl Write>>) {\n\n fn easing_ease_out_quadratic(t: f32) -> f32 {\n\n -t * (t - 2.0)\n\n }\n\n\n\n for i in 0..300 {\n\n let t = i as f32 / 299 as f32;\n\n let t = easing_ease_out_quadratic(t);\n\n\n\n let width = size().unwrap().0;\n\n let bar_width = ((width - 2) as f32 * t) as u16;\n\n\n", "file_path": "use_v2_tween/src/main.rs", "rank": 89, "score": 27236.837663387083 }, { "content": "pub struct Input {\n\n pub z: bool,\n\n pub left: bool,\n\n pub 
right: bool,\n\n pub up: bool,\n\n pub down: bool,\n\n}\n\nimpl Input {\n\n fn new() -> Self {\n\n Self {\n\n z: false,\n\n left: false,\n\n right: false,\n\n up: false,\n\n down: false,\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.z = false;\n", "file_path": "use_v6_cli_game/src/world.rs", "rank": 90, "score": 26343.532970267148 }, { "content": "use rand::prelude::*;\n\n\n\npub const WIDTH: u16 = 30;\n\npub const HEIGHT: u16 = 20;\n\n\n\n#[derive(Debug)]\n\npub struct Input {\n\n pub z: bool,\n\n pub left: bool,\n\n pub right: bool,\n\n pub up: bool,\n\n pub down: bool,\n\n}\n\nimpl Input {\n\n fn new() -> Self {\n\n Self {\n\n z: false,\n\n left: false,\n\n right: false,\n\n up: false,\n", "file_path": "use_v5_cli_game/src/world.rs", "rank": 91, "score": 26343.095822215702 }, { "content": "use rand::prelude::*;\n\n\n\npub const WIDTH: u16 = 30;\n\npub const HEIGHT: u16 = 20;\n\n\n\n#[derive(Debug)]\n\npub struct Input {\n\n pub z: bool,\n\n pub left: bool,\n\n pub right: bool,\n\n pub up: bool,\n\n pub down: bool,\n\n}\n\nimpl Input {\n\n fn new() -> Self {\n\n Self {\n\n z: false,\n\n left: false,\n\n right: false,\n\n up: false,\n", "file_path": "use_v5_cli_game_2/src/world.rs", "rank": 92, "score": 26343.095822215702 }, { "content": " pub attacked: bool,\n\n}\n\nimpl Player {\n\n fn new(x: u16, y: u16) -> Self {\n\n Self {\n\n dead: false,\n\n x,\n\n y,\n\n dir: Direction::Right,\n\n attacked: false,\n\n }\n\n }\n\n}\n\n\n\npub struct Enemy {\n\n pub dead: bool,\n\n pub x: u16,\n\n pub y: u16,\n\n}\n\nimpl Enemy {\n", "file_path": "use_v6_cli_game/src/world.rs", "rank": 93, "score": 26341.342325971946 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Player {\n\n pub dead: bool,\n\n pub x: u16,\n\n pub y: u16,\n\n pub dir: Direction,\n\n pub attacked: bool,\n\n}\n\nimpl Player {\n\n fn new(x: u16, y: u16) -> Self {\n\n Self {\n\n dead: false,\n\n x,\n\n y,\n\n dir: Direction::Right,\n\n attacked: false,\n\n }\n\n }\n\n}\n", "file_path": "use_v5_cli_game_2/src/world.rs", "rank": 94, "score": 26340.177083834256 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Player {\n\n pub dead: bool,\n\n pub x: u16,\n\n pub y: u16,\n\n pub dir: Direction,\n\n pub attacked: bool,\n\n}\n\nimpl Player {\n\n fn new(x: u16, y: u16) -> Self {\n\n Self {\n\n dead: false,\n\n x,\n\n y,\n\n dir: Direction::Right,\n\n attacked: false,\n\n }\n\n }\n\n}\n", "file_path": "use_v5_cli_game/src/world.rs", "rank": 95, "score": 26340.177083834256 }, { "content": " };\n\n enemies.push(Enemy::new(x, y));\n\n }\n\n\n\n Self {\n\n state: GameState::InGame,\n\n should_stop_game: false,\n\n input: Input::new(),\n\n player: Player::new(2, 2),\n\n enemies,\n\n }\n\n }\n\n}\n\nimpl World for GameWorld {\n\n type Command = GameCommand;\n\n fn process_command(&mut self, cmd: Self::Command) {\n\n match cmd {\n\n GameCommand::Input(input) => match input {\n\n InputCommand::Reset => self.input.reset(),\n\n InputCommand::Left => self.input.left = true,\n", "file_path": "use_v6_cli_game/src/world.rs", "rank": 96, "score": 26339.303151565535 }, { "content": "#[derive(Debug)]\n\npub struct World {\n\n pub state: GameState,\n\n pub should_stop_game: bool,\n\n pub input: Input,\n\n pub player: Player,\n\n pub enemies: Vec<Enemy>,\n\n}\n\nimpl World {\n\n pub fn new() -> Self {\n\n let mut rng = rand::thread_rng();\n\n\n\n let mut enemies: Vec<Enemy> = vec![];\n\n for _ in 0..5 {\n\n let (x, y) = 'label: loop {\n\n let x = rng.gen_range(0..WIDTH);\n\n let y = rng.gen_range(0..HEIGHT);\n\n\n\n if x > 4 || y > 4 {\n\n if 
enemies.is_empty() {\n", "file_path": "use_v5_cli_game_2/src/world.rs", "rank": 97, "score": 26339.30207841102 }, { "content": "#[derive(Debug)]\n\npub struct World {\n\n pub state: GameState,\n\n pub should_stop_game: bool,\n\n pub input: Input,\n\n pub player: Player,\n\n pub enemies: Vec<Enemy>,\n\n}\n\nimpl World {\n\n pub fn new() -> Self {\n\n let mut rng = rand::thread_rng();\n\n\n\n let mut enemies: Vec<Enemy> = vec![];\n\n for _ in 0..5 {\n\n let (x, y) = 'label: loop {\n\n let x = rng.gen_range(0..WIDTH);\n\n let y = rng.gen_range(0..HEIGHT);\n\n\n\n if x > 4 || y > 4 {\n\n if enemies.is_empty() {\n", "file_path": "use_v5_cli_game/src/world.rs", "rank": 98, "score": 26339.30207841102 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Enemy {\n\n pub dead: bool,\n\n pub x: u16,\n\n pub y: u16,\n\n}\n\nimpl Enemy {\n\n fn new(x: u16, y: u16) -> Self {\n\n Self { dead: false, x, y }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum GameState {\n\n InGame,\n\n GameOver,\n\n GameClear,\n\n}\n\n\n", "file_path": "use_v5_cli_game_2/src/world.rs", "rank": 99, "score": 26339.13195098571 } ]
Rust
src/prng/chacha.rs
TheIronBorn/rand
e1b60350d0936e6f17c7fd017ed18f3151006f43
use core::fmt; use rand_core::{BlockRngCore, CryptoRng, RngCore, SeedableRng, Error, le}; use rand_core::impls::BlockRng; const SEED_WORDS: usize = 8; const STATE_WORDS: usize = 16; #[derive(Clone, Debug)] pub struct ChaChaRng(BlockRng<ChaChaCore>); impl RngCore for ChaChaRng { #[inline] fn next_u32(&mut self) -> u32 { self.0.next_u32() } #[inline] fn next_u64(&mut self) -> u64 { self.0.next_u64() } #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { self.0.fill_bytes(dest) } #[inline] fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } impl SeedableRng for ChaChaRng { type Seed = <ChaChaCore as SeedableRng>::Seed; fn from_seed(seed: Self::Seed) -> Self { ChaChaRng(BlockRng::<ChaChaCore>::from_seed(seed)) } fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> { BlockRng::<ChaChaCore>::from_rng(rng).map(ChaChaRng) } } impl CryptoRng for ChaChaRng {} impl ChaChaRng { #[deprecated(since="0.5.0", note="use the NewRng or SeedableRng trait")] pub fn new_unseeded() -> ChaChaRng { ChaChaRng::from_seed([0; SEED_WORDS*4]) } pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) { self.0.inner_mut().set_counter(counter_low, counter_high); self.0.reset(); } pub fn set_rounds(&mut self, rounds: usize) { self.0.inner_mut().set_rounds(rounds); self.0.reset(); } } #[derive(Clone)] pub struct ChaChaCore { state: [u32; STATE_WORDS], rounds: usize, } impl fmt::Debug for ChaChaCore { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ChaChaCore {{}}") } } macro_rules! quarter_round{ ($a: expr, $b: expr, $c: expr, $d: expr) => {{ $a = $a.wrapping_add($b); $d ^= $a; $d = $d.rotate_left(16); $c = $c.wrapping_add($d); $b ^= $c; $b = $b.rotate_left(12); $a = $a.wrapping_add($b); $d ^= $a; $d = $d.rotate_left( 8); $c = $c.wrapping_add($d); $b ^= $c; $b = $b.rotate_left( 7); }} } macro_rules! 
double_round{ ($x: expr) => {{ quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]); quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]); quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]); quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]); quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]); quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]); quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]); quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]); }} } impl BlockRngCore for ChaChaCore { type Item = u32; type Results = [u32; STATE_WORDS]; fn generate(&mut self, results: &mut Self::Results) { fn core(results: &mut [u32; STATE_WORDS], state: &[u32; STATE_WORDS], rounds: usize) { let mut tmp = *state; for _ in 0..rounds / 2 { double_round!(tmp); } for i in 0..STATE_WORDS { results[i] = tmp[i].wrapping_add(state[i]); } } core(results, &self.state, self.rounds); self.state[12] = self.state[12].wrapping_add(1); if self.state[12] != 0 { return; }; self.state[13] = self.state[13].wrapping_add(1); if self.state[13] != 0 { return; }; self.state[14] = self.state[14].wrapping_add(1); if self.state[14] != 0 { return; }; self.state[15] = self.state[15].wrapping_add(1); } } impl ChaChaCore { pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) { self.state[12] = counter_low as u32; self.state[13] = (counter_low >> 32) as u32; self.state[14] = counter_high as u32; self.state[15] = (counter_high >> 32) as u32; } pub fn set_rounds(&mut self, rounds: usize) { assert!([4usize, 8, 12, 16, 20].iter().any(|x| *x == rounds)); self.rounds = rounds; } } impl SeedableRng for ChaChaCore { type Seed = [u8; SEED_WORDS*4]; fn from_seed(seed: Self::Seed) -> Self { let mut seed_le = [0u32; SEED_WORDS]; le::read_u32_into(&seed, &mut seed_le); Self { state: [0x61707865, 0x3320646E, 0x79622D32, 0x6B206574, seed_le[0], seed_le[1], seed_le[2], seed_le[3], seed_le[4], seed_le[5], seed_le[6], seed_le[7], 0, 0, 0, 0], rounds: 20, } } } impl CryptoRng for ChaChaCore {} #[cfg(test)] mod test { use {RngCore, SeedableRng}; use super::ChaChaRng; #[test] fn test_chacha_construction() { let seed = [0,0,0,0,0,0,0,0, 1,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0]; let mut rng1 = ChaChaRng::from_seed(seed); assert_eq!(rng1.next_u32(), 137206642); let mut rng2 = ChaChaRng::from_rng(rng1).unwrap(); assert_eq!(rng2.next_u32(), 1325750369); } #[test] fn test_chacha_true_values_a() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, 0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b, 0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8, 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2]; assert_eq!(results, expected); for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, 0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32, 0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874, 0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b]; assert_eq!(results, expected); } #[test] fn test_chacha_true_values_b() { let seed = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; let mut rng = ChaChaRng::from_seed(seed); for _ in 0..16 { rng.next_u32(); } let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x2452eb3a, 0x9249f8ec, 0x8d829d9b, 0xddd4ceb1, 0xe8252083, 0x60818b01, 0xf38422b8, 0x5aaa49c9, 0xbb00ca8e, 0xda3ba7b4, 0xc4b592d1, 0xfdf2732f, 0x4436274e, 0x2561b3c8, 0xebdd4aa6, 0xa0136c00]; 
assert_eq!(results, expected); } #[test] fn test_chacha_true_values_c() { let seed = [0, 0xff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let expected = [0xfb4dd572, 0x4bc42ef1, 0xdf922636, 0x327f1394, 0xa78dea8f, 0x5e269039, 0xa1bebbc1, 0xcaf09aae, 0xa25ab213, 0x48a6b46c, 0x1b9d9bcb, 0x092c5be6, 0x546ca624, 0x1bec45d5, 0x87f47473, 0x96f0992e]; let mut results = [0u32; 16]; let mut rng1 = ChaChaRng::from_seed(seed); for _ in 0..32 { rng1.next_u32(); } for i in results.iter_mut() { *i = rng1.next_u32(); } assert_eq!(results, expected); let mut rng2 = ChaChaRng::from_seed(seed); rng2.set_counter(2, 0); for i in results.iter_mut() { *i = rng2.next_u32(); } assert_eq!(results, expected); } #[test] fn test_chacha_multiple_blocks() { let seed = [0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0, 4,0,0,0, 5,0,0,0, 6,0,0,0, 7,0,0,0]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); for _ in 0..16 { rng.next_u32(); } } let expected = [0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036, 0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384, 0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530, 0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4]; assert_eq!(results, expected); } #[test] fn test_chacha_true_bytes() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u8; 32]; rng.fill_bytes(&mut results); let expected = [118, 184, 224, 173, 160, 241, 61, 144, 64, 93, 106, 229, 83, 134, 189, 40, 189, 210, 25, 184, 160, 141, 237, 26, 168, 54, 239, 204, 139, 119, 13, 199]; assert_eq!(results, expected); } #[test] fn test_chacha_set_counter() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); rng.set_counter(0, 2u64 << 56); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x374dc6c2, 0x3736d58c, 0xb904e24a, 0xcd3f93ef, 0x88228b1a, 0x96a4dfb3, 0x5b76ab72, 0xc727ee54, 0x0e0e978a, 0xf3145c95, 0x1b748ea8, 0xf786c297, 0x99c28f5f, 0x628314e8, 0x398a19fa, 0x6ded1b53]; assert_eq!(results, expected); } #[test] fn test_chacha_set_rounds() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); rng.set_rounds(8); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x2fef003e, 0xd6405f89, 0xe8b85b7f, 0xa1a5091f, 0xc30e842c, 0x3b7f9ace, 0x88e11b18, 0x1e1a71ef, 0x72e14c98, 0x416f21b9, 0x6753449f, 0x19566d45, 0xa3424a31, 0x01b086da, 0xb8fd7b38, 0x42fe0c0e]; assert_eq!(results, expected); } #[test] fn test_chacha_clone() { let seed = [0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0, 4,0,0,0, 5,0,0,0, 6,0,0,0, 7,0,0,0]; let mut rng = ChaChaRng::from_seed(seed); let mut clone = rng.clone(); for _ in 0..16 { assert_eq!(rng.next_u64(), clone.next_u64()); } } }
use core::fmt; use rand_core::{BlockRngCore, CryptoRng, RngCore, SeedableRng, Error, le}; use rand_core::impls::BlockRng; const SEED_WORDS: usize = 8; const STATE_WORDS: usize = 16; #[derive(Clone, Debug)] pub struct ChaChaRng(BlockRng<ChaChaCore>); impl RngCore for ChaChaRng { #[inline] fn next_u32(&mut self) -> u32 { self.0.next_u32() } #[inline] fn next_u64(&mut self) -> u64 { self.0.next_u64() } #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { self.0.fill_bytes(dest) } #[inline] fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } impl SeedableRng for ChaChaRng { type Seed = <ChaChaCore as SeedableRng>::Seed; fn from_seed(seed: Self::Seed) -> Self { ChaChaRng(BlockRng::<ChaChaCore>::from_seed(seed)) } fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> { BlockRng::<ChaChaCore>::from_rng(rng).map(ChaChaRng) } } impl CryptoRng for ChaChaRng {} impl ChaChaRng { #[deprecated(since="0.5.0", note="use the NewRng or SeedableRng trait")] pub fn new_unseeded() -> ChaChaRng { ChaChaRng::from_seed([0; SEED_WORDS*4]) } pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) { self.0.inner_mut().set_counter(counter_low, counter_high); self.0.reset(); } pub fn set_rounds(&mut self, rounds: usize) { self.0.inner_mut().set_rounds(rounds); self.0.reset(); } } #[derive(Clone)] pub struct ChaChaCore { state: [u32; STATE_WORDS], rounds: usize, } impl fmt::Debug for ChaChaCore { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ChaChaCore {{}}") } } macro_rules! quarter_round{ ($a: expr, $b: expr, $c: expr, $d: expr) => {{ $a = $a.wrapping_add($b); $d ^= $a; $d = $d.rotate_left(16); $c = $c.wrapping_add($d); $b ^= $c; $b = $b.rotate_left(12); $a = $a.wrapping_add($b); $d ^= $a; $d = $d.rotate_left( 8); $c = $c.wrapping_add($d); $b ^= $c; $b = $b.rotate_left( 7); }} } macro_rules! 
double_round{ ($x: expr) => {{ quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]); quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]); quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]); quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]); quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]); quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]); quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]); quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]); }} } impl BlockRngCore for ChaChaCore { type Item = u32; type Results = [u32; STATE_WORDS]; fn generate(&mut self, results: &mut Self::Results) { fn core(results: &mut [u32; STATE_WORDS], state: &[u32; STATE_WORDS], rounds: usize) { let mut tmp = *state; for _ in 0..rounds / 2 { double_round!(tmp); } for i in 0..STATE_WORDS { results[i] = tmp[i].wrapping_add(state[i]); } } core(results, &self.state, self.rounds); self.state[12] = self.state[12].wrapping_add(1); if self.state[12] != 0 { return; }; self.state[13] = self.state[13].wrapping_add(1); if self.state[13] != 0 { return; }; self.state[14] = self.state[14].wrapping_add(1); if self.state[14] != 0 { return; }; self.state[15] = self.state[15].wrapping_add(1); } } impl ChaChaCore { pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) { self.state[12] = counter_low as u32; self.state[13] = (counter_low >> 32) as u32; self.state[14] = counter_high as u32; self.state[15] = (counter_high >> 32) as u32; } pub fn set_rounds(&mut self, rounds: usize) { assert!([4usize, 8, 12, 16, 20].iter().any(|x| *x == rounds)); self.rounds = rounds; } } impl SeedableRng for ChaChaCore { type Seed = [u8; SEED_WORDS*4]; fn from_seed(seed: Self::Seed) -> Self { let mut seed_le = [0u32; SEED_WORDS]; le::read_u32_into(&seed, &mut seed_le); Self { state: [0x61707865, 0x3320646E, 0x79622D32, 0x6B206574, seed_le[0], seed_le[1], seed_le[2], seed_le[3], seed_le[4], seed_le[5], seed_le[6], seed_le[7], 0, 0, 0, 0], rounds: 20, } } } impl CryptoRng for ChaChaCore {} #[cfg(test)] mod test { use {RngCore, SeedableRng}; use super::ChaChaRng; #[test] fn test_chacha_construction() { let seed = [0,0,0,0,0,0,0,0, 1,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0]; let mut rng1 = ChaChaRng::from_seed(seed); assert_eq!(rng1.next_u32(), 137206642); let mut rng2 = ChaChaRng::from_rng(rng1).unwrap(); assert_eq!(rng2.next_u32(), 1325750369); } #[test] fn test_chacha_true_values_a() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, 0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b, 0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8, 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2]; assert_eq!(results, expected); for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, 0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32, 0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874, 0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b]; assert_eq!(results, expected); } #[test] fn test_chacha_true_values_b() { let seed = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; let mut rng = ChaChaRng::from_seed(seed); for _ in 0..16 { rng.next_u32(); } let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x2452eb3a, 0x9249f8ec, 0x8d829d9b, 0xddd4ceb1, 0xe8252083, 0x60818b01, 0xf38422b8, 0x5aaa49c9, 0xbb00ca8e, 0xda3ba7b4, 0xc4b592d1, 0xfdf2732f, 0x4436274e, 0x2561b3c8, 0xebdd4aa6, 0xa0136c00]; 
assert_eq!(results, expected); } #[test] fn test_chacha_true_values_c() { let seed = [0, 0xff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let expected = [0xfb4dd572, 0x4bc42ef1, 0xdf922636, 0x327f1394, 0xa78dea8f, 0x5e269039, 0xa1bebbc1, 0xcaf09aae, 0xa25ab213, 0x48a6b46c, 0x1b9d9bcb, 0x092c5be6, 0x546ca624, 0x1bec45d5, 0x87f47473, 0x96f0992e]; let mut results = [0u32; 16]; let mut rng1 = ChaChaRng::from_seed(seed); for _ in 0..32 { rng1.next_u32(); } for i in results.iter_mut() { *i = rng1.next_u32(); } assert_eq!(results, expected); let mut rng2 = ChaChaRng::from_seed(seed); rng2.set_counter(2, 0); for i in results.iter_mut() { *i = rng2.next_u32(); } assert_eq!(results, expected); } #[test] fn test_chacha_multiple_blocks() { let seed = [0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0, 4,0,0,0, 5,0,0,0, 6,0,0,0, 7,0,0,0]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); for _ in 0..16 { rng.next_u32(); } } let expected = [0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036, 0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384, 0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530, 0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4]; assert_eq!(results, expected); } #[test]
#[test] fn test_chacha_set_counter() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); rng.set_counter(0, 2u64 << 56); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x374dc6c2, 0x3736d58c, 0xb904e24a, 0xcd3f93ef, 0x88228b1a, 0x96a4dfb3, 0x5b76ab72, 0xc727ee54, 0x0e0e978a, 0xf3145c95, 0x1b748ea8, 0xf786c297, 0x99c28f5f, 0x628314e8, 0x398a19fa, 0x6ded1b53]; assert_eq!(results, expected); } #[test] fn test_chacha_set_rounds() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); rng.set_rounds(8); let mut results = [0u32; 16]; for i in results.iter_mut() { *i = rng.next_u32(); } let expected = [0x2fef003e, 0xd6405f89, 0xe8b85b7f, 0xa1a5091f, 0xc30e842c, 0x3b7f9ace, 0x88e11b18, 0x1e1a71ef, 0x72e14c98, 0x416f21b9, 0x6753449f, 0x19566d45, 0xa3424a31, 0x01b086da, 0xb8fd7b38, 0x42fe0c0e]; assert_eq!(results, expected); } #[test] fn test_chacha_clone() { let seed = [0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0, 4,0,0,0, 5,0,0,0, 6,0,0,0, 7,0,0,0]; let mut rng = ChaChaRng::from_seed(seed); let mut clone = rng.clone(); for _ in 0..16 { assert_eq!(rng.next_u64(), clone.next_u64()); } } }
fn test_chacha_true_bytes() { let seed = [0u8; 32]; let mut rng = ChaChaRng::from_seed(seed); let mut results = [0u8; 32]; rng.fill_bytes(&mut results); let expected = [118, 184, 224, 173, 160, 241, 61, 144, 64, 93, 106, 229, 83, 134, 189, 40, 189, 210, 25, 184, 160, 141, 237, 26, 168, 54, 239, 204, 139, 119, 13, 199]; assert_eq!(results, expected); }
function_block-full_function
[ { "content": "/// Implement `fill_bytes` via `next_u64`, little-endian order.\n\npub fn fill_bytes_via_u64<R: RngCore + ?Sized>(rng: &mut R, dest: &mut [u8]) {\n\n fill_bytes_via!(rng, next_u64, 8, dest)\n\n}\n\n\n\nmacro_rules! impl_uint_from_fill {\n\n ($rng:expr, $ty:ty, $N:expr) => ({\n\n debug_assert!($N == size_of::<$ty>());\n\n\n\n let mut int: $ty = 0;\n\n unsafe {\n\n let ptr = &mut int as *mut $ty as *mut u8;\n\n let slice = slice::from_raw_parts_mut(ptr, $N);\n\n $rng.fill_bytes(slice);\n\n }\n\n int\n\n });\n\n}\n\n\n\nmacro_rules! fill_via_chunks {\n\n ($src:expr, $dst:expr, $ty:ty, $size:expr) => ({\n", "file_path": "rand_core/src/impls.rs", "rank": 0, "score": 306034.65432517696 }, { "content": "/// Implement `fill_bytes` via `next_u32`, little-endian order.\n\npub fn fill_bytes_via_u32<R: RngCore + ?Sized>(rng: &mut R, dest: &mut [u8]) {\n\n fill_bytes_via!(rng, next_u32, 4, dest)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 1, "score": 306021.4058850302 }, { "content": "/// Implement `next_u64` via `next_u32`, little-endian order.\n\npub fn next_u64_via_u32<R: RngCore + ?Sized>(rng: &mut R) -> u64 {\n\n // Use LE; we explicitly generate one value before the next.\n\n let x = u64::from(rng.next_u32());\n\n let y = u64::from(rng.next_u32());\n\n (y << 32) | x\n\n}\n\n\n\nmacro_rules! fill_bytes_via {\n\n ($rng:ident, $next_u:ident, $BYTES:expr, $dest:ident) => {{\n\n let mut left = $dest;\n\n while left.len() >= $BYTES {\n\n let (l, r) = {left}.split_at_mut($BYTES);\n\n left = r;\n\n let chunk: [u8; $BYTES] = unsafe {\n\n transmute($rng.$next_u().to_le())\n\n };\n\n l.copy_from_slice(&chunk);\n\n }\n\n let n = left.len();\n\n if n > 0 {\n\n let chunk: [u8; $BYTES] = unsafe {\n\n transmute($rng.$next_u().to_le())\n\n };\n\n left.copy_from_slice(&chunk[..n]);\n\n }\n\n }}\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 2, "score": 289019.312618523 }, { "content": "/// Implement `next_u64` via `fill_bytes`, little-endian order.\n\npub fn next_u64_via_fill<R: RngCore + ?Sized>(rng: &mut R) -> u64 {\n\n impl_uint_from_fill!(rng, u64, 8)\n\n}\n\n\n\n/// Wrapper around PRNGs that implement [`BlockRngCore`] to keep a results\n\n/// buffer and offer the methods from [`RngCore`].\n\n///\n\n/// `BlockRng` has heavily optimized implementations of the [`RngCore`] methods\n\n/// reading values from the results buffer, as well as\n\n/// calling `BlockRngCore::generate` directly on the output array when\n\n/// `fill_bytes` / `try_fill_bytes` is called on a large array. 
These methods\n\n/// also handle the bookkeeping of when to generate a new batch of values.\n\n/// No generated values are ever thown away.\n\n///\n\n/// Currently `BlockRng` only implements `RngCore` for buffers which are slices\n\n/// of `u32` elements; this may be extended to other types in the future.\n\n///\n\n/// For easy initialization `BlockRng` also implements [`SeedableRng`].\n\n///\n\n/// [`BlockRngCore`]: ../BlockRngCore.t.html\n", "file_path": "rand_core/src/impls.rs", "rank": 3, "score": 269128.5250346506 }, { "content": "/// Implement `next_u32` via `fill_bytes`, little-endian order.\n\npub fn next_u32_via_fill<R: RngCore + ?Sized>(rng: &mut R) -> u32 {\n\n impl_uint_from_fill!(rng, u32, 4)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 4, "score": 269016.7892652724 }, { "content": "/// Implement `fill_bytes` by reading chunks from the output buffer of a block\n\n/// based RNG.\n\n///\n\n/// The return values are `(consumed_u64, filled_u8)`.\n\n/// `filled_u8` is the number of filled bytes in `dest`, which may be less than\n\n/// the length of `dest`.\n\n/// `consumed_u64` is the number of words consumed from `src`, which is the same\n\n/// as `filled_u8 / 8` rounded up.\n\n///\n\n/// See `fill_via_u32_chunks` for an example.\n\npub fn fill_via_u64_chunks(src: &[u64], dest: &mut [u8]) -> (usize, usize) {\n\n fill_via_chunks!(src, dest, u64, 8)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 5, "score": 264012.9161006659 }, { "content": "/// Implement `fill_bytes` by reading chunks from the output buffer of a block\n\n/// based RNG.\n\n///\n\n/// The return values are `(consumed_u32, filled_u8)`.\n\n///\n\n/// `filled_u8` is the number of filled bytes in `dest`, which may be less than\n\n/// the length of `dest`.\n\n/// `consumed_u32` is the number of words consumed from `src`, which is the same\n\n/// as `filled_u8 / 4` rounded up.\n\n///\n\n/// # Example\n\n/// (from `IsaacRng`)\n\n///\n\n/// ```rust,ignore\n\n/// fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n/// let mut read_len = 0;\n\n/// while read_len < dest.len() {\n\n/// if self.index >= self.rsl.len() {\n\n/// self.isaac();\n\n/// }\n\n///\n\n/// let (consumed_u32, filled_u8) =\n\n/// impls::fill_via_u32_chunks(&mut self.rsl[self.index..],\n\n/// &mut dest[read_len..]);\n\n///\n\n/// self.index += consumed_u32;\n\n/// read_len += filled_u8;\n\n/// }\n\n/// }\n\n/// ```\n\npub fn fill_via_u32_chunks(src: &[u32], dest: &mut [u8]) -> (usize, usize) {\n\n fill_via_chunks!(src, dest, u32, 4)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 6, "score": 263913.3341893796 }, { "content": "#[inline]\n\npub fn read_u64_into(src: &[u8], dst: &mut [u64]) {\n\n read_slice!(src, dst, 8, to_le);\n\n}\n\n\n", "file_path": "rand_core/src/le.rs", "rank": 7, "score": 226598.13142919575 }, { "content": "#[inline]\n\npub fn read_u32_into(src: &[u8], dst: &mut [u32]) {\n\n read_slice!(src, dst, 4, to_le);\n\n}\n\n\n\n/// Reads unsigned 64 bit integers from `src` into `dst`.\n\n/// Borrowed from the `byteorder` crate.\n", "file_path": "rand_core/src/le.rs", "rank": 8, "score": 226470.78717731434 }, { "content": "// Returns the door the game host opens given our choice and knowledge of\n\n// where the car is. 
The game host will never open the door with the car.\n\nfn game_host_open<R: Rng>(car: u32, choice: u32, rng: &mut R) -> u32 {\n\n let choices = free_doors(&[car, choice]);\n\n rand::seq::sample_slice(rng, &choices, 1)[0]\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 9, "score": 224318.92166921875 }, { "content": "/// Randomly sample exactly `amount` indices from `0..length`.\n\n///\n\n/// The values are non-repeating and in random order.\n\n///\n\n/// This implementation uses `O(amount)` time and memory.\n\n///\n\n/// This method is used internally by the slice sampling methods, but it can sometimes be useful to\n\n/// have the indices themselves so this is provided as an alternative.\n\n///\n\n/// Panics if `amount > length`\n\npub fn sample_indices<R>(rng: &mut R, length: usize, amount: usize) -> Vec<usize>\n\n where R: Rng + ?Sized,\n\n{\n\n if amount > length {\n\n panic!(\"`amount` must be less than or equal to `slice.len()`\");\n\n }\n\n\n\n // We are going to have to allocate at least `amount` for the output no matter what. However,\n\n // if we use the `cached` version we will have to allocate `amount` as a HashMap as well since\n\n // it inserts an element for every loop.\n\n //\n\n // Therefore, if `amount >= length / 2` then inplace will be both faster and use less memory.\n\n // In fact, benchmarks show the inplace version is faster for length up to about 20 times\n\n // faster than amount.\n\n //\n\n // TODO: there is probably even more fine-tuning that can be done here since\n\n // `HashMap::with_capacity(amount)` probably allocates more than `amount` in practice,\n\n // and a trade off could probably be made between memory/cpu, since hashmap operations\n\n // are slower than array index swapping.\n\n if amount >= length / 20 {\n\n sample_indices_inplace(rng, length, amount)\n\n } else {\n\n sample_indices_cache(rng, length, amount)\n\n }\n\n}\n\n\n", "file_path": "src/seq.rs", "rank": 10, "score": 215989.4181953139 }, { "content": "// Run a single simulation of the Monty Hall problem.\n\nfn simulate<R: Rng>(random_door: &Range<u32>, rng: &mut R)\n\n -> SimulationResult {\n\n let car = random_door.sample(rng);\n\n\n\n // This is our initial choice\n\n let mut choice = random_door.sample(rng);\n\n\n\n // The game host opens a door\n\n let open = game_host_open(car, choice, rng);\n\n\n\n // Shall we switch?\n\n let switch = rng.gen();\n\n if switch {\n\n choice = switch_door(choice, open);\n\n }\n\n\n\n SimulationResult { win: choice == car, switch }\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 11, "score": 214940.25702038966 }, { "content": "/// Randomly sample `amount` elements from a finite iterator.\n\n///\n\n/// The following can be returned:\n\n///\n\n/// - `Ok`: `Vec` of `amount` non-repeating randomly sampled elements. The order is not random.\n\n/// - `Err`: `Vec` of all the elements from `iterable` in sequential order. This happens when the\n\n/// length of `iterable` was less than `amount`. 
This is considered an error since exactly\n\n/// `amount` elements is typically expected.\n\n///\n\n/// This implementation uses `O(len(iterable))` time and `O(amount)` memory.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rand::{thread_rng, seq};\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let sample = seq::sample_iter(&mut rng, 1..100, 5).unwrap();\n\n/// println!(\"{:?}\", sample);\n\n/// ```\n\npub fn sample_iter<T, I, R>(rng: &mut R, iterable: I, amount: usize) -> Result<Vec<T>, Vec<T>>\n\n where I: IntoIterator<Item=T>,\n\n R: Rng + ?Sized,\n\n{\n\n let mut iter = iterable.into_iter();\n\n let mut reservoir = Vec::with_capacity(amount);\n\n reservoir.extend(iter.by_ref().take(amount));\n\n\n\n // Continue unless the iterator was exhausted\n\n //\n\n // note: this prevents iterators that \"restart\" from causing problems.\n\n // If the iterator stops once, then so do we.\n\n if reservoir.len() == amount {\n\n for (i, elem) in iter.enumerate() {\n\n let k = rng.gen_range(0, i + 1 + amount);\n\n if let Some(spot) = reservoir.get_mut(k) {\n\n *spot = elem;\n\n }\n\n }\n\n Ok(reservoir)\n\n } else {\n\n // Don't hang onto extra memory. There is a corner case where\n\n // `amount` was much less than `len(iterable)`.\n\n reservoir.shrink_to_fit();\n\n Err(reservoir)\n\n }\n\n}\n\n\n", "file_path": "src/seq.rs", "rank": 12, "score": 203313.36015100806 }, { "content": "#[cfg(feature=\"std\")]\n\n#[inline(always)]\n\n#[deprecated(since=\"0.4.0\", note=\"renamed to seq::sample_iter\")]\n\npub fn sample<T, I, R>(rng: &mut R, iterable: I, amount: usize) -> Vec<T>\n\n where I: IntoIterator<Item=T>,\n\n R: Rng,\n\n{\n\n // the legacy sample didn't care whether amount was met\n\n seq::sample_iter(rng, iterable, amount)\n\n .unwrap_or_else(|e| e)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use mock::StepRng;\n\n use super::*;\n\n #[cfg(all(not(feature=\"std\"), feature=\"alloc\"))] use alloc::boxed::Box;\n\n\n\n pub struct TestRng<R> { inner: R }\n\n\n\n impl<R: RngCore> RngCore for TestRng<R> {\n\n fn next_u32(&mut self) -> u32 {\n\n self.inner.next_u32()\n", "file_path": "src/lib.rs", "rank": 13, "score": 197806.14826991872 }, { "content": "/// Randomly sample exactly `amount` values from `slice`.\n\n///\n\n/// The values are non-repeating and in random order.\n\n///\n\n/// This implementation uses `O(amount)` time and memory.\n\n///\n\n/// Panics if `amount > slice.len()`\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rand::{thread_rng, seq};\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let values = vec![5, 6, 1, 3, 4, 6, 7];\n\n/// println!(\"{:?}\", seq::sample_slice(&mut rng, &values, 3));\n\n/// ```\n\npub fn sample_slice<R, T>(rng: &mut R, slice: &[T], amount: usize) -> Vec<T>\n\n where R: Rng + ?Sized,\n\n T: Clone\n\n{\n\n let indices = sample_indices(rng, slice.len(), amount);\n\n\n\n let mut out = Vec::with_capacity(amount);\n\n out.extend(indices.iter().map(|i| slice[*i].clone()));\n\n out\n\n}\n\n\n", "file_path": "src/seq.rs", "rank": 14, "score": 191636.74069779765 }, { "content": "/// Sample an amount of indices using an inplace partial fisher yates method.\n\n///\n\n/// This allocates the entire `length` of indices and randomizes only the first `amount`.\n\n/// It then truncates to `amount` and returns.\n\n///\n\n/// This is better than using a `HashMap` \"cache\" when `amount >= length / 2`\n\n/// since it does not require allocating an extra cache and is much faster.\n\nfn sample_indices_inplace<R>(rng: &mut R, length: usize, amount: usize) -> 
Vec<usize>\n\n where R: Rng + ?Sized,\n\n{\n\n debug_assert!(amount <= length);\n\n let mut indices: Vec<usize> = Vec::with_capacity(length);\n\n indices.extend(0..length);\n\n for i in 0..amount {\n\n let j: usize = rng.gen_range(i, length);\n\n indices.swap(i, j);\n\n }\n\n indices.truncate(amount);\n\n debug_assert_eq!(indices.len(), amount);\n\n indices\n\n}\n\n\n\n\n", "file_path": "src/seq.rs", "rank": 15, "score": 186708.285371516 }, { "content": "/// An automatically-implemented extension trait on [`RngCore`] providing high-level\n\n/// generic methods for sampling values and other convenience methods.\n\n/// \n\n/// This is the primary trait to use when generating random values.\n\n/// \n\n/// # Generic usage\n\n/// \n\n/// The basic pattern is `fn foo<R: Rng + ?Sized>(rng: &mut R)`. Some\n\n/// things are worth noting here:\n\n/// \n\n/// - Since `Rng: RngCore` and every `RngCore` implements `Rng`, it makes no\n\n/// difference whether we use `R: Rng` or `R: RngCore`.\n\n/// - The `+ ?Sized` un-bounding allows functions to be called directly on\n\n/// type-erased references; i.e. `foo(r)` where `r: &mut RngCore`. Without\n\n/// this it would be necessary to write `foo(&mut r)`.\n\n/// \n\n/// An alternative pattern is possible: `fn foo<R: Rng>(rng: R)`. This has some\n\n/// trade-offs. It allows the argument to be consumed directly without a `&mut`\n\n/// (which is how `from_rng(thread_rng())` works); also it still works directly\n\n/// on references (including type-erased references). Unfortunately within the\n\n/// function `foo` it is not known whether `rng` is a reference type or not,\n\n/// hence many uses of `rng` require an extra reference, either explicitly\n\n/// (`distr.sample(&mut rng)`) or implicitly (`rng.gen()`); one may hope the\n\n/// optimiser can remove redundant references later.\n\n/// \n\n/// Example:\n\n/// \n\n/// ```rust\n\n/// # use rand::thread_rng;\n\n/// use rand::Rng;\n\n/// \n\n/// fn foo<R: Rng + ?Sized>(rng: &mut R) -> f32 {\n\n/// rng.gen()\n\n/// }\n\n///\n\n/// # let v = foo(&mut thread_rng());\n\n/// ```\n\n/// \n\n/// [`RngCore`]: trait.RngCore.html\n\npub trait Rng: RngCore {\n\n /// Return a random value supporting the [`Standard`] distribution.\n\n ///\n\n /// [`Standard`]: distributions/struct.Standard.html\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use rand::{thread_rng, Rng};\n\n ///\n\n /// let mut rng = thread_rng();\n\n /// let x: u32 = rng.gen();\n\n /// println!(\"{}\", x);\n\n /// println!(\"{:?}\", rng.gen::<(f64, bool)>());\n\n /// ```\n\n #[inline(always)]\n\n fn gen<T>(&mut self) -> T where Standard: Distribution<T> {\n\n Standard.sample(self)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 186032.03016218595 }, { "content": "#[cfg(feature=\"std\")]\n\npub trait NewRng: SeedableRng {\n\n /// Creates a new instance, automatically seeded with fresh entropy.\n\n ///\n\n /// Normally this will use `OsRng`, but if that fails `JitterRng` will be\n\n /// used instead. Both should be suitable for cryptography. It is possible\n\n /// that both entropy sources will fail though unlikely; failures would\n\n /// almost certainly be platform limitations or build issues, i.e. most\n\n /// applications targetting PC/mobile platforms should not need to worry\n\n /// about this failing.\n\n ///\n\n /// If all entropy sources fail this will panic. 
If you need to handle\n\n /// errors, use the following code, equivalent aside from error handling:\n\n ///\n\n /// ```rust\n\n /// # use rand::Error;\n\n /// use rand::{Rng, StdRng, EntropyRng, SeedableRng};\n\n ///\n\n /// # fn try_inner() -> Result<(), Error> {\n\n /// // This uses StdRng, but is valid for any R: SeedableRng\n\n /// let mut rng = StdRng::from_rng(EntropyRng::new())?;\n", "file_path": "src/lib.rs", "rank": 17, "score": 182793.20550830226 }, { "content": "/// Randomly sample exactly `amount` references from `slice`.\n\n///\n\n/// The references are non-repeating and in random order.\n\n///\n\n/// This implementation uses `O(amount)` time and memory.\n\n///\n\n/// Panics if `amount > slice.len()`\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rand::{thread_rng, seq};\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let values = vec![5, 6, 1, 3, 4, 6, 7];\n\n/// println!(\"{:?}\", seq::sample_slice_ref(&mut rng, &values, 3));\n\n/// ```\n\npub fn sample_slice_ref<'a, R, T>(rng: &mut R, slice: &'a [T], amount: usize) -> Vec<&'a T>\n\n where R: Rng + ?Sized\n\n{\n\n let indices = sample_indices(rng, slice.len(), amount);\n\n\n\n let mut out = Vec::with_capacity(amount);\n\n out.extend(indices.iter().map(|i| &slice[*i]));\n\n out\n\n}\n\n\n", "file_path": "src/seq.rs", "rank": 18, "score": 180509.69103357883 }, { "content": "/// A marker trait used to indicate that an [`RngCore`] or [`BlockRngCore`]\n\n/// implementation is supposed to be cryptographically secure.\n\n/// \n\n/// *Cryptographically secure generators*, also known as *CSPRNGs*, should\n\n/// satisfy an additional properties over other generators: given the first\n\n/// *k* bits of an algorithm's output\n\n/// sequence, it should not be possible using polynomial-time algorithms to\n\n/// predict the next bit with probability significantly greater than 50%.\n\n/// \n\n/// Some generators may satisfy an additional property, however this is not\n\n/// required by this trait: if the CSPRNG's state is revealed, it should not be\n\n/// computationally-feasible to reconstruct output prior to this. Some other\n\n/// generators allow backwards-computation and are consided *reversible*.\n\n/// \n\n/// Note that this trait is provided for guidance only and cannot guarantee\n\n/// suitability for cryptographic applications. In general it should only be\n\n/// implemented for well-reviewed code implementing well-regarded algorithms.\n\n/// \n\n/// Note also that use of a `CryptoRng` does not protect against other\n\n/// weaknesses such as seeding from a weak entropy source or leaking state.\n\n/// \n\n/// [`RngCore`]: trait.RngCore.html\n\n/// [`BlockRngCore`]: trait.BlockRngCore.html\n\npub trait CryptoRng {}\n\n\n", "file_path": "rand_core/src/lib.rs", "rank": 19, "score": 156824.81012565384 }, { "content": "/// The core of a random number generator.\n\n/// \n\n/// This trait encapsulates the low-level functionality common to all\n\n/// generators, and is the \"back end\", to be implemented by generators.\n\n/// End users should normally use [`Rng`] from the [rand] crate, which is\n\n/// automatically implemented for every type implementing `RngCore`.\n\n/// \n\n/// Three different methods for generating random data are provided since the\n\n/// optimal implementation of each is dependent on the type of generator. There\n\n/// is no required relationship between the output of each; e.g. 
many\n\n/// implementations of [`fill_bytes`] consume a whole number of `u32` or `u64`\n\n/// values and drop any remaining unused bytes.\n\n/// \n\n/// The [`try_fill_bytes`] method is a variant of [`fill_bytes`] allowing error\n\n/// handling; it is not deemed sufficiently useful to add equivalents for\n\n/// [`next_u32`] or [`next_u64`] since the latter methods are almost always used\n\n/// with algorithmic generators (PRNGs), which are normally infallible.\n\n/// \n\n/// Algorithmic generators implementing [`SeedableRng`] should normally have\n\n/// *portable, reproducible* output, i.e. fix Endianness when converting values\n\n/// to avoid platform differences, and avoid making any changes which affect\n\n/// output (except by communicating that the release has breaking changes).\n\n/// \n\n/// Typically implementators will implement only one of the methods available\n\n/// in this trait directly, then use the helper functions from the\n\n/// [`rand_core::impls`] module to implement the other methods.\n\n/// \n\n/// It is recommended that implementations also implement:\n\n/// \n\n/// - `Debug` with a custom implementation which *does not* print any internal\n\n/// state (at least, [`CryptoRng`]s should not risk leaking state through Debug)\n\n/// - `Serialize` and `Deserialize` (from Serde), preferably making Serde\n\n/// support optional at the crate level in PRNG libs\n\n/// - `Clone` if, and only if, the clone will have identical output to the\n\n/// original (i.e. all deterministic PRNGs but not external generators)\n\n/// - *never* implement `Copy` (accidental copies may cause repeated values)\n\n/// - also *do not* implement `Default`, but instead implement `SeedableRng`\n\n/// thus allowing use of `rand::NewRng` (which is automatically implemented)\n\n/// - `Eq` and `PartialEq` could be implemented, but are probably not useful\n\n/// \n\n/// # Example\n\n/// \n\n/// A simple example, obviously not generating very *random* output:\n\n/// \n\n/// ```rust\n\n/// use rand_core::{RngCore, Error, impls};\n\n/// \n\n/// struct CountingRng(u64);\n\n/// \n\n/// impl RngCore for CountingRng {\n\n/// fn next_u32(&mut self) -> u32 {\n\n/// self.next_u64() as u32\n\n/// }\n\n/// \n\n/// fn next_u64(&mut self) -> u64 {\n\n/// self.0 += 1;\n\n/// self.0\n\n/// }\n\n/// \n\n/// fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n/// impls::fill_bytes_via_u64(self, dest)\n\n/// }\n\n/// \n\n/// fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n/// Ok(self.fill_bytes(dest))\n\n/// }\n\n/// }\n\n/// ```\n\n/// \n\n/// [rand]: https://crates.io/crates/rand\n\n/// [`Rng`]: ../rand/trait.Rng.html\n\n/// [`SeedableRng`]: trait.SeedableRng.html\n\n/// [`rand_core::impls`]: ../rand_core/impls/index.html\n\n/// [`try_fill_bytes`]: trait.RngCore.html#tymethod.try_fill_bytes\n\n/// [`fill_bytes`]: trait.RngCore.html#tymethod.fill_bytes\n\n/// [`next_u32`]: trait.RngCore.html#tymethod.next_u32\n\n/// [`next_u64`]: trait.RngCore.html#tymethod.next_u64\n\n/// [`CryptoRng`]: trait.CryptoRng.html\n\npub trait RngCore {\n\n /// Return the next random `u32`.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. In\n\n /// the case this method is not implemented directly, it can be implemented\n\n /// using `self.next_u64() as u32` or\n\n /// [via `fill_bytes`](../rand_core/impls/fn.next_u32_via_fill.html).\n\n fn next_u32(&mut self) -> u32;\n\n\n\n /// Return the next random `u64`.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. 
In\n\n /// the case this method is not implemented directly, it can be implemented\n\n /// [via `next_u32`](../rand_core/impls/fn.next_u64_via_u32.html) or\n\n /// [via `fill_bytes`](../rand_core/impls/fn.next_u64_via_fill.html).\n\n fn next_u64(&mut self) -> u64;\n\n\n\n /// Fill `dest` with random data.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. In\n", "file_path": "rand_core/src/lib.rs", "rank": 20, "score": 156530.43088169408 }, { "content": "#[cfg(feature=\"std\")]\n\n#[inline(always)]\n\nfn ziggurat<R: Rng + ?Sized, P, Z>(\n\n rng: &mut R,\n\n symmetric: bool,\n\n x_tab: ziggurat_tables::ZigTable,\n\n f_tab: ziggurat_tables::ZigTable,\n\n mut pdf: P,\n\n mut zero_case: Z)\n\n -> f64 where P: FnMut(f64) -> f64, Z: FnMut(&mut R, f64) -> f64 {\n\n loop {\n\n // As an optimisation we re-implement the conversion to a f64.\n\n // From the remaining 12 most significant bits we use 8 to construct `i`.\n\n // This saves us generating a whole extra random number, while the added\n\n // precision of using 64 bits for f64 does not buy us much.\n\n let bits = rng.next_u64();\n\n let i = bits as usize & 0xff;\n\n\n\n let u = if symmetric {\n\n // Convert to a value in the range [2,4) and substract to get [-1,1)\n\n // We can't convert to an open range directly, that would require\n\n // substracting `3.0 - EPSILON`, which is not representable.\n", "file_path": "src/distributions/mod.rs", "rank": 21, "score": 153697.92431218177 }, { "content": "/// A random number generator that can be explicitly seeded.\n\n///\n\n/// This trait encapsulates the low-level functionality common to all\n\n/// pseudo-random number generators (PRNGs, or algorithmic generators).\n\n/// \n\n/// The [`rand::NewRng`] trait is automatically implemented for every type\n\n/// implementing `SeedableRng`, providing a convenient `new()` method.\n\n/// \n\n/// [`rand::NewRng`]: ../rand/trait.NewRng.html\n\npub trait SeedableRng: Sized {\n\n /// Seed type, which is restricted to types mutably-dereferencable as `u8`\n\n /// arrays (we recommend `[u8; N]` for some `N`).\n\n ///\n\n /// It is recommended to seed PRNGs with a seed of at least circa 100 bits,\n\n /// which means an array of `[u8; 12]` or greater to avoid picking RNGs with\n\n /// partially overlapping periods.\n\n ///\n\n /// For cryptographic RNG's a seed of 256 bits is recommended, `[u8; 32]`.\n\n type Seed: Sized + Default + AsMut<[u8]>;\n\n\n\n /// Create a new PRNG using the given seed.\n\n ///\n\n /// PRNG implementations are allowed to assume that bits in the seed are\n\n /// well distributed. That means usually that the number of one and zero\n\n /// bits are about equal, and values like 0, 1 and (size - 1) are unlikely.\n\n ///\n\n /// PRNG implementations are recommended to be reproducible. A PRNG seeded\n\n /// using this function with a fixed seed should produce the same sequence\n\n /// of output in the future and on different architectures (with for example\n", "file_path": "rand_core/src/lib.rs", "rank": 22, "score": 146110.4621755206 }, { "content": "#[bench]\n\nfn gen_u64_jitter(b: &mut Bencher) {\n\n let mut rng = JitterRng::new().unwrap();\n\n b.iter(|| {\n\n black_box(rng.gen::<u64>());\n\n });\n\n b.bytes = size_of::<u64>() as u64;\n\n}\n\n\n\nmacro_rules! 
init_gen {\n\n ($fnn:ident, $gen:ident) => {\n\n #[bench]\n\n fn $fnn(b: &mut Bencher) {\n\n let mut rng = XorShiftRng::new();\n\n b.iter(|| {\n\n let r2 = $gen::from_rng(&mut rng).unwrap();\n\n black_box(r2);\n\n });\n\n }\n\n }\n\n}\n\n\n\ninit_gen!(init_xorshift, XorShiftRng);\n\ninit_gen!(init_hc128, Hc128Rng);\n\ninit_gen!(init_isaac, IsaacRng);\n\ninit_gen!(init_isaac64, Isaac64Rng);\n\ninit_gen!(init_chacha, ChaChaRng);\n\n\n", "file_path": "benches/generators.rs", "rank": 23, "score": 142446.30200120184 }, { "content": "/// Retrieve the lazily-initialized thread-local random number\n\n/// generator, seeded by the system. Intended to be used in method\n\n/// chaining style, e.g. `thread_rng().gen::<i32>()`, or cached locally, e.g.\n\n/// `let mut rng = thread_rng();`.\n\n///\n\n/// `ThreadRng` uses [`ReseedingRng`] wrapping the same PRNG as [`StdRng`],\n\n/// which is reseeded after generating 32 MiB of random data. A single instance\n\n/// is cached per thread and the returned `ThreadRng` is a reference to this\n\n/// instance — hence `ThreadRng` is neither `Send` nor `Sync` but is safe to use\n\n/// within a single thread. This RNG is seeded and reseeded via [`EntropyRng`]\n\n/// as required.\n\n/// \n\n/// Note that the reseeding is done as an extra precaution against entropy\n\n/// leaks and is in theory unnecessary — to predict `thread_rng`'s output, an\n\n/// attacker would have to either determine most of the RNG's seed or internal\n\n/// state, or crack the algorithm used.\n\n/// \n\n/// Like [`StdRng`], `ThreadRng` is a cryptographically secure PRNG. The current\n\n/// algorithm used is [HC-128], which is an array-based PRNG that trades memory\n\n/// usage for better performance. This makes it similar to ISAAC, the algorithm\n\n/// used in `ThreadRng` before rand 0.5.\n\n///\n\n/// [`ReseedingRng`]: reseeding/struct.ReseedingRng.html\n\n/// [`StdRng`]: struct.StdRng.html\n\n/// [`EntropyRng`]: struct.EntropyRng.html\n\n/// [HC-128]: prng/hc128/struct.Hc128Rng.html\n\npub fn thread_rng() -> ThreadRng {\n\n ThreadRng { rng: THREAD_RNG_KEY.with(|t| t.clone()) }\n\n}\n\n\n\nimpl RngCore for ThreadRng {\n\n #[inline(always)]\n\n fn next_u32(&mut self) -> u32 {\n\n unsafe { (*self.rng.get()).next_u32() }\n\n }\n\n\n\n #[inline(always)]\n\n fn next_u64(&mut self) -> u64 {\n\n unsafe { (*self.rng.get()).next_u64() }\n\n }\n\n\n\n fn fill_bytes(&mut self, bytes: &mut [u8]) {\n\n unsafe { (*self.rng.get()).fill_bytes(bytes) }\n\n }\n\n\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n", "file_path": "src/thread_rng.rs", "rank": 24, "score": 139438.07182607183 }, { "content": "/// Trait for casting types to byte slices\n\n/// \n\n/// This is used by the [`fill`] and [`try_fill`] methods.\n\n/// \n\n/// [`fill`]: trait.Rng.html#method.fill\n\n/// [`try_fill`]: trait.Rng.html#method.try_fill\n\npub trait AsByteSliceMut {\n\n /// Return a mutable reference to self as a byte slice\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8];\n\n \n\n /// Call `to_le` on each element (i.e. byte-swap on Big Endian platforms).\n\n fn to_le(&mut self);\n\n}\n\n\n\nimpl AsByteSliceMut for [u8] {\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8] {\n\n self\n\n }\n\n \n\n fn to_le(&mut self) {}\n\n}\n\n\n\nmacro_rules! 
impl_as_byte_slice {\n\n ($t:ty) => {\n\n impl AsByteSliceMut for [$t] {\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8] {\n", "file_path": "src/lib.rs", "rank": 25, "score": 136885.9195129733 }, { "content": "#[deprecated(since=\"0.5.0\", note=\"removed in favor of SmallRng\")]\n\n#[cfg(feature=\"std\")]\n\npub fn weak_rng() -> XorShiftRng {\n\n XorShiftRng::from_rng(thread_rng()).unwrap_or_else(|err|\n\n panic!(\"weak_rng failed: {:?}\", err))\n\n}\n\n\n\n/// DEPRECATED: use `seq::sample_iter` instead.\n\n///\n\n/// Randomly sample up to `amount` elements from a finite iterator.\n\n/// The order of elements in the sample is not random.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # #![allow(deprecated)]\n\n/// use rand::{thread_rng, sample};\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let sample = sample(&mut rng, 1..100, 5);\n\n/// println!(\"{:?}\", sample);\n\n/// ```\n", "file_path": "src/lib.rs", "rank": 26, "score": 133475.2584464866 }, { "content": "/// Types (distributions) that can be used to create a random instance of `T`.\n\n///\n\n/// All implementations are expected to be immutable; this has the significant\n\n/// advantage of not needing to consider thread safety, and for most\n\n/// distributions efficient state-less sampling algorithms are available.\n\npub trait Distribution<T> {\n\n /// Generate a random value of `T`, using `rng` as the source of randomness.\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T;\n\n\n\n /// Create an iterator that generates random values of `T`, using `rng` as\n\n /// the source of randomness.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use rand::thread_rng;\n\n /// use rand::distributions::{Distribution, Alphanumeric, Range, Standard};\n\n ///\n\n /// let mut rng = thread_rng();\n\n ///\n\n /// // Vec of 16 x f32:\n\n /// let v: Vec<f32> = Standard.sample_iter(&mut rng).take(16).collect();\n\n ///\n\n /// // String:\n\n /// let s: String = Alphanumeric.sample_iter(&mut rng).take(7).collect();\n", "file_path": "src/distributions/mod.rs", "rank": 27, "score": 132413.63658626075 }, { "content": "#[deprecated(since=\"0.5.0\", note=\"use Distribution instead\")]\n\npub trait Sample<Support> {\n\n /// Generate a random value of `Support`, using `rng` as the\n\n /// source of randomness.\n\n fn sample<R: Rng>(&mut self, rng: &mut R) -> Support;\n\n}\n\n\n\n/// `Sample`s that do not require keeping track of state.\n\n///\n\n/// Since no state is recorded, each sample is (statistically)\n\n/// independent of all others, assuming the `Rng` used has this\n\n/// property.\n", "file_path": "src/distributions/mod.rs", "rank": 28, "score": 132403.3170187438 }, { "content": "/// A trait for RNGs which do not generate random numbers individually, but in\n\n/// blocks (typically `[u32; N]`). 
This technique is commonly used by\n\n/// cryptographic RNGs to improve performance.\n\n/// \n\n/// Usage of this trait is optional, but provides two advantages:\n\n/// implementations only need to concern themselves with generation of the\n\n/// block, not the various [`RngCore`] methods (especially [`fill_bytes`], where the\n\n/// optimal implementations are not trivial), and this allows `ReseedingRng` to\n\n/// perform periodic reseeding with very low overhead.\n\n/// \n\n/// # Example\n\n/// \n\n/// ```norun\n\n/// use rand_core::BlockRngCore;\n\n/// use rand_core::impls::BlockRng;\n\n/// \n\n/// struct MyRngCore;\n\n/// \n\n/// impl BlockRngCore for MyRngCore {\n\n/// type Results = [u32; 16];\n\n/// \n\n/// fn generate(&mut self, results: &mut Self::Results) {\n\n/// unimplemented!()\n\n/// }\n\n/// }\n\n/// \n\n/// impl SeedableRng for MyRngCore {\n\n/// type Seed = unimplemented!();\n\n/// fn from_seed(seed: Self::Seed) -> Self {\n\n/// unimplemented!()\n\n/// }\n\n/// }\n\n/// \n\n/// // optionally, also implement CryptoRng for MyRngCore\n\n/// \n\n/// // Final RNG.\n\n/// type MyRng = BlockRng<u32, MyRngCore>;\n\n/// ```\n\n/// \n\n/// [`RngCore`]: trait.RngCore.html\n\n/// [`fill_bytes`]: trait.RngCore.html#tymethod.fill_bytes\n\npub trait BlockRngCore {\n\n /// Results element type, e.g. `u32`.\n\n type Item;\n\n \n\n /// Results type. This is the 'block' an RNG implementing `BlockRngCore`\n\n /// generates, which will usually be an array like `[u32; 16]`.\n\n type Results: AsRef<[Self::Item]> + Default;\n\n\n\n /// Generate a new block of results.\n\n fn generate(&mut self, results: &mut Self::Results);\n\n}\n\n\n", "file_path": "rand_core/src/lib.rs", "rank": 29, "score": 128784.90099457299 }, { "content": "/// Helper trait handling actual range sampling.\n\n///\n\n/// If you want to implement `Range` sampling for your own type, then\n\n/// implement both this trait and `SampleRange`:\n\n///\n\n/// ```rust\n\n/// use rand::{Rng, thread_rng};\n\n/// use rand::distributions::Distribution;\n\n/// use rand::distributions::range::{Range, SampleRange, RangeImpl, RangeFloat};\n\n///\n\n/// #[derive(Clone, Copy, PartialEq, PartialOrd)]\n\n/// struct MyF32(f32);\n\n///\n\n/// #[derive(Clone, Copy, Debug)]\n\n/// struct RangeMyF32 {\n\n/// inner: RangeFloat<f32>,\n\n/// }\n\n/// impl RangeImpl for RangeMyF32 {\n\n/// type X = MyF32;\n\n/// fn new(low: Self::X, high: Self::X) -> Self {\n\n/// RangeMyF32 {\n\n/// inner: RangeFloat::<f32>::new(low.0, high.0),\n\n/// }\n\n/// }\n\n/// fn new_inclusive(low: Self::X, high: Self::X) -> Self {\n\n/// RangeImpl::new(low, high)\n\n/// }\n\n/// fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::X {\n\n/// MyF32(self.inner.sample(rng))\n\n/// }\n\n/// }\n\n///\n\n/// impl SampleRange for MyF32 {\n\n/// type Impl = RangeMyF32;\n\n/// }\n\n///\n\n/// let (low, high) = (MyF32(17.0f32), MyF32(22.0f32));\n\n/// let range = Range::new(low, high);\n\n/// let x = range.sample(&mut thread_rng());\n\n/// ```\n\npub trait RangeImpl: Sized {\n\n /// The type sampled by this implementation.\n\n type X: PartialOrd;\n\n\n\n /// Construct self, with inclusive lower bound and exclusive upper bound\n\n /// `[low, high)`.\n\n ///\n\n /// Usually users should not call this directly but instead use\n\n /// `Range::new`, which asserts that `low < high` before calling this.\n\n fn new(low: Self::X, high: Self::X) -> Self;\n\n\n\n /// Construct self, with inclusive bounds `[low, high]`.\n\n ///\n\n /// Usually users should not call this directly but instead use\n\n 
/// `Range::new_inclusive`, which asserts that `low < high` before calling\n\n /// this.\n\n fn new_inclusive(low: Self::X, high: Self::X) -> Self;\n\n\n\n /// Sample a value.\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::X;\n", "file_path": "src/distributions/range.rs", "rank": 30, "score": 128255.23425578969 }, { "content": "#[bench]\n\nfn misc_shuffle_100(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &mut [usize] = &mut [1; 100];\n\n b.iter(|| {\n\n rng.shuffle(x);\n\n black_box(&x);\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 31, "score": 116136.24101993651 }, { "content": "#[bench]\n\nfn init_jitter(b: &mut Bencher) {\n\n b.iter(|| {\n\n black_box(JitterRng::new().unwrap());\n\n });\n\n}\n\n\n\nmacro_rules! chacha_rounds {\n\n ($fn1:ident, $fn2:ident, $fn3:ident, $rounds:expr) => {\n\n #[bench]\n\n fn $fn1(b: &mut Bencher) {\n\n let mut rng = ChaChaRng::new();\n\n rng.set_rounds($rounds);\n\n let mut buf = [0u8; BYTES_LEN];\n\n b.iter(|| {\n\n for _ in 0..RAND_BENCH_N {\n\n rng.fill_bytes(&mut buf);\n\n black_box(buf);\n\n }\n\n });\n\n b.bytes = BYTES_LEN as u64 * RAND_BENCH_N;\n", "file_path": "benches/generators.rs", "rank": 32, "score": 116136.24101993651 }, { "content": "#[bench]\n\nfn dist_iter(b: &mut Bencher) {\n\n let mut rng = XorShiftRng::new();\n\n let distr = Normal::new(-2.71828, 3.14159);\n\n let mut iter = distr.sample_iter(&mut rng);\n\n\n\n b.iter(|| {\n\n let mut accum = 0.0;\n\n for _ in 0..::RAND_BENCH_N {\n\n accum += iter.next().unwrap();\n\n }\n\n black_box(accum);\n\n });\n\n b.bytes = size_of::<f64>() as u64 * ::RAND_BENCH_N;\n\n}\n", "file_path": "benches/distributions.rs", "rank": 33, "score": 116136.24101993651 }, { "content": "#[bench]\n\nfn gen_1k_fill(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n let mut buf = [0u64; 128];\n\n b.iter(|| {\n\n rng.fill(&mut buf[..]);\n\n black_box(buf);\n\n });\n\n b.bytes = 1024;\n\n}\n", "file_path": "benches/misc.rs", "rank": 34, "score": 116136.24101993651 }, { "content": "#[test]\n\nfn test_read() {\n\n let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n \n\n let mut buf = [0u32; 4];\n\n read_u32_into(&bytes, &mut buf);\n\n assert_eq!(buf[0], 0x04030201);\n\n assert_eq!(buf[3], 0x100F0E0D);\n\n \n\n let mut buf = [0u32; 3];\n\n read_u32_into(&bytes[1..13], &mut buf); // unaligned\n\n assert_eq!(buf[0], 0x05040302);\n\n assert_eq!(buf[2], 0x0D0C0B0A);\n\n \n\n let mut buf = [0u64; 2];\n\n read_u64_into(&bytes, &mut buf);\n\n assert_eq!(buf[0], 0x0807060504030201);\n\n assert_eq!(buf[1], 0x100F0E0D0C0B0A09);\n\n \n\n let mut buf = [0u64; 1];\n\n read_u64_into(&bytes[7..15], &mut buf); // unaligned\n\n assert_eq!(buf[0], 0x0F0E0D0C0B0A0908);\n\n}\n", "file_path": "rand_core/src/le.rs", "rank": 35, "score": 115547.36415809559 }, { "content": "#[allow(deprecated)]\n\n#[deprecated(since=\"0.5.0\", note=\"use Distribution instead\")]\n\npub trait IndependentSample<Support>: Sample<Support> {\n\n /// Generate a random value.\n\n fn ind_sample<R: Rng>(&self, &mut R) -> Support;\n\n}\n\n\n\n#[allow(deprecated)]\n\nmod impls {\n\n use Rng;\n\n use distributions::{Distribution, Sample, IndependentSample,\n\n WeightedChoice};\n\n #[cfg(feature=\"std\")]\n\n use distributions::exponential::Exp;\n\n #[cfg(feature=\"std\")]\n\n use distributions::gamma::{Gamma, ChiSquared, FisherF, StudentT};\n\n #[cfg(feature=\"std\")]\n\n use distributions::normal::{Normal, LogNormal};\n\n use 
distributions::range::{Range, SampleRange};\n\n\n\n impl<'a, T: Clone> Sample<T> for WeightedChoice<'a, T> {\n\n fn sample<R: Rng>(&mut self, rng: &mut R) -> T {\n", "file_path": "src/distributions/mod.rs", "rank": 36, "score": 114082.5097220845 }, { "content": "#[bench]\n\n#[allow(deprecated)]\n\nfn gen_1k_gen_iter(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let v: Vec<u64> = rng.gen_iter().take(128).collect();\n\n black_box(v);\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 37, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn reseeding_hc128_bytes(b: &mut Bencher) {\n\n let mut rng = ReseedingRng::new(Hc128Core::new(),\n\n RESEEDING_THRESHOLD,\n\n EntropyRng::new());\n\n let mut buf = [0u8; BYTES_LEN];\n\n b.iter(|| {\n\n for _ in 0..RAND_BENCH_N {\n\n rng.fill_bytes(&mut buf);\n\n black_box(buf);\n\n }\n\n });\n\n b.bytes = BYTES_LEN as u64 * RAND_BENCH_N;\n\n}\n\n\n\nmacro_rules! reseeding_uint {\n\n ($fnn:ident, $ty:ty) => {\n\n #[bench]\n\n fn $fnn(b: &mut Bencher) {\n\n let mut rng = ReseedingRng::new(Hc128Core::new(),\n\n RESEEDING_THRESHOLD,\n", "file_path": "benches/generators.rs", "rank": 38, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn gen_1k_iter_repeat(b: &mut Bencher) {\n\n use std::iter;\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let v: Vec<u64> = iter::repeat(()).map(|()| rng.gen()).take(128).collect();\n\n black_box(v);\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 39, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn misc_gen_bool(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let mut accum = true;\n\n for _ in 0..::RAND_BENCH_N {\n\n accum ^= rng.gen_bool(0.18);\n\n }\n\n black_box(accum);\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 40, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn misc_sample_iter_10_of_100(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &[usize] = &[1; 100];\n\n b.iter(|| {\n\n black_box(sample_iter(&mut rng, x, 10).unwrap_or_else(|e| e));\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 41, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn misc_sample_slice_10_of_100(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &[usize] = &[1; 100];\n\n b.iter(|| {\n\n black_box(sample_slice(&mut rng, x, 10));\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 42, "score": 113328.85601977633 }, { "content": "#[bench]\n\nfn gen_1k_sample_iter(b: &mut Bencher) {\n\n use rand::distributions::{Distribution, Standard};\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let v: Vec<u64> = Standard.sample_iter(&mut rng).take(128).collect();\n\n black_box(v);\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 43, "score": 113328.85601977633 }, { "content": "// Returns the door we switch to, given our current choice and\n\n// the open door. 
There will only be one valid door.\n\nfn switch_door(choice: u32, open: u32) -> u32 {\n\n free_doors(&[choice, open])[0]\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 44, "score": 112202.4073744379 }, { "content": "#[bench]\n\nfn misc_gen_bool_var(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let mut p = 0.18;\n\n let mut accum = true;\n\n for _ in 0..::RAND_BENCH_N {\n\n accum ^= rng.gen_bool(p);\n\n p += 0.0001;\n\n }\n\n black_box(accum);\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 45, "score": 110770.00404172175 }, { "content": "#[bench]\n\nfn misc_sample_slice_ref_10_of_100(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &[usize] = &[1; 100];\n\n b.iter(|| {\n\n black_box(sample_slice_ref(&mut rng, x, 10));\n\n })\n\n}\n\n\n\nmacro_rules! sample_indices {\n\n ($name:ident, $amount:expr, $length:expr) => {\n\n #[bench]\n\n fn $name(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n b.iter(|| {\n\n black_box(sample_indices(&mut rng, $length, $amount));\n\n })\n\n }\n\n }\n\n}\n\n\n\nsample_indices!(misc_sample_indices_10_of_1k, 10, 1000);\n\nsample_indices!(misc_sample_indices_50_of_1k, 50, 1000);\n\nsample_indices!(misc_sample_indices_100_of_1k, 100, 1000);\n\n\n", "file_path": "benches/misc.rs", "rank": 46, "score": 110770.00404172175 }, { "content": "#[allow(non_camel_case_types)]\n\ntype w64 = w<u64>;\n\n\n\nconst RAND_SIZE_LEN: usize = 8;\n\nconst RAND_SIZE: usize = 1 << RAND_SIZE_LEN;\n\n\n\n/// A random number generator that uses ISAAC-64, the 64-bit variant of the\n\n/// ISAAC algorithm.\n\n///\n\n/// ISAAC stands for \"Indirection, Shift, Accumulate, Add, and Count\" which are\n\n/// the principal bitwise operations employed. It is the most advanced of a\n\n/// series of array based random number generator designed by Robert Jenkins\n\n/// in 1996[1].\n\n///\n\n/// ISAAC-64 is mostly similar to ISAAC. Because it operates on 64-bit integers\n\n/// instead of 32-bit, it uses twice as much memory to hold its state and\n\n/// results. Also it uses different constants for shifts and indirect indexing,\n\n/// optimized to give good results for 64bit arithmetic.\n\n///\n\n/// ISAAC-64 is notably fast and produces excellent quality random numbers for\n\n/// non-cryptographic applications.\n", "file_path": "src/prng/isaac64.rs", "rank": 47, "score": 107692.1683983184 }, { "content": "#[allow(non_camel_case_types)]\n\ntype w32 = w<u32>;\n\n\n\nconst RAND_SIZE_LEN: usize = 8;\n\nconst RAND_SIZE: usize = 1 << RAND_SIZE_LEN;\n\n\n\n/// A random number generator that uses the ISAAC algorithm.\n\n///\n\n/// ISAAC stands for \"Indirection, Shift, Accumulate, Add, and Count\" which are\n\n/// the principal bitwise operations employed. It is the most advanced of a\n\n/// series of array based random number generator designed by Robert Jenkins\n\n/// in 1996[1][2].\n\n///\n\n/// ISAAC is notably fast and produces excellent quality random numbers for\n\n/// non-cryptographic applications.\n\n///\n\n/// In spite of being designed with cryptographic security in mind, ISAAC hasn't\n\n/// been stringently cryptanalyzed and thus cryptographers do not not\n\n/// consensually trust it to be secure. 
When looking for a secure RNG, prefer\n\n/// [`Hc128Rng`] instead, which, like ISAAC, is an array-based RNG and one of\n\n/// the stream-ciphers selected the by eSTREAM contest.\n", "file_path": "src/prng/isaac.rs", "rank": 48, "score": 107526.55082834547 }, { "content": "fn free_doors(blocked: &[u32]) -> Vec<u32> {\n\n (0..3).filter(|x| !blocked.contains(x)).collect()\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 49, "score": 106311.20603945668 }, { "content": "/// This method performs a partial fisher-yates on a range of indices using a\n\n/// `HashMap` as a cache to record potential collisions.\n\n///\n\n/// The cache avoids allocating the entire `length` of values. This is especially useful when\n\n/// `amount <<< length`, i.e. select 3 non-repeating from `1_000_000`\n\nfn sample_indices_cache<R>(\n\n rng: &mut R,\n\n length: usize,\n\n amount: usize,\n\n) -> Vec<usize>\n\n where R: Rng + ?Sized,\n\n{\n\n debug_assert!(amount <= length);\n\n #[cfg(feature=\"std\")] let mut cache = HashMap::with_capacity(amount);\n\n #[cfg(not(feature=\"std\"))] let mut cache = BTreeMap::new();\n\n let mut out = Vec::with_capacity(amount);\n\n for i in 0..amount {\n\n let j: usize = rng.gen_range(i, length);\n\n\n\n // equiv: let tmp = slice[i];\n\n let tmp = match cache.get(&i) {\n\n Some(e) => *e,\n\n None => i,\n\n };\n\n\n", "file_path": "src/seq.rs", "rank": 50, "score": 105683.59646692214 }, { "content": "#[deprecated(since=\"0.5.0\", note=\"replaced by distributions::Standard\")]\n\npub trait Rand : Sized {\n\n /// Generates a random instance of this type using the specified source of\n\n /// randomness.\n\n fn rand<R: Rng>(rng: &mut R) -> Self;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 105168.62764855407 }, { "content": "/// Calculates ln(gamma(x)) (natural logarithm of the gamma\n\n/// function) using the Lanczos approximation.\n\n///\n\n/// The approximation expresses the gamma function as:\n\n/// `gamma(z+1) = sqrt(2*pi)*(z+g+0.5)^(z+0.5)*exp(-z-g-0.5)*Ag(z)`\n\n/// `g` is an arbitrary constant; we use the approximation with `g=5`.\n\n///\n\n/// Noting that `gamma(z+1) = z*gamma(z)` and applying `ln` to both sides:\n\n/// `ln(gamma(z)) = (z+0.5)*ln(z+g+0.5)-(z+g+0.5) + ln(sqrt(2*pi)*Ag(z)/z)`\n\n///\n\n/// `Ag(z)` is an infinite series with coefficients that can be calculated\n\n/// ahead of time - we use just the first 6 terms, which is good enough\n\n/// for most purposes.\n\npub fn log_gamma(x: f64) -> f64 {\n\n // precalculated 6 coefficients for the first 6 terms of the series\n\n let coefficients: [f64; 6] = [\n\n 76.18009172947146,\n\n -86.50532032941677,\n\n 24.01409824083091,\n\n -1.231739572450155,\n\n 0.1208650973866179e-2,\n\n -0.5395239384953e-5,\n\n ];\n\n\n\n // (x+0.5)*ln(x+g+0.5)-(x+g+0.5)\n\n let tmp = x + 5.5;\n\n let log = (x + 0.5) * tmp.ln() - tmp;\n\n\n\n // the first few terms of the series for Ag(x)\n\n let mut a = 1.000000000190015;\n\n let mut denom = x;\n\n for coeff in &coefficients {\n\n denom += 1.0;\n\n a += coeff / denom;\n\n }\n\n\n\n // get everything together\n\n // a is Ag(x)\n\n // 2.5066... 
is sqrt(2pi)\n\n log + (2.5066282746310005 * a / x).ln()\n\n}\n", "file_path": "src/distributions/log_gamma.rs", "rank": 52, "score": 104320.80445599157 }, { "content": "#[derive(Debug)]\n\nstruct ReseedingCore<R, Rsdr> {\n\n inner: R,\n\n reseeder: Rsdr,\n\n threshold: i64,\n\n bytes_until_reseed: i64,\n\n}\n\n\n\nimpl<R, Rsdr> BlockRngCore for ReseedingCore<R, Rsdr>\n\nwhere R: BlockRngCore + SeedableRng,\n\n Rsdr: RngCore\n\n{\n\n type Item = <R as BlockRngCore>::Item;\n\n type Results = <R as BlockRngCore>::Results;\n\n\n\n fn generate(&mut self, results: &mut Self::Results) {\n\n if self.bytes_until_reseed <= 0 {\n\n // We get better performance by not calling only `auto_reseed` here\n\n // and continuing with the rest of the function, but by directly\n\n // returning from a non-inlined function.\n\n return self.reseed_and_generate(results);\n", "file_path": "src/reseeding.rs", "rank": 53, "score": 103800.6499858125 }, { "content": "#[cfg(feature=\"simd_support\")]\n\npub trait SimdMath\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Returns the natural logarithm of each lane of the vector.\n\n #[inline(always)]\n\n fn ln(&self) -> Self;\n\n\n\n /// Simultaneously computes the sine and cosine of the vector. Returns\n\n /// (sin, cos).\n\n #[inline(always)]\n\n fn sincos(&self) -> (Self, Self);\n\n\n\n /// Returns the square root of each lane of the vector.\n\n /// It should compile down to a single instruction.\n\n #[inline(always)]\n\n fn sqrt(&self) -> Self;\n\n\n\n /// Returns `e^(self)`, (the exponential function).\n\n #[inline(always)]\n", "file_path": "src/distributions/box_muller.rs", "rank": 54, "score": 103704.25233694486 }, { "content": "#[deprecated(since=\"0.5.0\", note=\"removed in favor of thread_rng().gen()\")]\n\n#[inline]\n\npub fn random<T>() -> T where Standard: Distribution<T> {\n\n thread_rng().gen()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n #[cfg(not(feature=\"stdweb\"))]\n\n fn test_thread_rng() {\n\n use Rng;\n\n let mut r = ::thread_rng();\n\n r.gen::<i32>();\n\n let mut v = [1, 1, 1];\n\n r.shuffle(&mut v);\n\n let b: &[_] = &[1, 1, 1];\n\n assert_eq!(v, b);\n\n assert_eq!(r.gen_range(0, 1), 0);\n\n }\n\n\n\n #[test]\n", "file_path": "src/thread_rng.rs", "rank": 55, "score": 102733.01405716923 }, { "content": "trait WideningMultiply<RHS = Self> {\n\n type Output;\n\n\n\n fn wmul(self, x: RHS) -> Self::Output;\n\n}\n\n\n\nmacro_rules! 
wmul_impl {\n\n ($ty:ty, $wide:ty, $shift:expr) => {\n\n impl WideningMultiply for $ty {\n\n type Output = ($ty, $ty);\n\n\n\n #[inline(always)]\n\n fn wmul(self, x: $ty) -> Self::Output {\n\n let tmp = (self as $wide) * (x as $wide);\n\n ((tmp >> $shift) as $ty, tmp as $ty)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/distributions/range.rs", "rank": 56, "score": 101191.79963863606 }, { "content": "// Entropy collector state.\n\n// These values are not necessary to preserve across runs.\n\nstruct EcState {\n\n // Previous time stamp to determine the timer delta\n\n prev_time: u64,\n\n // Deltas used for the stuck test\n\n last_delta: i32,\n\n last_delta2: i32,\n\n // Memory for the Memory Access noise source\n\n mem: [u8; MEMORY_SIZE],\n\n}\n\n\n\nimpl EcState {\n\n // Stuck test by checking the:\n\n // - 1st derivation of the jitter measurement (time delta)\n\n // - 2nd derivation of the jitter measurement (delta of time deltas)\n\n // - 3rd derivation of the jitter measurement (delta of delta of time\n\n // deltas)\n\n //\n\n // All values must always be non-zero.\n\n // This test is a heuristic to see whether the last measurement holds\n\n // entropy.\n", "file_path": "src/jitter.rs", "rank": 57, "score": 94611.07585185046 }, { "content": "/// The core of the Box–Muller transform. We implement `sample` here because\n\n/// `Distribution` does not allow mutable state.\n\npub trait BoxMullerCore<T> {\n\n /// The core Box-Muller transform.\n\n ///\n\n /// Returns two independent random numbers with a standard normal\n\n /// distribution.\n\n #[inline(always)]\n\n fn box_muller<R: Rng>(rng: &mut R) -> (T, T);\n\n\n\n /// Generate a random value of `T`, using `rng` as the source of randomness.\n\n #[inline(always)]\n\n fn sample<R: Rng>(&mut self, rng: &mut R) -> T;\n\n}\n\n\n\nimpl BoxMullerCore<f64> for BoxMuller<f64> {\n\n fn box_muller<R: Rng>(rng: &mut R) -> (f64, f64) {\n\n const TWO_PI: f64 = PI_64 * 2.0;\n\n\n\n let (u0, u1): (f64, f64) = rng.gen();\n\n\n\n let radius = (-2.0 * u0.ln()).sqrt();\n", "file_path": "src/distributions/box_muller.rs", "rank": 58, "score": 94336.54654780423 }, { "content": "struct SimulationResult {\n\n win: bool,\n\n switch: bool,\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 59, "score": 91142.81555769613 }, { "content": "/// Helper trait for creating objects using the correct implementation of\n\n/// `RangeImpl` for the sampling type; this enables `Range::new(a, b)` to work.\n\npub trait SampleRange: PartialOrd+Sized {\n\n /// The `RangeImpl` implementation supporting type `X`.\n\n type Impl: RangeImpl<X = Self>;\n\n}\n\n\n", "file_path": "src/distributions/range.rs", "rank": 60, "score": 90683.76755941508 }, { "content": "fn main() {\n\n // The estimation will be more accurate with more simulations\n\n let num_simulations = 10000;\n\n\n\n let mut rng = rand::thread_rng();\n\n let random_door = Range::new(0u32, 3);\n\n\n\n let (mut switch_wins, mut switch_losses) = (0, 0);\n\n let (mut keep_wins, mut keep_losses) = (0, 0);\n\n\n\n println!(\"Running {} simulations...\", num_simulations);\n\n for _ in 0..num_simulations {\n\n let result = simulate(&random_door, &mut rng);\n\n\n\n match (result.win, result.switch) {\n\n (true, true) => switch_wins += 1,\n\n (true, false) => keep_wins += 1,\n\n (false, true) => switch_losses += 1,\n\n (false, false) => keep_losses += 1,\n\n }\n", "file_path": "examples/monty-hall.rs", "rank": 61, "score": 57676.68331861513 }, { "content": "fn main() {\n\n let range = Range::new(-1.0f64, 1.0);\n\n let mut 
rng = rand::thread_rng();\n\n\n\n let total = 1_000_000;\n\n let mut in_circle = 0;\n\n\n\n for _ in 0..total {\n\n let a = range.sample(&mut rng);\n\n let b = range.sample(&mut rng);\n\n if a*a + b*b <= 1.0 {\n\n in_circle += 1;\n\n }\n\n }\n\n\n\n // prints something close to 3.14159...\n\n println!(\"π is approximately {}\", 4. * (in_circle as f64) / (total as f64));\n\n}\n", "file_path": "examples/monte-carlo.rs", "rank": 62, "score": 57676.68331861513 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct GammaSmallShape {\n\n inv_shape: f64,\n\n large_shape: GammaLargeShape\n\n}\n\n\n\n/// Gamma distribution where the shape parameter is larger than 1.\n\n///\n\n/// See `Gamma` for sampling from a Gamma distribution with general\n\n/// shape parameters.\n", "file_path": "src/distributions/gamma.rs", "rank": 63, "score": 57460.05971692181 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct GammaLargeShape {\n\n scale: f64,\n\n c: f64,\n\n d: f64\n\n}\n\n\n\nimpl Gamma {\n\n /// Construct an object representing the `Gamma(shape, scale)`\n\n /// distribution.\n\n ///\n\n /// Panics if `shape <= 0` or `scale <= 0`.\n\n #[inline]\n\n pub fn new(shape: f64, scale: f64) -> Gamma {\n\n assert!(shape > 0.0, \"Gamma::new called with shape <= 0\");\n\n assert!(scale > 0.0, \"Gamma::new called with scale <= 0\");\n\n\n\n let repr = if shape == 1.0 {\n\n One(Exp::new(1.0 / scale))\n\n } else if shape < 1.0 {\n\n Small(GammaSmallShape::new_raw(shape, scale))\n", "file_path": "src/distributions/gamma.rs", "rank": 64, "score": 57460.05971692181 }, { "content": "// A function that is opaque to the optimizer to assist in avoiding dead-code\n\n// elimination. Taken from `bencher`.\n\nfn black_box<T>(dummy: T) -> T {\n\n unsafe {\n\n let ret = ptr::read_volatile(&dummy);\n\n mem::forget(dummy);\n\n ret\n\n }\n\n}\n\n\n\nimpl RngCore for JitterRng {\n\n fn next_u32(&mut self) -> u32 {\n\n // We want to use both parts of the generated entropy\n\n if self.data_half_used {\n\n self.data_half_used = false;\n\n (self.data >> 32) as u32\n\n } else {\n\n self.data = self.next_u64();\n\n self.data_half_used = true;\n\n self.data as u32\n\n }\n\n }\n", "file_path": "src/jitter.rs", "rank": 65, "score": 43860.227399888856 }, { "content": "/// interval a nicer choice.\n\n///\n\n/// [`Exp1`]: struct.Exp1.html\n\n/// [`StandardNormal`]: struct.StandardNormal.html\n\n#[derive(Debug)]\n\npub struct Standard;\n\n\n\n#[allow(deprecated)]\n\nimpl<T> ::Rand for T where Standard: Distribution<T> {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n\n Standard.sample(rng)\n\n }\n\n}\n\n\n\n\n\n/// A value with a particular weight for use with `WeightedChoice`.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Weighted<T> {\n\n /// The numerical weight of this item\n\n pub weight: u32,\n", "file_path": "src/distributions/mod.rs", "rank": 66, "score": 35539.651574249925 }, { "content": "/// using `R` as the source of randomness.\n\n///\n\n/// This `struct` is created by the [`sample_iter`] method on [`Distribution`].\n\n/// See its documentation for more.\n\n///\n\n/// [`Distribution`]: trait.Distribution.html\n\n/// [`sample_iter`]: trait.Distribution.html#method.sample_iter\n\n#[derive(Debug)]\n\npub struct DistIter<'a, D, R, T> where D: Distribution<T> + 'a, R: Rng + 'a {\n\n distr: &'a D,\n\n rng: &'a mut R,\n\n phantom: ::core::marker::PhantomData<T>,\n\n}\n\n\n\nimpl<'a, D, R, T> Iterator for DistIter<'a, D, R, T>\n\n where D: Distribution<T>, R: Rng + 'a\n\n{\n\n type Item = T;\n\n\n\n #[inline(always)]\n", "file_path": 
"src/distributions/mod.rs", "rank": 67, "score": 35537.189236274724 }, { "content": " if f_tab[i + 1] + (f_tab[i] - f_tab[i + 1]) * rng.gen::<f64>() < pdf(x) {\n\n return x;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use Rng;\n\n use mock::StepRng;\n\n use super::{WeightedChoice, Weighted, Distribution};\n\n\n\n #[test]\n\n fn test_weighted_choice() {\n\n // this makes assumptions about the internal implementation of\n\n // WeightedChoice. It may fail when the implementation in\n\n // `distributions::range::RangeInt changes.\n\n\n\n macro_rules! t {\n\n ($items:expr, $expected:expr) => {{\n", "file_path": "src/distributions/mod.rs", "rank": 68, "score": 35533.61169184049 }, { "content": " let mut items = $items;\n\n let mut total_weight = 0;\n\n for item in &items { total_weight += item.weight; }\n\n\n\n let wc = WeightedChoice::new(&mut items);\n\n let expected = $expected;\n\n\n\n // Use extremely large steps between the random numbers, because\n\n // we test with small ranges and RangeInt is designed to prefer\n\n // the most significant bits.\n\n let mut rng = StepRng::new(0, !0 / (total_weight as u64));\n\n\n\n for &val in expected.iter() {\n\n assert_eq!(wc.sample(&mut rng), val)\n\n }\n\n }}\n\n }\n\n\n\n t!([Weighted { weight: 1, item: 10}], [10]);\n\n\n", "file_path": "src/distributions/mod.rs", "rank": 69, "score": 35533.406160761246 }, { "content": "/// Weighted { weight: 1, item: 'c' });\n\n/// let wc = WeightedChoice::new(&mut items);\n\n/// let mut rng = rand::thread_rng();\n\n/// for _ in 0..16 {\n\n/// // on average prints 'a' 4 times, 'b' 8 and 'c' twice.\n\n/// println!(\"{}\", wc.sample(&mut rng));\n\n/// }\n\n/// ```\n\n#[derive(Debug)]\n\npub struct WeightedChoice<'a, T:'a> {\n\n items: &'a mut [Weighted<T>],\n\n weight_range: Range<u32>,\n\n}\n\n\n\nimpl<'a, T: Clone> WeightedChoice<'a, T> {\n\n /// Create a new `WeightedChoice`.\n\n ///\n\n /// Panics if:\n\n ///\n\n /// - `items` is empty\n", "file_path": "src/distributions/mod.rs", "rank": 70, "score": 35531.863583336104 }, { "content": " fn sample<R: Rng>(&mut self, _: &mut R) -> T { self.val }\n\n }\n\n impl<T: Copy> IndependentSample<T> for Constant<T> {\n\n fn ind_sample<R: Rng>(&self, _: &mut R) -> T { self.val }\n\n }\n\n\n\n let mut sampler = Constant{ val: 293 };\n\n assert_eq!(sampler.sample(&mut ::test::rng(233)), 293);\n\n assert_eq!(sampler.ind_sample(&mut ::test::rng(234)), 293);\n\n }\n\n\n\n #[cfg(feature=\"std\")]\n\n #[test] #[allow(deprecated)]\n\n fn test_backwards_compat_exp() {\n\n use distributions::{IndependentSample, Exp};\n\n let sampler = Exp::new(1.0);\n\n sampler.ind_sample(&mut ::test::rng(235));\n\n }\n\n\n\n #[cfg(feature=\"std\")]\n", "file_path": "src/distributions/mod.rs", "rank": 71, "score": 35530.088944335395 }, { "content": " #[test]\n\n fn test_distributions_iter() {\n\n use distributions::Normal;\n\n let mut rng = ::test::rng(210);\n\n let distr = Normal::new(10.0, 10.0);\n\n let results: Vec<_> = distr.sample_iter(&mut rng).take(100).collect();\n\n println!(\"{:?}\", results);\n\n }\n\n}\n", "file_path": "src/distributions/mod.rs", "rank": 72, "score": 35528.597622945126 }, { "content": " #[test] #[should_panic]\n\n fn test_weighted_choice_zero_weight() {\n\n WeightedChoice::new(&mut [Weighted { weight: 0, item: 0},\n\n Weighted { weight: 0, item: 1}]);\n\n }\n\n #[test] #[should_panic]\n\n fn test_weighted_choice_weight_overflows() {\n\n let x = ::core::u32::MAX / 2; // x + x + 2 is the overflow\n\n WeightedChoice::new(&mut [Weighted { weight: x, item: 0 },\n\n 
Weighted { weight: 1, item: 1 },\n\n Weighted { weight: x, item: 2 },\n\n Weighted { weight: 1, item: 3 }]);\n\n }\n\n\n\n #[test] #[allow(deprecated)]\n\n fn test_backwards_compat_sample() {\n\n use distributions::{Sample, IndependentSample};\n\n\n\n struct Constant<T> { val: T }\n\n impl<T: Copy> Sample<T> for Constant<T> {\n", "file_path": "src/distributions/mod.rs", "rank": 73, "score": 35527.983521667324 }, { "content": " #[cfg(feature=\"std\")]\n\n macro_rules! impl_f64 {\n\n ($($name: ident), *) => {\n\n $(\n\n impl Sample<f64> for $name {\n\n fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 {\n\n Distribution::sample(self, rng)\n\n }\n\n }\n\n impl IndependentSample<f64> for $name {\n\n fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {\n\n Distribution::sample(self, rng)\n\n }\n\n }\n\n )*\n\n }\n\n }\n\n #[cfg(feature=\"std\")]\n\n impl_f64!(Exp, Gamma, ChiSquared, FisherF, StudentT, Normal, LogNormal);\n\n}\n\n\n", "file_path": "src/distributions/mod.rs", "rank": 74, "score": 35527.92330122652 }, { "content": "//! values (with some lag).\n\n\n\npub mod chacha;\n\npub mod hc128;\n\npub mod isaac;\n\npub mod isaac64;\n\nmod sfc32;\n\nmod xorshift;\n\n\n\nmod isaac_array;\n\n\n\npub use self::chacha::ChaChaRng;\n\npub use self::hc128::Hc128Rng;\n\npub use self::isaac::IsaacRng;\n\npub use self::isaac64::Isaac64Rng;\n\npub use self::sfc32::Sfc32Rng;\n\npub use self::sfc32::*;\n\npub use self::xorshift::XorShiftRng;\n", "file_path": "src/prng/mod.rs", "rank": 75, "score": 35527.7537414407 }, { "content": " idx = i;\n\n // we need the `/ 2` to round up otherwise we'll drop\n\n // the trailing elements when `modifier` is odd.\n\n modifier += 1;\n\n } else {\n\n // otherwise we're too big, so go left. (i.e. do\n\n // nothing)\n\n }\n\n modifier /= 2;\n\n }\n\n self.items[idx + 1].item.clone()\n\n }\n\n}\n\n\n\n/// Sample a random number using the Ziggurat method (specifically the\n\n/// ZIGNOR variant from Doornik 2005). Most of the arguments are\n\n/// directly from the paper:\n\n///\n\n/// * `rng`: source of randomness\n\n/// * `symmetric`: whether this is a symmetric distribution, or one-sided with P(x < 0) = 0.\n\n/// * `X`: the $x_i$ abscissae.\n\n/// * `F`: precomputed values of the PDF at the $x_i$, (i.e. $f(x_i)$)\n\n/// * `F_DIFF`: precomputed values of $f(x_i) - f(x_{i+1})$\n\n/// * `pdf`: the probability density function\n\n/// * `zero_case`: manual sampling from the tail when we chose the\n\n/// bottom box (i.e. 
i == 0)\n\n\n\n// the perf improvement (25-50%) is definitely worth the extra code\n\n// size from force-inlining.\n", "file_path": "src/distributions/mod.rs", "rank": 76, "score": 35525.87451501345 }, { "content": " /// The actual item which is being weighted\n\n pub item: T,\n\n}\n\n\n\n/// A distribution that selects from a finite collection of weighted items.\n\n///\n\n/// Each item has an associated weight that influences how likely it\n\n/// is to be chosen: higher weight is more likely.\n\n///\n\n/// The `Clone` restriction is a limitation of the `Distribution` trait.\n\n/// Note that `&T` is (cheaply) `Clone` for all `T`, as is `u32`, so one can\n\n/// store references or indices into another vector.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rand::distributions::{Weighted, WeightedChoice, Distribution};\n\n///\n\n/// let mut items = vec!(Weighted { weight: 2, item: 'a' },\n\n/// Weighted { weight: 4, item: 'b' },\n", "file_path": "src/distributions/mod.rs", "rank": 77, "score": 35525.8486502885 }, { "content": " Distribution::sample(self, rng)\n\n }\n\n }\n\n impl<'a, T: Clone> IndependentSample<T> for WeightedChoice<'a, T> {\n\n fn ind_sample<R: Rng>(&self, rng: &mut R) -> T {\n\n Distribution::sample(self, rng)\n\n }\n\n }\n\n\n\n impl<T: SampleRange> Sample<T> for Range<T> {\n\n fn sample<R: Rng>(&mut self, rng: &mut R) -> T {\n\n Distribution::sample(self, rng)\n\n }\n\n }\n\n impl<T: SampleRange> IndependentSample<T> for Range<T> {\n\n fn ind_sample<R: Rng>(&self, rng: &mut R) -> T {\n\n Distribution::sample(self, rng)\n\n }\n\n }\n\n\n", "file_path": "src/distributions/mod.rs", "rank": 78, "score": 35525.554214381475 }, { "content": " fn test_weighted_clone_change_weight() {\n\n let initial : Weighted<u32> = Weighted {weight: 1, item: 1};\n\n let mut clone = initial.clone();\n\n clone.weight = 5;\n\n assert_eq!(initial.weight, clone.weight);\n\n }\n\n\n\n #[test] #[should_panic]\n\n fn test_weighted_clone_change_item() {\n\n let initial : Weighted<u32> = Weighted {weight: 1, item: 1};\n\n let mut clone = initial.clone();\n\n clone.item = 5;\n\n assert_eq!(initial.item, clone.item);\n\n\n\n }\n\n\n\n #[test] #[should_panic]\n\n fn test_weighted_choice_no_items() {\n\n WeightedChoice::<isize>::new(&mut []);\n\n }\n", "file_path": "src/distributions/mod.rs", "rank": 79, "score": 35525.230847839004 }, { "content": "\n\npub mod range;\n\n#[cfg(feature=\"std\")]\n\npub mod gamma;\n\n#[cfg(feature=\"std\")]\n\npub mod normal;\n\n#[cfg(feature=\"std\")]\n\npub mod exponential;\n\n#[cfg(feature = \"std\")]\n\npub mod poisson;\n\n#[cfg(feature = \"std\")]\n\npub mod binomial;\n\npub mod box_muller;\n\n\n\nmod float;\n\nmod integer;\n\n#[cfg(feature=\"std\")]\n\nmod log_gamma;\n\nmod other;\n\n#[cfg(feature=\"std\")]\n\nmod ziggurat_tables;\n\n#[cfg(feature=\"std\")]\n\nuse distributions::float::IntoFloat;\n\n\n\n/// Types that can be used to create a random instance of `Support`.\n\n#[deprecated(since=\"0.5.0\", note=\"use Distribution instead\")]\n", "file_path": "src/distributions/mod.rs", "rank": 80, "score": 35524.3479999048 }, { "content": "//!\n\n//! [`Distribution`]: trait.Distribution.html\n\n//! [`Range`]: range/struct.Range.html\n\n//! 
[`Standard`]: struct.Standard.html\n\n\n\nuse Rng;\n\n\n\npub use self::other::Alphanumeric;\n\npub use self::range::Range;\n\n#[cfg(feature=\"std\")]\n\npub use self::gamma::{Gamma, ChiSquared, FisherF, StudentT};\n\n#[cfg(feature=\"std\")]\n\npub use self::normal::{Normal, LogNormal, StandardNormal};\n\n#[cfg(feature=\"std\")]\n\npub use self::exponential::{Exp, Exp1};\n\n#[cfg(feature = \"std\")]\n\npub use self::poisson::Poisson;\n\n#[cfg(feature = \"std\")]\n\npub use self::binomial::Binomial;\n\npub use self::box_muller::{BoxMuller, BoxMullerCore, LogBoxMuller};\n", "file_path": "src/distributions/mod.rs", "rank": 81, "score": 35524.245294294305 }, { "content": " ///\n\n /// // Dice-rolling:\n\n /// let die_range = Range::new_inclusive(1, 6);\n\n /// let mut roll_die = die_range.sample_iter(&mut rng);\n\n /// while roll_die.next().unwrap() != 6 {\n\n /// println!(\"Not a 6; rolling again!\");\n\n /// }\n\n /// ```\n\n fn sample_iter<'a, R: Rng>(&'a self, rng: &'a mut R)\n\n -> DistIter<'a, Self, R, T> where Self: Sized\n\n {\n\n DistIter {\n\n distr: self,\n\n rng: rng,\n\n phantom: ::core::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\n/// An iterator that generates random values of `T` with distribution `D`,\n", "file_path": "src/distributions/mod.rs", "rank": 82, "score": 35524.04174643806 }, { "content": " assert!(running_total != 0, \"WeightedChoice::new called with a total weight of 0\");\n\n\n\n WeightedChoice {\n\n items,\n\n // we're likely to be generating numbers in this range\n\n // relatively often, so might as well cache it\n\n weight_range: Range::new(0, running_total)\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T: Clone> Distribution<T> for WeightedChoice<'a, T> {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T {\n\n // we want to find the first element that has cumulative\n\n // weight > sample_weight, which we do by binary since the\n\n // cumulative weights of self.items are sorted.\n\n\n\n // choose a weight in [0, total_weight)\n\n let sample_weight = self.weight_range.sample(rng);\n\n\n", "file_path": "src/distributions/mod.rs", "rank": 83, "score": 35522.89592269342 }, { "content": " /// - the total weight is 0\n\n /// - the total weight is larger than a `u32` can contain.\n\n pub fn new(items: &'a mut [Weighted<T>]) -> WeightedChoice<'a, T> {\n\n // strictly speaking, this is subsumed by the total weight == 0 case\n\n assert!(!items.is_empty(), \"WeightedChoice::new called with no items\");\n\n\n\n let mut running_total: u32 = 0;\n\n\n\n // we convert the list from individual weights to cumulative\n\n // weights so we can binary search. This *could* drop elements\n\n // with weight == 0 as an optimisation.\n\n for item in items.iter_mut() {\n\n running_total = match running_total.checked_add(item.weight) {\n\n Some(n) => n,\n\n None => panic!(\"WeightedChoice::new called with a total weight \\\n\n larger than a u32 can contain\")\n\n };\n\n\n\n item.weight = running_total;\n\n }\n", "file_path": "src/distributions/mod.rs", "rank": 84, "score": 35522.48173943694 }, { "content": " fn next(&mut self) -> Option<T> {\n\n Some(self.distr.sample(self.rng))\n\n }\n\n}\n\n\n\nimpl<'a, T, D: Distribution<T>> Distribution<T> for &'a D {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T {\n\n (*self).sample(rng)\n\n }\n\n}\n\n\n\n/// A generic random value distribution. Generates values for various types\n\n/// with numerically uniform distribution.\n\n///\n\n/// For floating-point numbers, this generates values from the open range\n\n/// `(0, 1)` (i.e. 
excluding 0.0 and 1.0).\n\n///\n\n/// ## Built-in Implementations\n\n///\n\n/// This crate implements the distribution `Standard` for various primitive\n", "file_path": "src/distributions/mod.rs", "rank": 85, "score": 35521.734042931705 }, { "content": "///\n\n/// # Example\n\n/// ```rust\n\n/// use rand::{NewRng, SmallRng, Rng};\n\n/// use rand::distributions::Standard;\n\n///\n\n/// let val: f32 = SmallRng::new().sample(Standard);\n\n/// println!(\"f32 from (0,1): {}\", val);\n\n/// ```\n\n///\n\n/// With dynamic dispatch (type erasure of `Rng`):\n\n///\n\n/// ```rust\n\n/// use rand::{thread_rng, Rng, RngCore};\n\n/// use rand::distributions::Standard;\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let erased_rng: &mut RngCore = &mut rng;\n\n/// let val: f32 = erased_rng.sample(Standard);\n\n/// println!(\"f32 from (0,1): {}\", val);\n", "file_path": "src/distributions/mod.rs", "rank": 86, "score": 35520.54080819159 }, { "content": " [40, 41, 42, 43, 44]);\n\n t!([Weighted { weight: 1, item: 50},\n\n Weighted { weight: 1, item: 51},\n\n Weighted { weight: 1, item: 52},\n\n Weighted { weight: 1, item: 53},\n\n Weighted { weight: 1, item: 54},\n\n Weighted { weight: 1, item: 55},\n\n Weighted { weight: 1, item: 56}],\n\n [50, 54, 51, 55, 52, 56, 53]);\n\n }\n\n\n\n #[test]\n\n fn test_weighted_clone_initialization() {\n\n let initial : Weighted<u32> = Weighted {weight: 1, item: 1};\n\n let clone = initial.clone();\n\n assert_eq!(initial.weight, clone.weight);\n\n assert_eq!(initial.item, clone.item);\n\n }\n\n\n\n #[test] #[should_panic]\n", "file_path": "src/distributions/mod.rs", "rank": 87, "score": 35520.48428537073 }, { "content": "/// types. Assuming the provided `Rng` is well-behaved, these implementations\n\n/// generate values with the following ranges and distributions:\n\n///\n\n/// * Integers (`i32`, `u32`, `isize`, `usize`, etc.): Uniformly distributed\n\n/// over all values of the type.\n\n/// * `char`: Uniformly distributed over all Unicode scalar values, i.e. all\n\n/// code points in the range `0...0x10_FFFF`, except for the range\n\n/// `0xD800...0xDFFF` (the surrogate code points). This includes\n\n/// unassigned/reserved code points.\n\n/// * `bool`: Generates `false` or `true`, each with probability 0.5.\n\n/// * Floating point types (`f32` and `f64`): Uniformly distributed in the\n\n/// open range `(0, 1)`.\n\n///\n\n/// The following aggregate types also implement the distribution `Standard` as\n\n/// long as their component types implement it:\n\n///\n\n/// * Tuples and arrays: Each element of the tuple or array is generated\n\n/// independently, using the `Standard` distribution recursively.\n\n/// * `Option<T>`: Returns `None` with probability 0.5; otherwise generates a\n\n/// random `T` and returns `Some(T)`.\n", "file_path": "src/distributions/mod.rs", "rank": 88, "score": 35520.35397690968 }, { "content": "// Copyright 2013-2017 The Rust Project Developers. See the COPYRIGHT\n\n// file at the top-level directory of this distribution and at\n\n// https://rust-lang.org/COPYRIGHT.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! Sampling from random distributions.\n\n//!\n\n//! Distributions are stateless (i.e. immutable) objects controlling the\n\n//! 
production of values of some type `T` from a presumed uniform randomness\n\n//! source. These objects may have internal parameters set at contruction time\n\n//! (e.g. [`Range`], which has configurable bounds) or may have no internal\n\n//! parameters (e.g. [`Standard`]).\n\n//!\n\n//! All distributions support the [`Distribution`] trait, and support usage\n\n//! via `distr.sample(&mut rng)` as well as via `rng.sample(distr)`.\n", "file_path": "src/distributions/mod.rs", "rank": 89, "score": 35517.59412551706 }, { "content": "// Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\n// file at the top-level directory of this distribution and at\n\n// https://rust-lang.org/COPYRIGHT.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! Pseudo random number generators are algorithms to produce *apparently\n\n//! random* numbers deterministically, and usually fairly quickly.\n\n//!\n\n//! So long as the algorithm is computationally secure, is initialised with\n\n//! sufficient entropy (i.e. unknown by an attacker), and its internal state is\n\n//! also protected (unknown to an attacker), the output will also be\n\n//! *computationally secure*. Computationally Secure Pseudo Random Number\n\n//! Generators (CSPRNGs) are thus suitable sources of random numbers for\n\n//! cryptography. There are a couple of gotchas here, however. First, the seed\n\n//! used for initialisation must be unknown. Usually this should be provided by\n", "file_path": "src/prng/mod.rs", "rank": 90, "score": 35516.368711594994 }, { "content": " // short circuit when it's the first item\n\n if sample_weight < self.items[0].weight {\n\n return self.items[0].item.clone();\n\n }\n\n\n\n let mut idx = 0;\n\n let mut modifier = self.items.len();\n\n\n\n // now we know that every possibility has an element to the\n\n // left, so we can just search for the last element that has\n\n // cumulative weight <= sample_weight, then the next one will\n\n // be \"it\". (Note that this greatest element will never be the\n\n // last element of the vector, since sample_weight is chosen\n\n // in [0, total_weight) and the cumulative weight of the last\n\n // one is exactly the total weight.)\n\n while modifier > 1 {\n\n let i = idx + modifier / 2;\n\n if self.items[i].weight <= sample_weight {\n\n // we're small, so look to the right, but allow this\n\n // exact element still.\n", "file_path": "src/distributions/mod.rs", "rank": 91, "score": 35515.63194576831 }, { "content": "//! the operating system and should usually be secure, however this may not\n\n//! always be the case (especially soon after startup). Second, user-space\n\n//! memory may be vulnerable, for example when written to swap space, and after\n\n//! forking a child process should reinitialise any user-space PRNGs. For this\n\n//! reason it may be preferable to source random numbers directly from the OS\n\n//! for cryptographic applications.\n\n//!\n\n//! PRNGs are also widely used for non-cryptographic uses: randomised\n\n//! algorithms, simulations, games. In these applications it is usually not\n\n//! important for numbers to be cryptographically *unguessable*, but even\n\n//! distribution and independence from other samples (from the point of view\n\n//! 
of someone unaware of the algorithm used, at least) may still be important.\n\n//! Good PRNGs should satisfy these properties, but do not take them for\n\n//! granted; Wikipedia's article on\n\n//! [Pseudorandom number generators](https://en.wikipedia.org/wiki/Pseudorandom_number_generator)\n\n//! provides some background on this topic.\n\n//!\n\n//! Care should be taken when seeding (initialising) PRNGs. Some PRNGs have\n\n//! short periods for some seeds. If one PRNG is seeded from another using the\n\n//! same algorithm, it is possible that both will yield the same sequence of\n", "file_path": "src/prng/mod.rs", "rank": 92, "score": 35514.60159370311 }, { "content": " // It is possible with an extra step, but an open range does not\n\n // seem neccesary for the ziggurat algorithm anyway.\n\n (bits >> 12).into_float_with_exponent(1) - 3.0\n\n } else {\n\n // Convert to a value in the range [1,2) and substract to get (0,1)\n\n (bits >> 12).into_float_with_exponent(0)\n\n - (1.0 - ::core::f64::EPSILON / 2.0)\n\n };\n\n let x = u * x_tab[i];\n\n\n\n let test_x = if symmetric { x.abs() } else {x};\n\n\n\n // algebraically equivalent to |u| < x_tab[i+1]/x_tab[i] (or u < x_tab[i+1]/x_tab[i])\n\n if test_x < x_tab[i + 1] {\n\n return x;\n\n }\n\n if i == 0 {\n\n return zero_case(rng, u);\n\n }\n\n // algebraically equivalent to f1 + DRanU()*(f0 - f1) < 1\n", "file_path": "src/distributions/mod.rs", "rank": 93, "score": 35513.8721583163 }, { "content": " // skip some\n\n t!([Weighted { weight: 0, item: 20},\n\n Weighted { weight: 2, item: 21},\n\n Weighted { weight: 0, item: 22},\n\n Weighted { weight: 1, item: 23}],\n\n [21, 21, 23]);\n\n\n\n // different weights\n\n t!([Weighted { weight: 4, item: 30},\n\n Weighted { weight: 3, item: 31}],\n\n [30, 31, 30, 31, 30, 31, 30]);\n\n\n\n // check that we're binary searching\n\n // correctly with some vectors of odd\n\n // length.\n\n t!([Weighted { weight: 1, item: 40},\n\n Weighted { weight: 1, item: 41},\n\n Weighted { weight: 1, item: 42},\n\n Weighted { weight: 1, item: 43},\n\n Weighted { weight: 1, item: 44}],\n", "file_path": "src/distributions/mod.rs", "rank": 94, "score": 35513.26327402601 }, { "content": "/// ```\n\n///\n\n/// # Open interval for floats\n\n/// In theory it is possible to choose between an open interval `(0, 1)`, and\n\n/// the half-open intervals `[0, 1)` and `(0, 1]`. All can give a distribution\n\n/// with perfectly uniform intervals. Many libraries in other programming\n\n/// languages default to the closed-open interval `[0, 1)`. We choose here to go\n\n/// with *open*, with the arguments:\n\n///\n\n/// - The chance to generate a specific value, like exactly 0.0, is *tiny*. No\n\n/// (or almost no) sensible code relies on an exact floating-point value to be\n\n/// generated with a very small chance (1 in 2<sup>23</sup> (approx. 8\n\n/// million) for `f32`, and 1 in 2<sup>52</sup> for `f64`). What is relied on\n\n/// is having a uniform distribution and a mean of `0.5`.\n\n/// - Several common algorithms rely on never seeing the value `0.0` generated,\n\n/// i.e. they rely on an open interval. For example when the logarithm of the\n\n/// value is taken, or used as a devisor.\n\n///\n\n/// In other words, the guarantee some value *could* be generated is less useful\n\n/// than the guarantee some value (`0.0`) is never generated. 
That makes an open\n", "file_path": "src/distributions/mod.rs", "rank": 95, "score": 35510.441588899484 }, { "content": " }\n\n}\n\n\n\nimpl RngCore for EntropyRng {\n\n fn next_u32(&mut self) -> u32 {\n\n impls::next_u32_via_fill(self)\n\n }\n\n\n\n fn next_u64(&mut self) -> u64 {\n\n impls::next_u64_via_fill(self)\n\n }\n\n\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n self.try_fill_bytes(dest).unwrap_or_else(|err|\n\n panic!(\"all entropy sources failed; first error: {}\", err))\n\n }\n\n\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n fn try_os_new(dest: &mut [u8]) -> Result<OsRng, Error>\n\n {\n", "file_path": "src/entropy_rng.rs", "rank": 96, "score": 35493.04190624489 }, { "content": " } else {\n\n return rng.try_fill_bytes(dest); // use JitterRng\n\n }\n\n }\n\n }\n\n if let Some(rng) = switch_rng {\n\n self.rng = rng;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl CryptoRng for EntropyRng {}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_entropy() {\n\n let mut rng = EntropyRng::new();\n\n let n = (rng.next_u32() ^ rng.next_u32()).count_ones();\n\n assert!(n >= 2); // p(failure) approx 1e-7\n\n }\n\n}\n", "file_path": "src/entropy_rng.rs", "rank": 97, "score": 35484.40122941289 }, { "content": " let mut rng = OsRng::new()?;\n\n rng.try_fill_bytes(dest)?;\n\n Ok(rng)\n\n }\n\n\n\n fn try_jitter_new(dest: &mut [u8]) -> Result<JitterRng, Error>\n\n {\n\n let mut rng = JitterRng::new()?;\n\n rng.try_fill_bytes(dest)?;\n\n Ok(rng)\n\n }\n\n\n\n let mut switch_rng = None;\n\n match self.rng {\n\n EntropySource::None => {\n\n let os_rng_result = try_os_new(dest);\n\n match os_rng_result {\n\n Ok(os_rng) => {\n\n debug!(\"EntropyRng: using OsRng\");\n\n switch_rng = Some(EntropySource::Os(os_rng));\n", "file_path": "src/entropy_rng.rs", "rank": 98, "score": 35482.634047103136 }, { "content": " if let Err(os_rng_error) = os_rng_result {\n\n warn!(\"EntropyRng: OsRng failed [falling back to JitterRng]: {}\",\n\n os_rng_error);\n\n match try_jitter_new(dest) {\n\n Ok(jitter_rng) => {\n\n debug!(\"EntropyRng: using JitterRng\");\n\n switch_rng = Some(EntropySource::Jitter(jitter_rng));\n\n }\n\n Err(_jitter_error) => {\n\n warn!(\"EntropyRng: JitterRng failed: {}\",\n\n _jitter_error);\n\n return Err(os_rng_error);\n\n }\n\n }\n\n }\n\n }\n\n EntropySource::Jitter(ref mut rng) => {\n\n if let Ok(os_rng) = try_os_new(dest) {\n\n debug!(\"EntropyRng: using OsRng\");\n\n switch_rng = Some(EntropySource::Os(os_rng));\n", "file_path": "src/entropy_rng.rs", "rank": 99, "score": 35478.685335586335 } ]
Rust
src/main.rs
harksin/noria-mysql
7ece4f4107d453c922b452cbf0d824be7625a8e4
#![feature(box_syntax, box_patterns)] #![feature(nll)] #![feature(try_from)] extern crate arccstr; extern crate chrono; #[macro_use] extern crate clap; extern crate noria; extern crate msql_srv; extern crate nom_sql; #[macro_use] extern crate lazy_static; #[macro_use] extern crate slog; extern crate slog_term; extern crate regex; mod convert; mod rewrite; mod schema; mod backend; mod utils; use msql_srv::MysqlIntermediary; use nom_sql::SelectStatement; use std::collections::HashMap; use std::io::{self, BufReader, BufWriter}; use std::net; use std::sync::atomic::AtomicUsize; use std::sync::{Arc, RwLock}; use std::thread; use schema::Schema; use backend::NoriaBackend; pub fn logger_pls() -> slog::Logger { use slog::Drain; use slog::Logger; use slog_term::term_full; use std::sync::Mutex; Logger::root(Mutex::new(term_full()).fuse(), o!()) } fn main() { use clap::{App, Arg}; let matches = App::new("distributary-mysql") .version("0.0.1") .about("MySQL shim for Noria.") .arg( Arg::with_name("deployment") .long("deployment") .takes_value(true) .required(true) .help("Noria deployment ID to attach to."), ).arg( Arg::with_name("zk_addr") .long("zookeeper-address") .short("z") .default_value("127.0.0.1:2181") .help("IP:PORT for Zookeeper."), ).arg( Arg::with_name("port") .long("port") .short("p") .default_value("3306") .takes_value(true) .help("Port to listen on."), ).arg( Arg::with_name("slowlog") .long("log-slow") .help("Log slow queries (> 5ms)"), ).arg( Arg::with_name("no-static-responses") .long("no-static-responses") .takes_value(false) .help("Disable checking for queries requiring static responses. Improves latency."), ).arg( Arg::with_name("no-sanitize") .long("no-sanitize") .takes_value(false) .help("Disable query sanitization. Improves latency."), ).arg(Arg::with_name("verbose").long("verbose").short("v")) .get_matches(); let deployment = matches.value_of("deployment").unwrap().to_owned(); let port = value_t_or_exit!(matches, "port", u16); let slowlog = matches.is_present("slowlog"); let zk_addr = matches.value_of("zk_addr").unwrap().to_owned(); let sanitize = !matches.is_present("no-sanitize"); let static_responses = !matches.is_present("no-static-responses"); let listener = net::TcpListener::bind(format!("127.0.0.1:{}", port)).unwrap(); let log = logger_pls(); info!(log, "listening on port {}", port); let query_counter = Arc::new(AtomicUsize::new(0)); let schemas: Arc<RwLock<HashMap<String, Schema>>> = Arc::default(); let auto_increments: Arc<RwLock<HashMap<String, AtomicUsize>>> = Arc::default(); let query_cache: Arc<RwLock<HashMap<SelectStatement, String>>> = Arc::default(); let mut threads = Vec::new(); let mut i = 0; while let Ok((s, _)) = listener.accept() { s.set_nodelay(true).unwrap(); let builder = thread::Builder::new().name(format!("handler{}", i)); let (schemas, auto_increments, query_cache, query_counter, log) = ( schemas.clone(), auto_increments.clone(), query_cache.clone(), query_counter.clone(), log.clone(), ); let zk_addr = zk_addr.clone(); let deployment = deployment.clone(); let jh = builder .spawn(move || { let b = NoriaBackend::new( &zk_addr, &deployment, schemas, auto_increments, query_cache, query_counter, slowlog, static_responses, sanitize, log, ); let rs = s.try_clone().unwrap(); if let Err(e) = MysqlIntermediary::run_on(b, BufReader::new(rs), BufWriter::new(s)) { match e.kind() { io::ErrorKind::ConnectionReset | io::ErrorKind::BrokenPipe => {} _ => { panic!("{:?}", e); } } } }).unwrap(); threads.push(jh); i += 1; } for t in threads.drain(..) 
{ t.join() .map_err(|e| e.downcast::<io::Error>().unwrap()) .unwrap(); } }
#![feature(box_syntax, box_patterns)] #![feature(nll)] #![feature(try_from)] extern crate arccstr; extern crate chrono; #[macro_use] extern crate clap; extern crate noria; extern crate msql_srv; extern crate nom_sql; #[macro_use] extern crate lazy_static; #[macro_use] extern crate slog; extern crate slog_term; extern crate regex; mod convert; mod rewrite; mod schema; mod backend; mod utils; use msql_srv::MysqlIntermediary; use nom_sql::SelectStatement; use std::collections::HashMap; use std::io::{self, BufReader, BufWriter}; use std::net; use std::sync::atomic::AtomicUsize; use std::sync::{Arc, RwLock}; use std::thread; use schema::Schema; use backend::NoriaBackend; pub fn logger_pls() -> slog::Logger { use slog::Drain; use slog::Logger; use slog_term::term_full; use std::sync::Mutex; Logger::root(Mutex::new(term_full()).fuse(), o!()) } fn main() { use clap::{App, Arg}; let matches = App::new("distributary-mysql") .version("0.0.1") .about("MySQL shim for Noria.") .arg( Arg::with_name("deployment") .long("deployment") .takes_value(true) .required(true) .help("Noria deployment ID to attach to."), ).arg( Arg::with_name("zk_addr") .long("zookeeper-address") .short("z") .default_value("127.0.0.1:2181") .help("IP:PORT for Zookeeper."), ).arg( Arg::with_name("port") .long("port") .short("p") .default_value("3306") .takes_value(true) .help("Port to listen on."), ).arg( Arg::with_name("slowlog") .long("log-slow") .help("Log slow queries (> 5ms)"), ).arg( Arg::with_name("no-static-responses") .long("no-static-responses") .takes_value(false) .help("Disable checking for queries requiring static responses. Improves latency."), ).arg( Arg::with_name("no-sanitize") .long("no-sanitize") .takes_value(false) .help("Disable query sanitization. Improves latency."), ).arg(Arg::with_name("verbose").long("verbose").short("v")) .get_matches(); let deployment = matches.value_of("deployment").unwrap().to_owned(); let port = value_t_or_exit!(matches, "port", u16); let slowlog = matches.is_present("slowlog"); let zk_addr = matches.value_of("zk_addr").unwrap().to_owned(); let sanitize = !matches.is_present("no-sanitiz
e"); let static_responses = !matches.is_present("no-static-responses"); let listener = net::TcpListener::bind(format!("127.0.0.1:{}", port)).unwrap(); let log = logger_pls(); info!(log, "listening on port {}", port); let query_counter = Arc::new(AtomicUsize::new(0)); let schemas: Arc<RwLock<HashMap<String, Schema>>> = Arc::default(); let auto_increments: Arc<RwLock<HashMap<String, AtomicUsize>>> = Arc::default(); let query_cache: Arc<RwLock<HashMap<SelectStatement, String>>> = Arc::default(); let mut threads = Vec::new(); let mut i = 0; while let Ok((s, _)) = listener.accept() { s.set_nodelay(true).unwrap(); let builder = thread::Builder::new().name(format!("handler{}", i)); let (schemas, auto_increments, query_cache, query_counter, log) = ( schemas.clone(), auto_increments.clone(), query_cache.clone(), query_counter.clone(), log.clone(), ); let zk_addr = zk_addr.clone(); let deployment = deployment.clone(); let jh = builder .spawn(move || { let b = NoriaBackend::new( &zk_addr, &deployment, schemas, auto_increments, query_cache, query_counter, slowlog, static_responses, sanitize, log, ); let rs = s.try_clone().unwrap(); if let Err(e) = MysqlIntermediary::run_on(b, BufReader::new(rs), BufWriter::new(s)) { match e.kind() { io::ErrorKind::ConnectionReset | io::ErrorKind::BrokenPipe => {} _ => { panic!("{:?}", e); } } } }).unwrap(); threads.push(jh); i += 1; } for t in threads.drain(..) { t.join() .map_err(|e| e.downcast::<io::Error>().unwrap()) .unwrap(); } }
function_block-function_prefixed
[ { "content": "fn collapse_where_in_recursive(\n\n leftmost_param_index: &mut usize,\n\n expr: &mut ConditionExpression,\n\n rewrite_literals: bool,\n\n) -> Option<(usize, Vec<Literal>)> {\n\n match *expr {\n\n ConditionExpression::Base(ConditionBase::Literal(Literal::Placeholder)) => {\n\n *leftmost_param_index += 1;\n\n None\n\n }\n\n ConditionExpression::Base(ConditionBase::NestedSelect(ref mut sq)) => {\n\n if let Some(ref mut w) = sq.where_clause {\n\n collapse_where_in_recursive(leftmost_param_index, w, rewrite_literals)\n\n } else {\n\n None\n\n }\n\n }\n\n ConditionExpression::Base(ConditionBase::LiteralList(ref list)) => {\n\n *leftmost_param_index += list.iter().filter(|&l| *l == Literal::Placeholder).count();\n\n None\n", "file_path": "src/rewrite.rs", "rank": 2, "score": 53806.4077404141 }, { "content": "// Helper for flatten_conditional - returns true if the\n\n// expression is \"valid\" (i.e. not something like `a = 1 AND a = 2`.\n\n// Goes through the condition tree by gradually filling up primary key slots.\n\n//\n\n// Example:\n\n// (CREATE TABLE A (aid int, uid int, PRIMARY KEY(aid, uid))\n\n// `WHERE aid = 1 AND uid = 2` has the following tree:\n\n//\n\n// +--+ AND +--+\n\n// | |\n\n// + +\n\n// aid = 1 uid = 2\n\n//\n\n// After processing the left side `flattened` will look something like this: {[(aid, 1)]}\n\n// Then we'll check the right side, which will find a \"hole\" in the first key,\n\n// and we'll get {[(aid, 1), (uid, 2)]}.\n\nfn do_flatten_conditional(\n\n cond: &ConditionExpression,\n\n pkey: &Vec<&Column>,\n\n mut flattened: &mut HashSet<Vec<(String, DataType)>>,\n\n) -> bool {\n\n match *cond {\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Literal(ref l)),\n\n right: box ConditionExpression::Base(ConditionBase::Field(ref c)),\n\n operator: Operator::Equal,\n\n })\n\n | ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Field(ref c)),\n\n right: box ConditionExpression::Base(ConditionBase::Literal(ref l)),\n\n operator: Operator::Equal,\n\n }) => {\n\n if !pkey.contains(&c) {\n\n panic!(\"UPDATE/DELETE only supports WHERE-clauses on primary keys\");\n\n }\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 53248.49322989989 }, { "content": "fn walk_update_where(\n\n col2v: &mut HashMap<String, DataType>,\n\n params: &mut Option<<ParamParser as IntoIterator>::IntoIter>,\n\n expr: ConditionExpression,\n\n) {\n\n match expr {\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n operator: Operator::Equal,\n\n left: box ConditionExpression::Base(ConditionBase::Field(c)),\n\n right: box ConditionExpression::Base(ConditionBase::Literal(l)),\n\n }) => {\n\n let v = match l {\n\n Literal::Placeholder => params\n\n .as_mut()\n\n .expect(\"Found placeholder in ad-hoc query\")\n\n .next()\n\n .map(|pv| pv.value.to_datatype())\n\n .expect(\"Not enough parameter values given in EXECUTE\"),\n\n v => DataType::from(v),\n\n };\n", "file_path": "src/utils.rs", "rank": 4, "score": 53246.796758364566 }, { "content": "// Initializes a Noria worker and starts processing MySQL queries against it.\n\nfn setup(deployment: &Deployment) -> mysql::Opts {\n\n let zk_addr = \"127.0.0.1:2181\";\n\n // Run with VERBOSE=1 for log output.\n\n let verbose = match env::var(\"VERBOSE\") {\n\n Ok(value) => {\n\n let i: u32 = value.parse().unwrap();\n\n i == 1\n\n }\n\n Err(_) => false,\n\n };\n\n\n\n let logger = if verbose {\n\n noria_server::logger_pls()\n\n } else 
{\n\n slog::Logger::root(slog::Discard, o!())\n\n };\n\n\n\n let l = logger.clone();\n\n let n = deployment.name.clone();\n\n thread::spawn(move || {\n", "file_path": "tests/integration.rs", "rank": 5, "score": 49176.78878145406 }, { "content": "struct NoriaBackendInner {\n\n noria: ControllerHandle<ZookeeperAuthority>,\n\n inputs: BTreeMap<String, Table>,\n\n outputs: BTreeMap<String, View>,\n\n}\n\n\n\nimpl NoriaBackendInner {\n\n fn new(zk_addr: &str, deployment: &str, log: &slog::Logger) -> Self {\n\n let mut zk_auth = ZookeeperAuthority::new(&format!(\"{}/{}\", zk_addr, deployment)).unwrap();\n\n zk_auth.log_with(log.clone());\n\n\n\n debug!(log, \"Connecting to Noria...\",);\n\n let mut ch = ControllerHandle::new(zk_auth).unwrap();\n\n\n\n let b = NoriaBackendInner {\n\n inputs: ch\n\n .inputs()\n\n .expect(\"couldn't get inputs from Noria\")\n\n .into_iter()\n\n .map(|(n, _)| (n.clone(), ch.table(&n).unwrap()))\n", "file_path": "src/backend.rs", "rank": 6, "score": 46442.82953654594 }, { "content": "fn sleep() {\n\n thread::sleep(Duration::from_millis(200));\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 7, "score": 34463.97373051017 }, { "content": "fn get_parameter_columns_recurse(cond: &ConditionExpression) -> Vec<&Column> {\n\n match *cond {\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Field(ref c)),\n\n right: box ConditionExpression::Base(ConditionBase::Literal(Literal::Placeholder)),\n\n operator: Operator::Equal,\n\n })\n\n | ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Literal(Literal::Placeholder)),\n\n right: box ConditionExpression::Base(ConditionBase::Field(ref c)),\n\n operator: Operator::Equal,\n\n }) => vec![c],\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Field(ref c)),\n\n right: box ConditionExpression::Base(ConditionBase::LiteralList(ref literals)),\n\n operator: Operator::In,\n\n })\n\n if (|| literals.iter().all(|l| *l == Literal::Placeholder))() =>\n\n {\n\n // the weird extra closure above is due to\n", "file_path": "src/utils.rs", "rank": 8, "score": 33195.803472320666 }, { "content": "#[test]\n\nfn delete_other_column() {\n\n let d = Deployment::new(\"delete_other_column\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"DELETE FROM Cats WHERE Cats.id = 1 OR Cats.name = \\\"Bob\\\"\")\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 9, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn delete_basic() {\n\n let d = Deployment::new(\"delete_basic\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id) VALUES (1)\").unwrap();\n\n sleep();\n\n\n\n let row = conn\n\n .query(\"SELECT Cats.id FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .next();\n\n assert!(row.is_some());\n\n\n\n {\n\n let deleted = conn.query(\"DELETE FROM Cats WHERE Cats.id = 1\").unwrap();\n\n assert_eq!(deleted.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let row = conn\n\n .query(\"SELECT Cats.id FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .next();\n\n assert!(row.is_none());\n\n}\n\n\n", "file_path": 
"tests/integration.rs", "rank": 10, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn delete_no_keys() {\n\n let d = Deployment::new(\"delete_no_keys\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"DELETE FROM Cats WHERE 1 = 1\").unwrap_err();\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 11, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_separate() {\n\n let d = Deployment::new(\"update_separate\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let updated = conn\n\n .query(\"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.id = 1\")\n\n .unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n", "file_path": "tests/integration.rs", "rank": 12, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_other_column() {\n\n let d = Deployment::new(\"update_no_keys\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n let query = \"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.name = \\\"Bob\\\"\";\n\n conn.query(query).unwrap_err();\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 13, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_bogus() {\n\n let d = Deployment::new(\"update_bogus\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.id = 1 AND Cats.id = 2\")\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 14, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_pkey() {\n\n let d = Deployment::new(\"update_pkey\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let query = \"UPDATE Cats SET Cats.name = \\\"Rusty\\\", Cats.id = 10 WHERE Cats.id = 1\";\n\n let updated = conn.query(query).unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let name: String = conn\n", "file_path": "tests/integration.rs", "rank": 15, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_no_keys() {\n\n let d = Deployment::new(\"update_no_keys\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n let query = \"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE 1 = 1\";\n\n conn.query(query).unwrap_err();\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 16, "score": 
32765.931830124966 }, { "content": "#[test]\n\nfn update_basic() {\n\n let d = Deployment::new(\"update_basic\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let updated = conn\n\n .query(\"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.id = 1\")\n\n .unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let name: String = conn\n\n .first(\"SELECT Cats.name FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(name, String::from(\"\\\"Rusty\\\"\"));\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 17, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn delete_only_constraint() {\n\n let d = Deployment::new(\"delete_only_constraint\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n // Note that this doesn't have `id int PRIMARY KEY` like the other tests:\n\n conn.query(\"CREATE TABLE Cats (id int, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let deleted = conn.query(\"DELETE FROM Cats WHERE Cats.id = 1\").unwrap();\n\n assert_eq!(deleted.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let row = conn\n\n .query(\"SELECT Cats.id FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .next();\n\n assert!(row.is_none());\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 18, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_only_constraint() {\n\n let d = Deployment::new(\"update_only_constraint\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n // Note that this doesn't have `id int PRIMARY KEY` like the other tests:\n\n conn.query(\"CREATE TABLE Cats (id int, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let updated = conn\n\n .query(\"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.id = 1\")\n\n .unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let name: String = conn\n\n .first(\"SELECT Cats.name FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(name, String::from(\"\\\"Rusty\\\"\"));\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 19, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn select_collapse_where_in() {\n\n let d = Deployment::new(\"collapsed_where\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (2, \\\"Jane\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n let names: Vec<String> = conn\n\n .query(\"SELECT Cats.name FROM Cats WHERE Cats.id IN (1, 2)\")\n\n .unwrap()\n\n .map(|row| row.unwrap().take::<String, _>(0).unwrap())\n\n .collect();\n\n assert_eq!(names.len(), 2);\n", "file_path": "tests/integration.rs", "rank": 20, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn delete_multiple() {\n\n let d = 
Deployment::new(\"delete_multiple\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n for i in 1..4 {\n\n conn.query(format!(\"INSERT INTO Cats (id) VALUES ({})\", i))\n\n .unwrap();\n\n sleep();\n\n }\n\n\n\n {\n\n let deleted = conn\n\n .query(\"DELETE FROM Cats WHERE Cats.id = 1 OR Cats.id = 2\")\n\n .unwrap();\n\n assert_eq!(deleted.affected_rows(), 2);\n\n sleep();\n", "file_path": "tests/integration.rs", "rank": 21, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn delete_bogus() {\n\n let d = Deployment::new(\"delete_bogus\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n // `id` can't be both 1 and 2!\n\n let deleted = conn\n\n .query(\"DELETE FROM Cats WHERE Cats.id = 1 AND Cats.id = 2\")\n\n .unwrap();\n\n assert_eq!(deleted.affected_rows(), 0);\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 22, "score": 32765.931830124966 }, { "content": "#[test]\n\n#[ignore]\n\nfn update_no_changes() {\n\n // ignored because we currently *always* return 1 row(s) affected.\n\n\n\n let d = Deployment::new(\"update_no_changes\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n let updated = conn\n\n .query(\"UPDATE Cats SET Cats.name = \\\"Bob\\\" WHERE Cats.id = 1\")\n\n .unwrap();\n\n assert_eq!(updated.affected_rows(), 0);\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 23, "score": 32765.931830124966 }, { "content": "#[test]\n\nfn update_basic_prepared() {\n\n let d = Deployment::new(\"update_basic\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, name VARCHAR(255), PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id, name) VALUES (1, \\\"Bob\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let updated = conn\n\n .prep_exec(\n\n \"UPDATE Cats SET Cats.name = \\\"Rusty\\\" WHERE Cats.id = ?\",\n\n (1,),\n\n ).unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n", "file_path": "tests/integration.rs", "rank": 24, "score": 31307.5515858839 }, { "content": "#[test]\n\nfn delete_bogus_valid_and() {\n\n let d = Deployment::new(\"delete_bogus_valid_and\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, PRIMARY KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id) VALUES (1)\").unwrap();\n\n sleep();\n\n\n\n let row = conn\n\n .query(\"SELECT Cats.id FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .next();\n\n assert!(row.is_some());\n\n\n\n {\n\n // Not that it makes much sense, but we should support this regardless...\n\n let deleted = conn\n", "file_path": "tests/integration.rs", "rank": 25, "score": 31307.5515858839 }, { "content": "#[test]\n\nfn delete_bogus_valid_or() {\n\n let d = Deployment::new(\"delete_bogus_valid_or\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Cats (id int PRIMARY KEY, PRIMARY 
KEY(id))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Cats (id) VALUES (1)\").unwrap();\n\n sleep();\n\n\n\n let row = conn\n\n .query(\"SELECT Cats.id FROM Cats WHERE Cats.id = 1\")\n\n .unwrap()\n\n .next();\n\n assert!(row.is_some());\n\n\n\n {\n\n // Not that it makes much sense, but we should support this regardless...\n\n let deleted = conn\n", "file_path": "tests/integration.rs", "rank": 26, "score": 31307.5515858839 }, { "content": "#[test]\n\nfn update_compound_primary_key() {\n\n let d = Deployment::new(\"update_compound_primary_key\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Vote (aid int, uid int, reason VARCHAR(255), PRIMARY KEY(aid, uid))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Vote (aid, uid, reason) VALUES (1, 2, \\\"okay\\\")\")\n\n .unwrap();\n\n conn.query(\"INSERT INTO Vote (aid, uid, reason) VALUES (1, 3, \\\"still okay\\\")\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let q = \"UPDATE Vote SET Vote.reason = \\\"better\\\" WHERE Vote.aid = 1 AND Vote.uid = 2\";\n\n let updated = conn.query(q).unwrap();\n\n assert_eq!(updated.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let q = \"SELECT Vote.reason FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 2\";\n\n let name: String = conn.first(q).unwrap().unwrap();\n\n assert_eq!(name, String::from(\"\\\"better\\\"\"));\n\n\n\n let q = \"SELECT Vote.reason FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 3\";\n\n let name: String = conn.first(q).unwrap().unwrap();\n\n assert_eq!(name, String::from(\"\\\"still okay\\\"\"));\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 27, "score": 30041.43883053317 }, { "content": "#[test]\n\nfn delete_compound_primary_key() {\n\n let d = Deployment::new(\"delete_compound_primary_key\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Vote (aid int, uid int, reason VARCHAR(255), PRIMARY KEY(aid, uid))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Vote (aid, uid) VALUES (1, 2)\")\n\n .unwrap();\n\n conn.query(\"INSERT INTO Vote (aid, uid) VALUES (1, 3)\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let q = \"DELETE FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 2\";\n\n let deleted = conn.query(q).unwrap();\n\n assert_eq!(deleted.affected_rows(), 1);\n\n sleep();\n\n }\n\n\n\n let q = \"SELECT Vote.uid FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 2\";\n\n let row = conn.query(q).unwrap().next();\n\n assert!(row.is_none());\n\n\n\n let q = \"SELECT Vote.uid FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 3\";\n\n let uid: i32 = conn.first(q).unwrap().unwrap();\n\n assert_eq!(uid, 3);\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 28, "score": 30041.43883053317 }, { "content": "#[test]\n\nfn delete_multi_compound_primary_key() {\n\n let d = Deployment::new(\"delete_multi_compound_primary_key\");\n\n let opts = setup(&d);\n\n let mut conn = mysql::Conn::new(opts).unwrap();\n\n conn.query(\"CREATE TABLE Vote (aid int, uid int, reason VARCHAR(255), PRIMARY KEY(aid, uid))\")\n\n .unwrap();\n\n sleep();\n\n\n\n conn.query(\"INSERT INTO Vote (aid, uid) VALUES (1, 2)\")\n\n .unwrap();\n\n conn.query(\"INSERT INTO Vote (aid, uid) VALUES (1, 3)\")\n\n .unwrap();\n\n sleep();\n\n\n\n {\n\n let q = \"DELETE FROM Vote WHERE (Vote.aid = 1 AND Vote.uid = 2) OR (Vote.aid = 1 AND Vote.uid = 3)\";\n\n let deleted = conn.query(q).unwrap();\n\n assert_eq!(deleted.affected_rows(), 2);\n\n sleep();\n\n }\n\n\n\n for _ in 2..4 
{\n\n let q = \"SELECT Vote.uid FROM Vote WHERE Vote.aid = 1 AND Vote.uid = 2\";\n\n let row = conn.query(q).unwrap().next();\n\n assert!(row.is_none());\n\n }\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 29, "score": 28931.923135032033 }, { "content": "use chrono;\n\nuse noria::DataType;\n\nuse msql_srv::{Value, ValueInner};\n\nuse nom_sql::{Literal, Real};\n\n\n\nuse arccstr::ArcCStr;\n\nuse std::convert::TryFrom;\n\n\n\npub(crate) trait ToDataType {\n\n fn to_datatype(self) -> DataType;\n\n}\n\n\n\nconst TINYTEXT_WIDTH: usize = 15;\n\n\n\nimpl ToDataType for Literal {\n\n fn to_datatype(self) -> DataType {\n\n match self {\n\n Literal::Null => DataType::None,\n\n Literal::String(b) => b.into(),\n\n Literal::Blob(b) => {\n", "file_path": "src/convert.rs", "rank": 30, "score": 25231.917046276038 }, { "content": " Literal::CurrentTime | Literal::CurrentTimestamp => {\n\n DataType::Timestamp(chrono::Local::now().naive_local())\n\n }\n\n Literal::Placeholder => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> ToDataType for Value<'a> {\n\n fn to_datatype(self) -> DataType {\n\n match self.into_inner() {\n\n ValueInner::NULL => DataType::None,\n\n ValueInner::Bytes(b) => DataType::Text(ArcCStr::try_from(b).unwrap()),\n\n ValueInner::Int(i) => i.into(),\n\n ValueInner::UInt(i) => (i as i32).into(),\n\n ValueInner::Double(f) => f.into(),\n\n ValueInner::Datetime(_) => DataType::Timestamp(self.into()),\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n", "file_path": "src/convert.rs", "rank": 31, "score": 25219.375822331538 }, { "content": " let len = b.len();\n\n if len <= TINYTEXT_WIDTH {\n\n let mut bytes = [0; TINYTEXT_WIDTH];\n\n if len != 0 {\n\n let bts = &mut bytes[0..len];\n\n bts.copy_from_slice(&b);\n\n }\n\n DataType::TinyText(bytes)\n\n } else {\n\n DataType::Text(ArcCStr::try_from(&b[..]).unwrap())\n\n }\n\n }\n\n Literal::Integer(i) => i.into(),\n\n Literal::FixedPoint(Real {\n\n integral,\n\n fractional,\n\n }) => DataType::Real(integral as i64, fractional as i32),\n\n Literal::CurrentDate => {\n\n DataType::Timestamp(chrono::Local::today().and_hms(0, 0, 0).naive_local())\n\n }\n", "file_path": "src/convert.rs", "rank": 32, "score": 25217.89356377187 }, { "content": "use msql_srv;\n\nuse nom_sql::{\n\n self, ColumnConstraint, CreateTableStatement, CreateViewStatement, FieldDefinitionExpression,\n\n FieldValueExpression, InsertStatement, Literal, SelectSpecification, SelectStatement, SqlQuery,\n\n SqlType,\n\n};\n\n\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n\npub enum Schema {\n\n Table(CreateTableStatement),\n\n View(CreateViewStatement),\n\n}\n\n\n\n#[allow(dead_code)]\n\npub(crate) fn schema_for_query(\n\n schemas: &HashMap<String, Schema>,\n\n q: &SqlQuery,\n\n) -> Vec<msql_srv::Column> {\n", "file_path": "src/schema.rs", "rank": 40, "score": 25011.55294414004 }, { "content": " match *q {\n\n SqlQuery::Select(ref q) => schema_for_select(schemas, q),\n\n SqlQuery::Insert(ref q) => schema_for_insert(schemas, q),\n\n\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\npub(crate) fn schema_for_insert(\n\n schemas: &HashMap<String, Schema>,\n\n q: &InsertStatement,\n\n) -> Vec<msql_srv::Column> {\n\n let mut schema = Vec::new();\n\n for c in q.fields.as_ref().unwrap() {\n\n // XXX(malte): ewww the hackery\n\n let mut cc = c.clone();\n\n cc.table = Some(q.table.name.clone());\n\n schema.push(schema_for_column(schemas, &cc));\n\n }\n\n schema\n", "file_path": "src/schema.rs", "rank": 41, "score": 25010.501410201643 }, { "content": " }\n\n 
schema\n\n}\n\n\n\npub(crate) fn schema_for_column(\n\n schemas: &HashMap<String, Schema>,\n\n c: &nom_sql::Column,\n\n) -> msql_srv::Column {\n\n let get_fields = |sq: &SelectStatement| {\n\n sq.fields\n\n .iter()\n\n .filter_map(|f| match f {\n\n FieldDefinitionExpression::Col(ref cc) => if cc.name == c.name\n\n || (cc.alias.is_some() && &c.name == cc.alias.as_ref().unwrap())\n\n {\n\n let mut outc = schema_for_column(schemas, cc);\n\n // don't use the source table name\n\n outc.column = c.name.to_owned();\n\n Some(outc)\n\n } else {\n", "file_path": "src/schema.rs", "rank": 42, "score": 25009.324429443637 }, { "content": "}\n\n\n\npub(crate) fn schema_for_select(\n\n table_schemas: &HashMap<String, Schema>,\n\n q: &SelectStatement,\n\n) -> Vec<msql_srv::Column> {\n\n let mut schema = Vec::new();\n\n for fe in &q.fields {\n\n match *fe {\n\n nom_sql::FieldDefinitionExpression::Col(ref c) => {\n\n schema.push(schema_for_column(table_schemas, &c));\n\n }\n\n nom_sql::FieldDefinitionExpression::Value(FieldValueExpression::Literal(ref le)) => {\n\n schema.push(msql_srv::Column {\n\n table: \"\".to_owned(),\n\n column: match le.alias {\n\n Some(ref a) => a.to_owned(),\n\n None => le.value.to_string(),\n\n },\n\n coltype: match le.value {\n", "file_path": "src/schema.rs", "rank": 43, "score": 25008.537427782547 }, { "content": " }).next()\n\n .expect(&format!(\"column {} not found\", c.name))\n\n };\n\n\n\n if let Some(ref table) = c.table {\n\n match schemas\n\n .get(table)\n\n .expect(&format!(\"Table/view {} not found!\", table))\n\n {\n\n Schema::Table(CreateTableStatement { ref fields, .. }) => {\n\n let col_schema = fields\n\n .iter()\n\n .find(|cc| cc.column.name == c.name)\n\n .expect(&format!(\"column {} not found\", c.name));\n\n\n\n assert_eq!(col_schema.column.name, c.name);\n\n\n\n msql_srv::Column {\n\n table: table.clone(),\n\n column: c.name.clone(),\n", "file_path": "src/schema.rs", "rank": 44, "score": 25004.046746281096 }, { "content": " }\n\n ColumnConstraint::NotNull => {\n\n flags |= msql_srv::ColumnFlags::NOT_NULL_FLAG;\n\n }\n\n ColumnConstraint::PrimaryKey => {\n\n flags |= msql_srv::ColumnFlags::PRI_KEY_FLAG;\n\n }\n\n ColumnConstraint::Unique => {\n\n flags |= msql_srv::ColumnFlags::UNIQUE_KEY_FLAG;\n\n }\n\n _ => (),\n\n }\n\n }\n\n flags\n\n },\n\n }\n\n }\n\n Schema::View(CreateViewStatement { ref definition, .. 
}) => match **definition {\n\n SelectSpecification::Simple(ref sq) => get_fields(sq),\n\n SelectSpecification::Compound(ref csq) => get_fields(&csq.selects[0].1),\n", "file_path": "src/schema.rs", "rank": 45, "score": 25002.404277480186 }, { "content": " coltype: match col_schema.sql_type {\n\n SqlType::Mediumtext => msql_srv::ColumnType::MYSQL_TYPE_VAR_STRING,\n\n SqlType::Longtext => msql_srv::ColumnType::MYSQL_TYPE_BLOB,\n\n SqlType::Text => msql_srv::ColumnType::MYSQL_TYPE_STRING,\n\n SqlType::Varchar(_) => msql_srv::ColumnType::MYSQL_TYPE_VAR_STRING,\n\n SqlType::Int(_) => msql_srv::ColumnType::MYSQL_TYPE_LONG,\n\n SqlType::Bigint(_) => msql_srv::ColumnType::MYSQL_TYPE_LONGLONG,\n\n SqlType::Tinyint(_) => msql_srv::ColumnType::MYSQL_TYPE_TINY,\n\n SqlType::Bool => msql_srv::ColumnType::MYSQL_TYPE_BIT,\n\n SqlType::DateTime => msql_srv::ColumnType::MYSQL_TYPE_DATETIME,\n\n SqlType::Float => msql_srv::ColumnType::MYSQL_TYPE_DOUBLE,\n\n SqlType::Decimal(_, _) => msql_srv::ColumnType::MYSQL_TYPE_DECIMAL,\n\n _ => unimplemented!(),\n\n },\n\n colflags: {\n\n let mut flags = msql_srv::ColumnFlags::empty();\n\n for c in &col_schema.constraints {\n\n match *c {\n\n ColumnConstraint::AutoIncrement => {\n\n flags |= msql_srv::ColumnFlags::AUTO_INCREMENT_FLAG;\n", "file_path": "src/schema.rs", "rank": 46, "score": 25002.211430736315 }, { "content": " Literal::Integer(_) => msql_srv::ColumnType::MYSQL_TYPE_LONG,\n\n Literal::String(_) => msql_srv::ColumnType::MYSQL_TYPE_VAR_STRING,\n\n _ => unimplemented!(),\n\n },\n\n colflags: msql_srv::ColumnFlags::empty(),\n\n })\n\n }\n\n nom_sql::FieldDefinitionExpression::Value(FieldValueExpression::Arithmetic(ref ae)) => {\n\n schema.push(msql_srv::Column {\n\n table: \"\".to_owned(),\n\n column: match ae.alias {\n\n Some(ref a) => a.to_owned(),\n\n None => format!(\"{}\", ae),\n\n },\n\n coltype: msql_srv::ColumnType::MYSQL_TYPE_LONG,\n\n colflags: msql_srv::ColumnFlags::empty(),\n\n })\n\n }\n\n _ => unimplemented!(),\n\n }\n", "file_path": "src/schema.rs", "rank": 47, "score": 25002.160066634806 }, { "content": " None\n\n },\n\n FieldDefinitionExpression::Value(ref v) => {\n\n let alias = match *v {\n\n FieldValueExpression::Arithmetic(ref a) => &a.alias,\n\n FieldValueExpression::Literal(ref l) => &l.alias,\n\n };\n\n if let Some(a) = alias {\n\n if a == &c.name {\n\n return Some(msql_srv::Column {\n\n table: \"\".to_owned(),\n\n column: a.to_owned(),\n\n coltype: msql_srv::ColumnType::MYSQL_TYPE_LONG,\n\n colflags: msql_srv::ColumnFlags::empty(),\n\n });\n\n }\n\n }\n\n None\n\n }\n\n _ => None,\n", "file_path": "src/schema.rs", "rank": 48, "score": 25001.16174219861 }, { "content": " },\n\n }\n\n } else {\n\n msql_srv::Column {\n\n table: \"\".into(),\n\n column: c.name.clone(),\n\n coltype: msql_srv::ColumnType::MYSQL_TYPE_LONG,\n\n colflags: msql_srv::ColumnFlags::empty(),\n\n }\n\n }\n\n}\n", "file_path": "src/schema.rs", "rank": 49, "score": 24999.35132552946 }, { "content": "use nom_sql::{\n\n Column, ConditionBase, ConditionExpression, ConditionTree, CreateTableStatement,\n\n CreateViewStatement, FieldDefinitionExpression, FieldValueExpression, Literal, Operator,\n\n SelectSpecification, SelectStatement, SqlQuery,\n\n};\n\n\n\nuse std::collections::HashMap;\n\nuse std::mem;\n\n\n\nuse schema::Schema;\n\n\n\npub(crate) fn expand_stars(sq: &mut SelectStatement, table_schemas: &HashMap<String, Schema>) {\n\n let do_expand_select = |sq: &SelectStatement, table_name: &str| {\n\n sq.fields\n\n .iter()\n\n .map(|ref f| match *f {\n\n 
FieldDefinitionExpression::Col(ref c) => FieldDefinitionExpression::Col(Column {\n\n table: Some(table_name.to_owned()),\n\n name: c.alias.as_ref().unwrap_or(&c.name).to_owned(),\n\n alias: None,\n", "file_path": "src/rewrite.rs", "rank": 50, "score": 24817.94876696678 }, { "content": " );\n\n\n\n if literals.is_empty() {\n\n eprintln!(\"spotted empty WHERE IN ()\");\n\n }\n\n\n\n Some((*leftmost_param_index, literals))\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn collapse_where_in(\n\n query: &mut SqlQuery,\n\n rewrite_literals: bool,\n\n) -> Option<(usize, Vec<Literal>)> {\n\n if let SqlQuery::Select(ref mut sq) = *query {\n\n if let Some(ref mut w) = sq.where_clause {\n\n let mut left_edge = 0;\n\n return collapse_where_in_recursive(&mut left_edge, w, rewrite_literals);\n\n }\n", "file_path": "src/rewrite.rs", "rank": 51, "score": 24817.48292124485 }, { "content": " }\n\n None\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use nom_sql;\n\n\n\n #[test]\n\n fn collapsed_where_placeholders() {\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE x.y IN (?, ?, ?)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE x.y = ?\").unwrap()\n\n );\n\n\n", "file_path": "src/rewrite.rs", "rank": 52, "score": 24815.12358353571 }, { "content": " Schema::Table(CreateTableStatement { ref fields, .. }) => fields\n\n .iter()\n\n .cloned()\n\n .map(move |f| {\n\n FieldDefinitionExpression::Col(Column {\n\n table: Some(table_name.to_owned()),\n\n name: f.column.name.clone(),\n\n alias: None,\n\n function: None,\n\n })\n\n }).collect::<Vec<_>>(),\n\n Schema::View(CreateViewStatement {\n\n ref name,\n\n ref definition,\n\n ..\n\n }) => match **definition {\n\n SelectSpecification::Compound(ref csq) => {\n\n // use the first select's columns\n\n do_expand_select(&csq.selects[0].1, name)\n\n }\n", "file_path": "src/rewrite.rs", "rank": 53, "score": 24813.30244937777 }, { "content": " }\n\n ConditionExpression::Base(_) => None,\n\n ConditionExpression::NegationOp(ref mut ce)\n\n | ConditionExpression::Bracketed(ref mut ce) => {\n\n collapse_where_in_recursive(leftmost_param_index, ce, rewrite_literals)\n\n }\n\n ConditionExpression::LogicalOp(ref mut ct) => {\n\n collapse_where_in_recursive(leftmost_param_index, &mut *ct.left, rewrite_literals)\n\n .or_else(|| {\n\n // we can't also try rewriting ct.right, as it'd make it hard to recover\n\n // literals: if we rewrote WHERE x IN (a, b) in left and WHERE y IN (1, 2) in\n\n // right into WHERE x = ? ... 
y = ?, then what param values should we use?\n\n collapse_where_in_recursive(\n\n leftmost_param_index,\n\n &mut *ct.right,\n\n rewrite_literals,\n\n )\n\n })\n\n }\n\n ConditionExpression::ComparisonOp(ref mut ct) if ct.operator != Operator::In => {\n", "file_path": "src/rewrite.rs", "rank": 54, "score": 24812.104031486095 }, { "content": " function: None,\n\n }),\n\n FieldDefinitionExpression::Value(ref v) => FieldDefinitionExpression::Col(Column {\n\n table: Some(table_name.to_owned()),\n\n name: match *v {\n\n FieldValueExpression::Arithmetic(ref a) => &a.alias,\n\n FieldValueExpression::Literal(ref l) => &l.alias,\n\n }.clone()\n\n .unwrap(),\n\n alias: None,\n\n function: None,\n\n }),\n\n _ => unimplemented!(),\n\n }).collect::<Vec<_>>()\n\n };\n\n\n\n let expand_table = |table_name: String| match table_schemas\n\n .get(&table_name)\n\n .expect(&format!(\"table/view named `{}` does not exist\", table_name))\n\n {\n", "file_path": "src/rewrite.rs", "rank": 55, "score": 24811.561220579908 }, { "content": " q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE x.y IN (1, 2, 3)\").unwrap()\n\n );\n\n\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE x.y IN (1, 2, 3)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, true).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE x.y = ?\").unwrap()\n\n );\n\n\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE y IN (1, 2, 3)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, true).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE y = ?\").unwrap()\n", "file_path": "src/rewrite.rs", "rank": 56, "score": 24810.85706127347 }, { "content": " let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE y IN (?, ?, ?)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE y = ?\").unwrap()\n\n );\n\n\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE AVG(y) IN (?, ?, ?)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE AVG(y) = ?\").unwrap()\n\n );\n\n\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM t WHERE x = ? AND y IN (?, ?, ?) 
OR z = ?\")\n\n .unwrap();\n", "file_path": "src/rewrite.rs", "rank": 57, "score": 24810.81747334639 }, { "content": " }\n\n } else {\n\n Vec::new()\n\n };\n\n\n\n if !do_it {\n\n return collapse_where_in_recursive(\n\n leftmost_param_index,\n\n &mut *ct.left,\n\n rewrite_literals,\n\n ).or_else(|| {\n\n collapse_where_in_recursive(\n\n leftmost_param_index,\n\n &mut *ct.right,\n\n rewrite_literals,\n\n )\n\n });\n\n }\n\n\n\n if let ConditionExpression::Base(ConditionBase::Field(_)) = *ct.left {\n", "file_path": "src/rewrite.rs", "rank": 58, "score": 24810.79944632365 }, { "content": " );\n\n\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE AVG(y) IN (1, 2, 3)\").unwrap();\n\n let rewritten = collapse_where_in(&mut q, true).unwrap();\n\n assert_eq!(rewritten.0, 0);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE AVG(y) = ?\").unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn noninterference() {\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE x.y = 'foo'\").unwrap();\n\n assert_eq!(collapse_where_in(&mut q, true), None);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM x WHERE x.y = 'foo'\").unwrap()\n\n );\n\n }\n\n}\n", "file_path": "src/rewrite.rs", "rank": 59, "score": 24810.72870873342 }, { "content": " collapse_where_in_recursive(leftmost_param_index, &mut *ct.left, rewrite_literals)\n\n .or_else(|| {\n\n collapse_where_in_recursive(\n\n leftmost_param_index,\n\n &mut *ct.right,\n\n rewrite_literals,\n\n )\n\n })\n\n }\n\n ConditionExpression::ComparisonOp(ref mut ct) => {\n\n let mut do_it = false;\n\n let literals =\n\n if let ConditionExpression::Base(ConditionBase::LiteralList(ref mut list)) =\n\n *ct.right\n\n {\n\n if rewrite_literals || list.iter().all(|l| *l == Literal::Placeholder) {\n\n do_it = true;\n\n mem::replace(list, Vec::new())\n\n } else {\n\n Vec::new()\n", "file_path": "src/rewrite.rs", "rank": 60, "score": 24810.697529216828 }, { "content": "\n\n let mut q = nom_sql::parse_query(\n\n \"SELECT * FROM t WHERE x IN (SELECT * FROM z WHERE b = ? AND a IN (?, ?)) OR z = ?\",\n\n ).unwrap();\n\n let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 1);\n\n assert_eq!(rewritten.1.len(), 2);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\n\n \"SELECT * FROM t WHERE x IN (SELECT * FROM z WHERE b = ? AND a = ?) OR z = ?\",\n\n ).unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn collapsed_where_literals() {\n\n let mut q = nom_sql::parse_query(\"SELECT * FROM x WHERE x.y IN (1, 2, 3)\").unwrap();\n\n assert_eq!(collapse_where_in(&mut q, false), None);\n\n assert_eq!(\n", "file_path": "src/rewrite.rs", "rank": 61, "score": 24810.554521750757 }, { "content": " let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 1);\n\n assert_eq!(rewritten.1.len(), 3);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\"SELECT * FROM t WHERE x = ? AND y = ? OR z = ?\").unwrap()\n\n );\n\n\n\n let mut q = nom_sql::parse_query(\n\n \"SELECT * FROM t WHERE x IN (SELECT * FROM z WHERE a = ?) AND y IN (?, ?) OR z = ?\",\n\n ).unwrap();\n\n let rewritten = collapse_where_in(&mut q, false).unwrap();\n\n assert_eq!(rewritten.0, 1);\n\n assert_eq!(rewritten.1.len(), 2);\n\n assert_eq!(\n\n q,\n\n nom_sql::parse_query(\n\n \"SELECT * FROM t WHERE x IN (SELECT * FROM z WHERE a = ?) AND y = ? 
OR z = ?\"\n\n ).unwrap()\n\n );\n", "file_path": "src/rewrite.rs", "rank": 62, "score": 24810.518527500186 }, { "content": " SelectSpecification::Simple(ref sq) => do_expand_select(sq, name),\n\n },\n\n };\n\n\n\n let old_fields = mem::replace(&mut sq.fields, vec![]);\n\n sq.fields = old_fields\n\n .into_iter()\n\n .flat_map(|field| match field {\n\n FieldDefinitionExpression::All => {\n\n let v: Vec<_> = sq\n\n .tables\n\n .iter()\n\n .map(|t| t.name.clone())\n\n .flat_map(&expand_table)\n\n .collect();\n\n v.into_iter()\n\n }\n\n FieldDefinitionExpression::AllInTable(t) => {\n\n let v: Vec<_> = expand_table(t);\n\n v.into_iter()\n\n }\n\n e @ FieldDefinitionExpression::Value(_) => vec![e].into_iter(),\n\n FieldDefinitionExpression::Col(c) => {\n\n vec![FieldDefinitionExpression::Col(c)].into_iter()\n\n }\n\n }).collect();\n\n}\n\n\n", "file_path": "src/rewrite.rs", "rank": 63, "score": 24809.12183852274 }, { "content": " } else {\n\n unimplemented!();\n\n }\n\n\n\n let c = mem::replace(\n\n &mut ct.left,\n\n Box::new(ConditionExpression::Base(ConditionBase::Literal(\n\n Literal::Placeholder,\n\n ))),\n\n );\n\n\n\n mem::replace(\n\n ct,\n\n ConditionTree {\n\n operator: Operator::Equal,\n\n left: c,\n\n right: Box::new(ConditionExpression::Base(ConditionBase::Literal(\n\n Literal::Placeholder,\n\n ))),\n\n },\n", "file_path": "src/rewrite.rs", "rank": 64, "score": 24807.68483198364 }, { "content": " Regex::new(r\"(?i)select get_lock\\(.*\\) as lockstatus\").unwrap(),\n\n vec![(\"lockstatus\", \"1\")],\n\n ),\n\n (\n\n Regex::new(r\"(?i)select release_lock\\(.*\\) as lockstatus\").unwrap(),\n\n vec![(\"lockstatus\", \"1\")],\n\n ),\n\n ];\n\n pub(crate) static ref COMMENTS: Vec<(Regex, &'static str)> = vec![\n\n (Regex::new(r\"(?s)/\\*.*\\*/\").unwrap(), \"\"),\n\n (Regex::new(r\"--.*\\n\").unwrap(), \"\\n\"),\n\n ];\n\n pub(crate) static ref COLLAPSE_SPACES: (Regex, &'static str) =\n\n (Regex::new(r\" +\").unwrap(), \" \");\n\n}\n\n\n\npub(crate) fn sanitize_query(query: &str) -> String {\n\n let query = Cow::from(query);\n\n for &(ref pattern, replacement) in &*COMMENTS {\n\n pattern.replace_all(&query, replacement);\n", "file_path": "src/utils.rs", "rank": 65, "score": 24165.38528905791 }, { "content": "use std::collections::HashSet;\n\n\n\nuse convert::ToDataType;\n\nuse noria::{DataType, Modification, Operation};\n\nuse msql_srv::ParamParser;\n\nuse nom_sql::{\n\n ArithmeticBase, ArithmeticExpression, ArithmeticOperator, Column, ColumnConstraint,\n\n ConditionBase, ConditionExpression, ConditionTree, CreateTableStatement, FieldValueExpression,\n\n Literal, LiteralExpression, Operator, SqlQuery, TableKey, UpdateStatement,\n\n};\n\nuse regex::Regex;\n\nuse std::borrow::Cow;\n\nuse std::collections::HashMap;\n\n\n\nlazy_static! 
{\n\n pub(crate) static ref HARD_CODED_REPLIES: Vec<(Regex, Vec<(&'static str, &'static str)>)> = vec![\n\n (\n\n Regex::new(r\"(?i)select version\\(\\) limit 1\").unwrap(),\n\n vec![(\"version()\", \"10.1.26-MariaDB-0+deb9u1\")],\n\n ),\n", "file_path": "src/utils.rs", "rank": 66, "score": 24163.071306316902 }, { "content": "\n\npub(crate) fn get_parameter_columns(query: &SqlQuery) -> Vec<&Column> {\n\n match *query {\n\n SqlQuery::Select(ref query) => {\n\n if let Some(ref wc) = query.where_clause {\n\n get_parameter_columns_recurse(wc)\n\n } else {\n\n vec![]\n\n }\n\n }\n\n SqlQuery::Insert(ref query) => {\n\n assert_eq!(query.data.len(), 1);\n\n // need to find for which fields we *actually* have a parameter\n\n query.data[0]\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, v)| match *v {\n\n Literal::Placeholder => Some(&query.fields.as_ref().unwrap()[i]),\n\n _ => None,\n\n }).collect()\n", "file_path": "src/utils.rs", "rank": 67, "score": 24157.39580034757 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use nom_sql::{self, SqlQuery};\n\n\n\n fn compare_flatten<I>(cond_query: &str, key: Vec<&str>, expected: Option<Vec<Vec<I>>>)\n\n where\n\n I: Into<DataType>,\n\n {\n\n let cond = match nom_sql::parse_query(cond_query).unwrap() {\n\n SqlQuery::Update(u) => u.where_clause.unwrap(),\n\n SqlQuery::Delete(d) => d.where_clause.unwrap(),\n\n _ => unreachable!(),\n\n };\n\n\n\n let pkey: Vec<Column> = key\n\n .into_iter()\n\n .map(|k| Column {\n\n name: String::from(k),\n\n table: Some(String::from(\"T\")),\n", "file_path": "src/utils.rs", "rank": 68, "score": 24157.006570075424 }, { "content": " if key.len() != pkey.len() {\n\n panic!(\"UPDATE/DELETE requires all columns of a compound key to be present\");\n\n }\n\n\n\n key.into_iter().map(|(_c, v)| v).collect()\n\n }).collect();\n\n\n\n Some(keys)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n// Finds the primary for the given table, both by looking at constraints on individual\n\n// columns and by searching through keys.\n\npub(crate) fn get_primary_key(schema: &CreateTableStatement) -> Vec<(usize, &Column)> {\n\n schema\n\n .fields\n\n .iter()\n\n .enumerate()\n", "file_path": "src/utils.rs", "rank": 69, "score": 24156.12310534906 }, { "content": " fn get_schema(query: &str) -> CreateTableStatement {\n\n match nom_sql::parse_query(query).unwrap() {\n\n SqlQuery::CreateTable(c) => c,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_flatten_conditional() {\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1 OR T.a = 2\",\n\n vec![\"a\"],\n\n Some(vec![vec![1], vec![2]]),\n\n );\n\n compare_flatten(\n", "file_path": "src/utils.rs", "rank": 70, "score": 24154.75372694518 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_flatten_conditional_partial_key_update() {\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1]]),\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_parameter_column_extraction() {\n\n let query = \"SELECT `votes`.* FROM `votes` WHERE `votes`.`user_id` = 1 \\\n\n AND `votes`.`story_id` = ? 
AND `votes`.`comment_id` IS NULL \\\n\n ORDER BY `votes`.`id` ASC LIMIT 1\";\n\n let q = nom_sql::parse_query(query).unwrap();\n\n\n\n let pc = get_parameter_columns(&q);\n\n\n\n assert_eq!(pc, vec![&Column::from(\"votes.story_id\")]);\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 71, "score": 24154.02244200333 }, { "content": " );\n\n\n\n let with_both =\n\n get_schema(\"CREATE TABLE A (other int, id int PRIMARY KEY, PRIMARY KEY (id))\");\n\n assert_eq!(\n\n get_primary_key(&with_both),\n\n vec![(1, &with_both.fields[1].column)]\n\n );\n\n\n\n let with_none = get_schema(\"CREATE TABLE A (other int, id int)\");\n\n assert_eq!(get_primary_key(&with_none), vec![]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_flatten_conditional_non_key_delete() {\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.b = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n", "file_path": "src/utils.rs", "rank": 72, "score": 24153.394402951162 }, { "content": " );\n\n compare_flatten::<DataType>(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 AND T.b = 2 AND T.a = 3\",\n\n vec![\"a\", \"b\"],\n\n None,\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_get_primary_key() {\n\n let with_field = get_schema(\"CREATE TABLE A (other int, id int PRIMARY KEY)\");\n\n assert_eq!(\n\n get_primary_key(&with_field),\n\n vec![(1, &with_field.fields[1].column)]\n\n );\n\n\n\n let with_const = get_schema(\"CREATE TABLE A (other int, id int, PRIMARY KEY (id))\");\n\n assert_eq!(\n\n get_primary_key(&with_const),\n\n vec![(1, &with_const.fields[1].column)]\n", "file_path": "src/utils.rs", "rank": 73, "score": 24153.105674512302 }, { "content": " let oldv = col2v.insert(c.name, v);\n\n assert!(oldv.is_none());\n\n }\n\n ConditionExpression::LogicalOp(ConditionTree {\n\n operator: Operator::And,\n\n left,\n\n right,\n\n }) => {\n\n // recurse\n\n walk_update_where(col2v, params, *left);\n\n walk_update_where(col2v, params, *right);\n\n }\n\n _ => unimplemented!(\"Fancy high-brow UPDATEs are not supported\"),\n\n }\n\n}\n\n\n\npub(crate) fn extract_update_params_and_fields(\n\n q: &mut UpdateStatement,\n\n params: &mut Option<<ParamParser as IntoIterator>::IntoIter>,\n\n schema: &CreateTableStatement,\n", "file_path": "src/utils.rs", "rank": 74, "score": 24152.845730007037 }, { "content": " }\n\n ArithmeticOperator::Subtract => {\n\n updates.push((i, Modification::Apply(Operation::Sub, l.into())))\n\n }\n\n _ => unimplemented!(),\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n updates\n\n}\n\n\n\npub(crate) fn extract_update(\n\n mut q: UpdateStatement,\n\n params: Option<ParamParser>,\n", "file_path": "src/utils.rs", "rank": 75, "score": 24152.819528367818 }, { "content": " }\n\n let query = COLLAPSE_SPACES.0.replace_all(&query, COLLAPSE_SPACES.1);\n\n let query = query.replace('\"', \"'\");\n\n let query = query.trim();\n\n query.to_owned()\n\n}\n\n\n\n// Helper for flatten_conditional - returns true if the\n\n// expression is \"valid\" (i.e. 
not something like `a = 1 AND a = 2`.\n\n// Goes through the condition tree by gradually filling up primary key slots.\n\n//\n\n// Example:\n\n// (CREATE TABLE A (aid int, uid int, PRIMARY KEY(aid, uid))\n\n// `WHERE aid = 1 AND uid = 2` has the following tree:\n\n//\n\n// +--+ AND +--+\n\n// | |\n\n// + +\n\n// aid = 1 uid = 2\n\n//\n\n// After processing the left side `flattened` will look something like this: {[(aid, 1)]}\n\n// Then we'll check the right side, which will find a \"hole\" in the first key,\n\n// and we'll get {[(aid, 1), (uid, 2)]}.\n", "file_path": "src/utils.rs", "rank": 76, "score": 24152.71913778495 }, { "content": ") -> Vec<(usize, Modification)> {\n\n let mut updates = Vec::new();\n\n for (i, field) in schema.fields.iter().enumerate() {\n\n if let Some(sets) = q\n\n .fields\n\n .iter()\n\n .position(|&(ref f, _)| f.name == field.column.name)\n\n {\n\n match q.fields.swap_remove(sets).1 {\n\n FieldValueExpression::Literal(LiteralExpression {\n\n value: Literal::Placeholder,\n\n alias: None,\n\n }) => {\n\n let v = params\n\n .as_mut()\n\n .expect(\"Found placeholder in ad-hoc query\")\n\n .next()\n\n .map(|pv| pv.value.to_datatype())\n\n .expect(\"Not enough parameter values given in EXECUTE\");\n\n updates.push((i, Modification::Set(v)));\n", "file_path": "src/utils.rs", "rank": 77, "score": 24152.342708370532 }, { "content": " .filter(|&(_, ref cs)| {\n\n cs.constraints.contains(&ColumnConstraint::PrimaryKey) || match schema.keys {\n\n // Try finding PRIMARY KEY constraints in keys as well:\n\n Some(ref keys) => keys.iter().any(|key| match *key {\n\n TableKey::PrimaryKey(ref cols) => cols.iter().any(|c| c == &cs.column),\n\n _ => false,\n\n }),\n\n _ => false,\n\n }\n\n }).map(|(i, cs)| (i, &cs.column))\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 78, "score": 24152.306913867127 }, { "content": " }\n\n}\n\n\n\n// Takes a tree of conditional expressions for a DELETE/UPDATE statement and returns a list of all the\n\n// keys that should be mutated.\n\n// Panics if given a WHERE-clause containing other keys than the primary.\n\n// DELETE FROM a WHERE key = 1 OR key = 2 -> Some([[1], [2]])\n\n// DELETE FROM a WHERE key = 1 OR key = 2 AND key = 3 -> None // Bogus query\n\n// DELETE FROM a WHERE key = 1 AND key = 1 -> Some([[1]])\n\npub(crate) fn flatten_conditional(\n\n cond: &ConditionExpression,\n\n pkey: &Vec<&Column>,\n\n) -> Option<Vec<Vec<DataType>>> {\n\n let mut flattened = HashSet::new();\n\n if do_flatten_conditional(cond, pkey, &mut flattened) {\n\n let keys = flattened\n\n .into_iter()\n\n .map(|key| {\n\n // This will be the case if we got a cond without any primary keys,\n\n // or if we have a multi-column primary key and the cond only covers part of it.\n", "file_path": "src/utils.rs", "rank": 79, "score": 24151.998585294208 }, { "content": " (\n\n Regex::new(r\"(?i)show engines\").unwrap(),\n\n vec![\n\n (\"Engine\", \"InnoDB\"),\n\n (\"Support\", \"DEFAULT\"),\n\n (\"Comment\", \"\"),\n\n (\"Transactions\", \"YES\"),\n\n (\"XA\", \"YES\"),\n\n (\"Savepoints\", \"YES\"),\n\n ],\n\n ),\n\n (\n\n Regex::new(r\"SELECT 1 AS ping\").unwrap(),\n\n vec![(\"ping\", \"1\")],\n\n ),\n\n (\n\n Regex::new(r\"(?i)show global variables like 'read_only'\").unwrap(),\n\n vec![(\"Variable_name\", \"read_only\"), (\"Value\", \"OFF\")],\n\n ),\n\n (\n", "file_path": "src/utils.rs", "rank": 80, "score": 24151.302516523992 }, { "content": " }\n\n SqlQuery::Update(ref query) => {\n\n let field_params = query.fields.iter().filter_map(|f| {\n\n if let 
FieldValueExpression::Literal(LiteralExpression {\n\n value: Literal::Placeholder,\n\n alias: None,\n\n }) = f.1\n\n {\n\n Some(&f.0)\n\n } else {\n\n None\n\n }\n\n });\n\n\n\n let where_params = if let Some(ref wc) = query.where_clause {\n\n get_parameter_columns_recurse(wc)\n\n } else {\n\n vec![]\n\n };\n\n\n\n field_params.chain(where_params.into_iter()).collect()\n\n }\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 81, "score": 24151.118188870238 }, { "content": " schema: &CreateTableStatement,\n\n) -> (Vec<DataType>, Vec<(usize, Modification)>) {\n\n let mut params = params.map(|p| p.into_iter());\n\n let updates = extract_update_params_and_fields(&mut q, &mut params, schema);\n\n\n\n let pkey = get_primary_key(schema);\n\n let where_clause = q\n\n .where_clause\n\n .expect(\"UPDATE without WHERE is not supported\");\n\n let mut col_to_val: HashMap<_, _> = HashMap::new();\n\n walk_update_where(&mut col_to_val, &mut params, where_clause);\n\n\n\n let key: Vec<_> = pkey\n\n .iter()\n\n .map(|&(_, c)| col_to_val.remove(&c.name).unwrap())\n\n .collect();\n\n\n\n (key, updates)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 82, "score": 24150.55662657407 }, { "content": " }\n\n FieldValueExpression::Literal(LiteralExpression {\n\n value: ref v,\n\n alias: None,\n\n }) => {\n\n updates.push((i, Modification::Set(DataType::from(v))));\n\n }\n\n FieldValueExpression::Arithmetic(ref ae) => {\n\n // we only support \"column = column +/- literal\"\n\n match ae {\n\n ArithmeticExpression {\n\n op,\n\n left: ArithmeticBase::Column(ref c),\n\n right: ArithmeticBase::Scalar(ref l),\n\n alias: None,\n\n } => {\n\n assert_eq!(c, &field.column);\n\n match op {\n\n ArithmeticOperator::Add => {\n\n updates.push((i, Modification::Apply(Operation::Add, l.into())))\n", "file_path": "src/utils.rs", "rank": 83, "score": 24150.252135603798 }, { "content": " flattened.insert(vec![(c.name.clone(), value)]);\n\n }\n\n\n\n true\n\n }\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Literal(ref left)),\n\n right: box ConditionExpression::Base(ConditionBase::Literal(ref right)),\n\n operator: Operator::Equal,\n\n })\n\n if left == right =>\n\n {\n\n true\n\n }\n\n ConditionExpression::LogicalOp(ConditionTree {\n\n operator: Operator::And,\n\n ref left,\n\n ref right,\n\n }) => {\n\n // When checking ANDs we want to make sure that both sides refer to the same key,\n", "file_path": "src/utils.rs", "rank": 84, "score": 24149.540899202053 }, { "content": " let value = DataType::from(l);\n\n // We want to look through our existing keys and see if any of them\n\n // are missing any columns. 
In that case we'll add the one we're looking\n\n // at now there.\n\n let with_space = flattened\n\n .iter()\n\n .find(|key| {\n\n key.len() < pkey.len() && !key.iter().any(|&(ref name, _)| name == &c.name)\n\n })\n\n // Not a very happy clone, but using a HashSet here simplifies the AND\n\n // logic by letting us ignore identical clauses (and we need the .clone()\n\n // to be able to \"mutate\" key).\n\n .and_then(|key| Some(key.clone()));\n\n\n\n if let Some(mut key) = with_space {\n\n flattened.remove(&key);\n\n key.push((c.name.clone(), value));\n\n flattened.insert(key);\n\n } else {\n\n // There were no existing keys with space, so let's create a new one:\n", "file_path": "src/utils.rs", "rank": 85, "score": 24149.311843697957 }, { "content": " );\n\n\n\n // Valid, but bogus, ANDs:\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1 AND T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 AND T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1 AND 1 = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 AND 1 = 1\",\n", "file_path": "src/utils.rs", "rank": 86, "score": 24147.877851837682 }, { "content": " operator: Operator::And,\n\n ref left,\n\n ref right,\n\n })\n\n | ConditionExpression::LogicalOp(ConditionTree {\n\n operator: Operator::Or,\n\n ref left,\n\n ref right,\n\n }) => {\n\n let mut l = get_parameter_columns_recurse(left);\n\n let mut r = get_parameter_columns_recurse(right);\n\n l.append(&mut r);\n\n l\n\n }\n\n ConditionExpression::NegationOp(ref expr) | ConditionExpression::Bracketed(ref expr) => {\n\n get_parameter_columns_recurse(expr)\n\n }\n\n _ => unimplemented!(),\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 87, "score": 24147.877851837682 }, { "content": " // e.g. 
WHERE A.a = 1 AND A.a = 1\n\n // or for compound primary keys:\n\n // WHERE A.a = AND a.b = 2\n\n // but also bogus stuff like `WHERE 1 = 1 AND 2 = 2`.\n\n let pre_count = flattened.len();\n\n do_flatten_conditional(&*left, pkey, &mut flattened) && {\n\n let count = flattened.len();\n\n let valid = do_flatten_conditional(&*right, pkey, &mut flattened);\n\n valid && (pre_count == flattened.len() || count == flattened.len())\n\n }\n\n }\n\n ConditionExpression::LogicalOp(ConditionTree {\n\n operator: Operator::Or,\n\n ref left,\n\n ref right,\n\n }) => {\n\n do_flatten_conditional(&*left, pkey, &mut flattened)\n\n && do_flatten_conditional(&*right, pkey, &mut flattened)\n\n }\n\n _ => false,\n", "file_path": "src/utils.rs", "rank": 88, "score": 24147.877851837682 }, { "content": " // https://github.com/rust-lang/rfcs/issues/1006\n\n vec![c; literals.len()]\n\n }\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Field(_)),\n\n right: box ConditionExpression::Base(ConditionBase::Literal(_)),\n\n operator: _,\n\n })\n\n | ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Literal(_)),\n\n right: box ConditionExpression::Base(ConditionBase::Field(_)),\n\n operator: _,\n\n }) => vec![],\n\n // comma joins and column equality comparisons\n\n ConditionExpression::ComparisonOp(ConditionTree {\n\n left: box ConditionExpression::Base(ConditionBase::Field(_)),\n\n right: box ConditionExpression::Base(ConditionBase::Field(_)),\n\n operator: _,\n\n }) => vec![],\n\n ConditionExpression::LogicalOp(ConditionTree {\n", "file_path": "src/utils.rs", "rank": 89, "score": 24147.877851837682 }, { "content": " \"UPDATE T SET T.b = 2 WHERE T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 OR T.a = 2\",\n\n vec![\"a\"],\n\n Some(vec![vec![1], vec![2]]),\n\n );\n\n\n\n // Valid, but bogus, ORs:\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1 OR T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 OR T.a = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n", "file_path": "src/utils.rs", "rank": 90, "score": 24147.877851837682 }, { "content": " compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1 AND T.b = 2\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n\n \"DELETE FROM T WHERE (T.a = 1 AND T.b = 2) OR (T.a = 10 OR T.b = 20)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2], vec![10, 20]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 AND T.b = 2\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE (T.a = 1 AND T.b = 2) OR (T.a = 10 OR T.b = 20)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2], vec![10, 20]]),\n\n );\n", "file_path": "src/utils.rs", "rank": 91, "score": 24147.877851837682 }, { "content": " \"UPDATE T SET T.b = 2 WHERE (T.a = 1 AND T.b = 2) AND (T.a = 1 AND T.b = 2)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n\n \"DELETE FROM T WHERE (T.a = 1 AND T.b = 2) AND 1 = 1\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE (T.a = 1 AND T.b = 2) AND 1 = 1\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n\n\n // Invalid ANDs:\n\n compare_flatten::<DataType>(\n\n \"DELETE FROM T WHERE T.a = 1 AND T.b = 2 AND T.a = 
3\",\n\n vec![\"a\", \"b\"],\n\n None,\n", "file_path": "src/utils.rs", "rank": 92, "score": 24147.877851837682 }, { "content": " alias: None,\n\n function: None,\n\n }).collect();\n\n\n\n let pkey_ref = pkey.iter().map(|c| c).collect();\n\n if let Some(mut actual) = flatten_conditional(&cond, &pkey_ref) {\n\n let mut expected: Vec<Vec<DataType>> = expected\n\n .unwrap()\n\n .into_iter()\n\n .map(|v| v.into_iter().map(|c| c.into()).collect())\n\n .collect();\n\n\n\n actual.sort();\n\n expected.sort();\n\n assert_eq!(actual, expected);\n\n } else {\n\n assert!(expected.is_none());\n\n }\n\n }\n\n\n", "file_path": "src/utils.rs", "rank": 93, "score": 24147.877851837682 }, { "content": " vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n\n\n // We can't really handle these at the moment, but in the future we might want to\n\n // delete/update all rows:\n\n compare_flatten::<DataType>(\"DELETE FROM T WHERE 1 = 1\", vec![\"a\"], Some(vec![]));\n\n compare_flatten::<DataType>(\"UPDATE T SET T.b = 2 WHERE 1 = 1\", vec![\"a\"], Some(vec![]));\n\n\n\n // Invalid ANDs:\n\n compare_flatten::<DataType>(\"DELETE FROM T WHERE T.a = 1 AND T.a = 2\", vec![\"a\"], None);\n\n compare_flatten::<DataType>(\n\n \"UPDATE T SET T.b = 2 WHERE T.a = 1 AND T.a = 2\",\n\n vec![\"a\"],\n\n None,\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_flatten_conditional_compound_key() {\n", "file_path": "src/utils.rs", "rank": 94, "score": 24147.877851837682 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_flatten_conditional_non_key_update() {\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE T.b = 1\",\n\n vec![\"a\"],\n\n Some(vec![vec![1]]),\n\n );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_flatten_conditional_partial_key_delete() {\n\n compare_flatten(\n\n \"DELETE FROM T WHERE T.a = 1\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1]]),\n", "file_path": "src/utils.rs", "rank": 95, "score": 24147.877851837682 }, { "content": "\n\n // Valid, but bogus, ORs:\n\n compare_flatten(\n\n \"DELETE FROM T WHERE (T.a = 1 AND T.b = 2) OR (T.a = 1 AND T.b = 2)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n\n \"UPDATE T SET T.b = 2 WHERE (T.a = 1 AND T.b = 2) OR (T.a = 1 AND T.b = 2)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n\n\n // Valid, but bogus, ANDs:\n\n compare_flatten(\n\n \"DELETE FROM T WHERE (T.a = 1 AND T.b = 2) AND (T.a = 1 AND T.b = 2)\",\n\n vec![\"a\", \"b\"],\n\n Some(vec![vec![1, 2]]),\n\n );\n\n compare_flatten(\n", "file_path": "src/utils.rs", "rank": 96, "score": 24147.877851837682 }, { "content": " auto_increments: Arc<RwLock<HashMap<String, atomic::AtomicUsize>>>,\n\n query_cache: Arc<RwLock<HashMap<SelectStatement, String>>>,\n\n query_counter: Arc<atomic::AtomicUsize>,\n\n slowlog: bool,\n\n static_responses: bool,\n\n sanitize: bool,\n\n log: slog::Logger,\n\n ) -> Self {\n\n NoriaBackend {\n\n inner: NoriaBackendInner::new(zk_addr, deployment, &log),\n\n log: log,\n\n\n\n table_schemas: schemas,\n\n auto_increments: auto_increments,\n\n\n\n query_count: query_counter,\n\n\n\n prepared: HashMap::new(),\n\n prepared_count: 0,\n\n\n", "file_path": "src/backend.rs", "rank": 97, "score": 23403.747964360242 }, { "content": "use noria::{ControllerHandle, DataType, Table, View, ZookeeperAuthority};\n\n\n\nuse msql_srv::{self, *};\n\nuse nom_sql::{\n\n self, ColumnConstraint, InsertStatement, Literal, SelectSpecification, SelectStatement,\n\n SqlQuery, UpdateStatement,\n\n};\n\n\n\nuse slog;\n\nuse std::borrow::Cow;\n\nuse 
std::collections::{BTreeMap, HashMap};\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::sync::atomic;\n\nuse std::sync::{self, Arc, RwLock};\n\nuse std::time;\n\n\n\nuse convert::ToDataType;\n\nuse rewrite;\n\nuse schema::{schema_for_column, schema_for_insert, schema_for_select, Schema};\n\nuse utils;\n\n\n\n#[derive(Clone)]\n", "file_path": "src/backend.rs", "rank": 98, "score": 23403.115708181307 }, { "content": " format!(\"{:?}\", e).as_bytes(),\n\n )\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<W: io::Write> MysqlShim<W> for NoriaBackend {\n\n fn on_prepare(&mut self, query: &str, info: StatementMetaWriter<W>) -> io::Result<()> {\n\n trace!(self.log, \"prepare: {}\", query);\n\n\n\n let query = if self.sanitize {\n\n utils::sanitize_query(query)\n\n } else {\n\n query.to_owned()\n\n };\n\n\n\n let sql_q = match self.parsed.get(&query) {\n\n None => match nom_sql::parse_query(&query) {\n\n Ok(mut sql_q) => {\n", "file_path": "src/backend.rs", "rank": 99, "score": 23402.83865641223 } ]
Rust
crates/tako/benches/benchmarks/worker.rs
User3574/hyperqueue
d4dea5a805925cb624eb81da65840d5a8226d4a9
use std::time::Duration; use criterion::measurement::WallTime; use criterion::{BatchSize, BenchmarkGroup, BenchmarkId, Criterion}; use tako::common::index::{AsIdVec, ItemId}; use tako::common::resources::descriptor::GenericResourceKindIndices; use tako::common::resources::map::ResourceMap; use tako::common::resources::{ CpuRequest, GenericResourceDescriptor, GenericResourceDescriptorKind, GenericResourceRequest, ResourceDescriptor, ResourceRequest, TimeRequest, }; use tokio::sync::mpsc::unbounded_channel; use tokio::sync::oneshot::Receiver; use tako::messages::worker::ComputeTaskMsg; use tako::worker::launcher::{TaskLaunchData, TaskLauncher}; use tako::worker::rqueue::ResourceWaitQueue; use tako::worker::state::{TaskMap, WorkerState, WorkerStateRef}; use tako::worker::task::Task; use tako::worker::taskenv::{StopReason, TaskResult}; use tako::TaskId; use crate::create_worker; struct BenchmarkTaskLauncher; impl TaskLauncher for BenchmarkTaskLauncher { fn build_task( &self, _state: &WorkerState, _task_id: TaskId, _stop_receiver: Receiver<StopReason>, ) -> tako::Result<TaskLaunchData> { Ok(TaskLaunchData::from_future(Box::pin(async move { Ok(TaskResult::Finished) }))) } } fn create_worker_state() -> WorkerStateRef { let worker = create_worker(1); let (tx, _) = unbounded_channel(); let (tx2, _) = unbounded_channel(); WorkerStateRef::new( worker.id, worker.configuration, None, tx, tx2, Default::default(), Default::default(), Box::new(BenchmarkTaskLauncher), ) } fn create_worker_task(id: u64) -> Task { Task::new(ComputeTaskMsg { id: TaskId::new(id as <TaskId as ItemId>::IdType), instance_id: Default::default(), dep_info: vec![], user_priority: 0, scheduler_priority: 0, resources: Default::default(), time_limit: None, n_outputs: 0, body: vec![], }) } macro_rules! 
measure_time { ($body: block) => {{ let start = ::std::time::Instant::now(); $body start.elapsed() }} } fn bench_add_task(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("add task", task_count), &task_count, |b, &task_count| { b.iter_custom(|iters| { let mut total = Duration::new(0, 0); for _ in 0..iters { let state = create_worker_state(); let mut state = state.get_mut(); for id in 0..task_count { state.add_task(create_worker_task(id)); } let task = create_worker_task(task_count); let duration = measure_time!({ state.add_task(task); }); total += duration; } total }); }, ); } } fn bench_add_tasks(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("add tasks", task_count), &task_count, |b, &task_count| { b.iter_batched( || { let state = create_worker_state(); let tasks: Vec<_> = (0..task_count).map(|id| create_worker_task(id)).collect(); (state, tasks) }, |(state, tasks)| { let mut state = state.get_mut(); for task in tasks { state.add_task(task); } }, BatchSize::SmallInput, ); }, ); } } fn bench_cancel_waiting_task(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("cancel waiting task", task_count), &task_count, |b, &task_count| { b.iter_batched_ref( || { let state = create_worker_state(); { let mut state = state.get_mut(); for id in 0..task_count { state.add_task(create_worker_task(id)); } } (state, TaskId::new(0)) }, |(state, task_id)| { let mut state = state.get_mut(); state.cancel_task(*task_id); }, BatchSize::SmallInput, ); }, ); } } fn create_resource_queue(num_cpus: u32) -> ResourceWaitQueue { ResourceWaitQueue::new( &ResourceDescriptor { cpus: vec![(0..num_cpus).collect::<Vec<_>>().to_ids()], generic: vec![GenericResourceDescriptor { name: "GPU".to_string(), kind: GenericResourceDescriptorKind::Indices(GenericResourceKindIndices { start: 0.into(), end: 8.into(), }), }], }, &ResourceMap::from_vec(vec!["GPU".to_string()]), ) } fn bench_resource_queue_add_task(c: &mut BenchmarkGroup<WallTime>) { c.bench_function("add task to resource queue", |b| { b.iter_batched_ref( || (create_resource_queue(64), create_worker_task(0)), |(queue, task)| queue.add_task(task), BatchSize::SmallInput, ); }); } fn bench_resource_queue_release_allocation(c: &mut BenchmarkGroup<WallTime>) { c.bench_function("release allocation from resource queue", |b| { b.iter_batched_ref( || { let mut queue = create_resource_queue(64); let mut task = create_worker_task(0); task.resources = ResourceRequest::new( CpuRequest::Compact(64), TimeRequest::new(0, 0), vec![GenericResourceRequest { resource: 0.into(), amount: 2, }] .into(), ); queue.add_task(&task); let mut map = TaskMap::default(); map.insert(task); let mut started = queue.try_start_tasks(&map, None); (queue, Some(started.pop().unwrap().1)) }, |(queue, allocation)| queue.release_allocation(allocation.take().unwrap()), BatchSize::SmallInput, ); }); } fn bench_resource_queue_start_tasks(c: &mut BenchmarkGroup<WallTime>) { for task_count in [1, 10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("start tasks in resource queue", task_count), &task_count, |b, &task_count| { b.iter_batched_ref( || { let mut queue = create_resource_queue(64); let mut map = TaskMap::default(); for id in 0..task_count { let mut task = create_worker_task(id); task.resources = ResourceRequest::new( CpuRequest::Compact(64), TimeRequest::new(0, 0), vec![GenericResourceRequest { resource: 0.into(), 
amount: 2, }] .into(), ); queue.add_task(&task); map.insert(task); } (queue, map) }, |(queue, map)| queue.try_start_tasks(&map, None), BatchSize::SmallInput, ); }, ); } } pub fn benchmark(c: &mut Criterion) { let mut group = c.benchmark_group("worker"); bench_add_task(&mut group); bench_add_tasks(&mut group); bench_cancel_waiting_task(&mut group); bench_resource_queue_add_task(&mut group); bench_resource_queue_release_allocation(&mut group); bench_resource_queue_start_tasks(&mut group); }
use std::time::Duration; use criterion::measurement::WallTime; use criterion::{BatchSize, BenchmarkGroup, BenchmarkId, Criterion}; use tako::common::index::{AsIdVec, ItemId}; use tako::common::resources::descriptor::GenericResourceKindIndices; use tako::common::resources::map::ResourceMap; use tako::common::resources::{ CpuRequest, GenericResourceDescriptor, GenericResourceDescriptorKind, GenericResourceRequest, ResourceDescriptor, ResourceRequest, TimeRequest, }; use tokio::sync::mpsc::unbounded_channel; use tokio::sync::oneshot::Receiver; use tako::messages::worker::ComputeTaskMsg; use tako::worker::launcher::{TaskLaunchData, TaskLauncher}; use tako::worker::rqueue::ResourceWaitQueue; use tako::worker::state::{TaskMap, WorkerState, WorkerStateRef}; use tako::worker::task::Task; use tako::worker::taskenv::{StopReason, TaskResult}; use tako::TaskId; use crate::create_worker; struct BenchmarkTaskLauncher; impl TaskLauncher for BenchmarkTaskLauncher { fn build_task( &self, _state: &WorkerState, _task_id: TaskId, _stop_receiver: Receiver<StopReason>, ) -> tako::Result<TaskLaunchData> { Ok(TaskLaunchData::from_future(Box::pin(async move { Ok(TaskResult::Finished) }))) } } fn create_worker_state() -> WorkerStateRef { let worker = create_worker(1); let (tx, _) = unbounded_channel(); let (tx2, _) = unbounded_channel(); WorkerStateRef::new( worker.id, worker.configuration, None, tx, tx2, Default::default(), Default::default(), Box::new(BenchmarkTaskLauncher), ) } fn create_worker_task(id: u64) -> Task { Task::new(ComputeTaskMsg { id: TaskId::new(id as <TaskId as ItemId>::IdType), instance_id: Default::default(), dep_info: vec![], user_priority: 0, scheduler_priority: 0, resources: Default::default(), time_limit: None, n_outputs: 0, body: vec![], }) } macro_rules! measure_time { ($body: block) => {{ let start = ::std::time::Instant::now(); $body start.elapsed() }} } fn bench_add_task(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("add task", task_count), &task_count, |b, &task_count| { b.iter_custom(|iters| { let mut total = Duration::new(0, 0); for _ in 0..iters { let state = create_worker_state(); let mut state = state.get_mut(); for id in 0..task_count { state.add_task(create_worker_task(id));
}, &ResourceMap::from_vec(vec!["GPU".to_string()]), ) } fn bench_resource_queue_add_task(c: &mut BenchmarkGroup<WallTime>) { c.bench_function("add task to resource queue", |b| { b.iter_batched_ref( || (create_resource_queue(64), create_worker_task(0)), |(queue, task)| queue.add_task(task), BatchSize::SmallInput, ); }); } fn bench_resource_queue_release_allocation(c: &mut BenchmarkGroup<WallTime>) { c.bench_function("release allocation from resource queue", |b| { b.iter_batched_ref( || { let mut queue = create_resource_queue(64); let mut task = create_worker_task(0); task.resources = ResourceRequest::new( CpuRequest::Compact(64), TimeRequest::new(0, 0), vec![GenericResourceRequest { resource: 0.into(), amount: 2, }] .into(), ); queue.add_task(&task); let mut map = TaskMap::default(); map.insert(task); let mut started = queue.try_start_tasks(&map, None); (queue, Some(started.pop().unwrap().1)) }, |(queue, allocation)| queue.release_allocation(allocation.take().unwrap()), BatchSize::SmallInput, ); }); } fn bench_resource_queue_start_tasks(c: &mut BenchmarkGroup<WallTime>) { for task_count in [1, 10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("start tasks in resource queue", task_count), &task_count, |b, &task_count| { b.iter_batched_ref( || { let mut queue = create_resource_queue(64); let mut map = TaskMap::default(); for id in 0..task_count { let mut task = create_worker_task(id); task.resources = ResourceRequest::new( CpuRequest::Compact(64), TimeRequest::new(0, 0), vec![GenericResourceRequest { resource: 0.into(), amount: 2, }] .into(), ); queue.add_task(&task); map.insert(task); } (queue, map) }, |(queue, map)| queue.try_start_tasks(&map, None), BatchSize::SmallInput, ); }, ); } } pub fn benchmark(c: &mut Criterion) { let mut group = c.benchmark_group("worker"); bench_add_task(&mut group); bench_add_tasks(&mut group); bench_cancel_waiting_task(&mut group); bench_resource_queue_add_task(&mut group); bench_resource_queue_release_allocation(&mut group); bench_resource_queue_start_tasks(&mut group); }
} let task = create_worker_task(task_count); let duration = measure_time!({ state.add_task(task); }); total += duration; } total }); }, ); } } fn bench_add_tasks(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("add tasks", task_count), &task_count, |b, &task_count| { b.iter_batched( || { let state = create_worker_state(); let tasks: Vec<_> = (0..task_count).map(|id| create_worker_task(id)).collect(); (state, tasks) }, |(state, tasks)| { let mut state = state.get_mut(); for task in tasks { state.add_task(task); } }, BatchSize::SmallInput, ); }, ); } } fn bench_cancel_waiting_task(c: &mut BenchmarkGroup<WallTime>) { for task_count in [10, 1_000, 100_000] { c.bench_with_input( BenchmarkId::new("cancel waiting task", task_count), &task_count, |b, &task_count| { b.iter_batched_ref( || { let state = create_worker_state(); { let mut state = state.get_mut(); for id in 0..task_count { state.add_task(create_worker_task(id)); } } (state, TaskId::new(0)) }, |(state, task_id)| { let mut state = state.get_mut(); state.cancel_task(*task_id); }, BatchSize::SmallInput, ); }, ); } } fn create_resource_queue(num_cpus: u32) -> ResourceWaitQueue { ResourceWaitQueue::new( &ResourceDescriptor { cpus: vec![(0..num_cpus).collect::<Vec<_>>().to_ids()], generic: vec![GenericResourceDescriptor { name: "GPU".to_string(), kind: GenericResourceDescriptorKind::Indices(GenericResourceKindIndices { start: 0.into(), end: 8.into(), }), }],
random
[ { "content": "pub fn task_transfer_cost(taskmap: &TaskMap, task: &Task, worker_id: WorkerId) -> u64 {\n\n // TODO: For large number of inputs, only sample inputs\n\n task.inputs\n\n .iter()\n\n .take(512)\n\n .map(|ti| {\n\n let t = taskmap.get_task(ti.task());\n\n let info = t.data_info().unwrap();\n\n if info.placement.contains(&worker_id) {\n\n 0u64\n\n } else if info.future_placement.contains_key(&worker_id) {\n\n 1u64\n\n } else {\n\n info.data_info.size\n\n }\n\n })\n\n .sum()\n\n}\n", "file_path": "crates/tako/src/scheduler/utils.rs", "rank": 1, "score": 302555.8814749165 }, { "content": "fn remove_task_if_possible(core: &mut Core, comm: &mut impl Comm, task_id: TaskId) {\n\n if !core.get_task(task_id).is_removable() {\n\n return;\n\n }\n\n\n\n let ws = match core.remove_task(task_id) {\n\n TaskRuntimeState::Finished(finfo) => finfo.placement,\n\n _ => unreachable!(),\n\n };\n\n for worker_id in ws {\n\n log::debug!(\n\n \"Task id={} is no longer needed, deleting from worker={}\",\n\n task_id,\n\n worker_id\n\n );\n\n comm.send_worker_message(\n\n worker_id,\n\n &ToWorkerMessage::DeleteData(TaskIdMsg { id: task_id }),\n\n );\n\n }\n\n}\n", "file_path": "crates/tako/src/server/reactor.rs", "rank": 2, "score": 294307.57672500634 }, { "content": "fn unregister_as_consumer(core: &mut Core, comm: &mut impl Comm, task_id: TaskId) {\n\n let inputs: Vec<TaskId> = core\n\n .get_task(task_id)\n\n .inputs\n\n .iter()\n\n .map(|ti| ti.task())\n\n .collect();\n\n for input_id in inputs {\n\n let input = core.get_task_mut(input_id);\n\n assert!(input.remove_consumer(task_id));\n\n remove_task_if_possible(core, comm, input_id);\n\n }\n\n}\n\n\n", "file_path": "crates/tako/src/server/reactor.rs", "rank": 4, "score": 290760.70368464163 }, { "content": "pub fn on_new_tasks(core: &mut Core, comm: &mut impl Comm, new_tasks: Vec<Task>) {\n\n assert!(!new_tasks.is_empty());\n\n\n\n let mut task_map: Map<_, _> = new_tasks.into_iter().map(|t| (t.id, t)).collect();\n\n let ids: Vec<_> = task_map.keys().copied().collect();\n\n for task_id in ids {\n\n let mut task = task_map.remove(&task_id).unwrap();\n\n\n\n let mut count = 0;\n\n for ti in &task.inputs {\n\n let input_id = ti.task();\n\n let task_dep = task_map\n\n .get_mut(&input_id)\n\n .unwrap_or_else(|| core.get_task_mut(input_id));\n\n task_dep.add_consumer(task.id);\n\n if !task_dep.is_finished() {\n\n count += 1\n\n }\n\n }\n\n\n", "file_path": "crates/tako/src/server/reactor.rs", "rank": 5, "score": 288881.08400057326 }, { "content": "pub fn on_reset_keep_flag(core: &mut Core, comm: &mut impl Comm, task_id: TaskId) {\n\n let task = core.get_task_mut(task_id);\n\n task.set_keep_flag(false);\n\n remove_task_if_possible(core, comm, task_id);\n\n}\n\n\n", "file_path": "crates/tako/src/server/reactor.rs", "rank": 6, "score": 279727.9092564259 }, { "content": "pub fn create_worker(id: u64) -> Worker {\n\n Worker::new(\n\n WorkerId::new(id as u32),\n\n WorkerConfiguration {\n\n resources: ResourceDescriptor {\n\n cpus: vec![vec![1.into()]],\n\n generic: vec![],\n\n },\n\n listen_address: \"\".to_string(),\n\n hostname: \"\".to_string(),\n\n work_dir: Default::default(),\n\n log_dir: Default::default(),\n\n heartbeat_interval: Default::default(),\n\n send_overview_interval: None,\n\n idle_timeout: None,\n\n time_limit: None,\n\n on_server_lost: ServerLostPolicy::Stop,\n\n extra: Default::default(),\n\n },\n\n Default::default(),\n\n )\n\n}\n\n\n", "file_path": "crates/tako/benches/utils/mod.rs", "rank": 7, "score": 272408.547798241 }, { "content": "pub 
fn start_on_worker<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n worker_id: W,\n\n) {\n\n let mut scheduler = create_test_scheduler();\n\n let mut comm = TestComm::default();\n\n force_assign(core, &mut scheduler, task_id.into(), worker_id.into());\n\n scheduler.finish_scheduling(core, &mut comm);\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 8, "score": 266768.0733726961 }, { "content": "pub fn add_tasks(core: &mut Core, count: usize) -> Vec<TaskId> {\n\n let mut tasks = Vec::with_capacity(count);\n\n for id in 0..count {\n\n let task_id = TaskId::new(id as <TaskId as ItemId>::IdType);\n\n let task = create_task(task_id);\n\n core.add_task(task);\n\n tasks.push(task_id);\n\n }\n\n tasks\n\n}\n", "file_path": "crates/tako/benches/utils/mod.rs", "rank": 9, "score": 265848.7799487257 }, { "content": "pub fn start_and_finish_on_worker<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n worker_id: W,\n\n size: u64,\n\n) {\n\n let task_id = task_id.into();\n\n let worker_id = worker_id.into();\n\n\n\n start_on_worker(core, task_id, worker_id);\n\n finish_on_worker(core, task_id, worker_id, size);\n\n}\n\n\n\npub(crate) fn create_test_scheduler() -> SchedulerState {\n\n SchedulerState::new()\n\n}\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 10, "score": 262914.8052644111 }, { "content": "pub fn start_on_worker_running<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n worker_id: W,\n\n) {\n\n let task_id = task_id.into();\n\n let worker_id = worker_id.into();\n\n\n\n let mut scheduler = create_test_scheduler();\n\n let mut comm = TestComm::default();\n\n force_assign(core, &mut scheduler, task_id, worker_id);\n\n scheduler.finish_scheduling(core, &mut comm);\n\n on_task_running(core, &mut comm, worker_id, task_running_msg(task_id));\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 11, "score": 262914.8052644111 }, { "content": "fn start_stealing<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n new_worker_id: W,\n\n) {\n\n let mut scheduler = schedule::create_test_scheduler();\n\n force_reassign(core, &mut scheduler, task_id.into(), new_worker_id.into());\n\n let mut comm = env::create_test_comm();\n\n scheduler.finish_scheduling(core, &mut comm);\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 12, "score": 262556.6222698348 }, { "content": "pub fn on_new_worker(core: &mut Core, comm: &mut impl Comm, worker: Worker) {\n\n comm.broadcast_worker_message(&ToWorkerMessage::NewWorker(NewWorkerMsg {\n\n worker_id: worker.id,\n\n address: worker.configuration.listen_address.clone(),\n\n }));\n\n\n\n comm.send_client_worker_new(worker.id, &worker.configuration);\n\n\n\n comm.ask_for_scheduling();\n\n core.new_worker(worker);\n\n}\n\n\n", "file_path": "crates/tako/src/server/reactor.rs", "rank": 13, "score": 258057.47077865782 }, { "content": "fn crawl<F1: Fn(&Task) -> &Set<TaskId>>(tasks: &mut TaskMap, predecessor_fn: F1) {\n\n let mut neighbours: Map<TaskId, u32> = Map::with_capacity(tasks.len());\n\n let mut stack: Vec<TaskId> = Vec::new();\n\n for task in tasks.tasks() {\n\n let len = predecessor_fn(task).len() as u32;\n\n if len == 0 {\n\n stack.push(task.id);\n\n } else {\n\n neighbours.insert(task.id, len);\n\n }\n\n }\n\n\n\n while let Some(task_id) = stack.pop() {\n\n let level = predecessor_fn(tasks.get_task(task_id))\n\n .iter()\n\n .map(|&pred_id| 
tasks.get_task(pred_id).get_scheduler_priority())\n\n .max()\n\n .unwrap_or(0);\n\n\n\n let task = tasks.get_task_mut(task_id);\n", "file_path": "crates/tako/src/scheduler/metrics.rs", "rank": 15, "score": 253675.91791654506 }, { "content": "pub fn create_task(id: TaskId) -> Task {\n\n let conf = TaskConfiguration {\n\n resources: Default::default(),\n\n user_priority: 0,\n\n time_limit: None,\n\n n_outputs: 0,\n\n };\n\n Task::new(id, vec![], Rc::new(conf), Default::default(), false, false)\n\n}\n", "file_path": "crates/tako/benches/utils/mod.rs", "rank": 16, "score": 249090.53050929445 }, { "content": "fn check_task_has_worker<T: Into<TaskId>, W: Into<WorkerId>>(\n\n core: &Core,\n\n task_id: T,\n\n worker_id: W,\n\n) {\n\n assert_eq!(\n\n core.get_task(task_id.into()).get_assigned_worker().unwrap(),\n\n worker_id.into()\n\n );\n\n}\n", "file_path": "crates/tako/src/tests/test_scheduler.rs", "rank": 17, "score": 247456.80107455223 }, { "content": "fn get_job_ids(state: &State, selector: Selector) -> Vec<JobId> {\n\n match selector {\n\n Selector::All => state.jobs().map(|job| job.job_id).collect(),\n\n Selector::LastN(n) => state.last_n_ids(n).collect(),\n\n Selector::Specific(array) => array.iter().map(|id| id.into()).collect(),\n\n }\n\n}\n\n\n\nasync fn compose_server_stats(_state_ref: &StateRef, backend: &Backend) -> ToClientMessage {\n\n let stream_stats = {\n\n let (sender, receiver) = oneshot::channel();\n\n backend.send_stream_control(StreamServerControlMessage::Stats(sender));\n\n receiver.await.unwrap()\n\n };\n\n ToClientMessage::StatsResponse(StatsResponse { stream_stats })\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/server/client/mod.rs", "rank": 18, "score": 244027.94488554442 }, { "content": "pub fn submit_test_tasks(core: &mut Core, tasks: Vec<Task>) {\n\n on_new_tasks(core, &mut TestComm::default(), tasks);\n\n}\n\n\n\npub(crate) fn force_assign<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n scheduler: &mut SchedulerState,\n\n task_id: T,\n\n worker_id: W,\n\n) {\n\n let task_id = task_id.into();\n\n core.remove_from_ready_to_assign(task_id);\n\n scheduler.assign(core, task_id, worker_id.into());\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 20, "score": 241888.12692377344 }, { "content": "pub fn task<T: Into<TaskId>>(id: T) -> Task {\n\n TaskBuilder::new(id.into()).outputs(1).build()\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/task.rs", "rank": 21, "score": 238583.444060265 }, { "content": "fn count_available_tasks(state: &State, queue_info: &QueueInfo) -> u64 {\n\n let waiting_tasks: u64 = state\n\n .jobs()\n\n .map(|job| {\n\n let result = match can_provide_worker(job, queue_info) {\n\n true => job.counters.n_waiting_tasks(job.n_tasks()),\n\n false => 0,\n\n };\n\n result as u64\n\n })\n\n .sum();\n\n waiting_tasks\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/server/autoalloc/process.rs", "rank": 22, "score": 236191.5924787997 }, { "content": "fn pin_program(program: &mut ProgramDefinition, allocation: &ResourceAllocation) {\n\n program.args.insert(0, \"taskset\".into());\n\n program.args.insert(1, \"-c\".into());\n\n program\n\n .args\n\n .insert(2, allocation.comma_delimited_cpu_ids().into());\n\n}\n\n\n\n/// Zero-worker mode measures pure overhead of HyperQueue.\n\n/// In this mode the task is not executed at all.\n\n#[cfg(feature = \"zero-worker\")]\n\nasync fn run_task(\n\n _streamer_ref: StreamerRef,\n\n _program: ProgramDefinition,\n\n _job_id: JobId,\n\n _job_task_id: JobTaskId,\n\n 
_instance_id: InstanceId,\n\n _end_receiver: tokio::sync::oneshot::Receiver<StopReason>,\n\n) -> tako::Result<TaskResult> {\n\n Ok(TaskResult::Finished)\n", "file_path": "crates/hyperqueue/src/worker/start.rs", "rank": 23, "score": 231308.30889308784 }, { "content": "pub fn finish_on_worker<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n worker_id: W,\n\n size: u64,\n\n) {\n\n let mut comm = TestComm::default();\n\n on_task_finished(\n\n core,\n\n &mut comm,\n\n worker_id.into(),\n\n TaskFinishedMsg {\n\n id: task_id.into(),\n\n size,\n\n },\n\n );\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 24, "score": 230137.58196123203 }, { "content": "pub fn worker_task<T: Into<TaskId>>(\n\n task_id: T,\n\n resources: ResourceRequest,\n\n u_priority: Priority,\n\n) -> Task {\n\n Task::new(ComputeTaskMsg {\n\n id: task_id.into(),\n\n instance_id: 0.into(),\n\n dep_info: vec![],\n\n user_priority: u_priority,\n\n scheduler_priority: 0,\n\n resources,\n\n time_limit: None,\n\n n_outputs: 0,\n\n body: vec![],\n\n })\n\n}\n\n\n\npub struct ResourceQueueBuilder {\n\n task_map: TaskMap,\n", "file_path": "crates/tako/src/worker/test_util.rs", "rank": 25, "score": 229998.49839428082 }, { "content": "pub fn detect_generic_resource() -> anyhow::Result<Vec<GenericResourceDescriptor>> {\n\n let mut generic = Vec::new();\n\n if let Ok(count) = read_linux_gpu_count() {\n\n if count > 0 {\n\n log::debug!(\"Gpus detected: {}\", count);\n\n generic.push(GenericResourceDescriptor {\n\n name: \"gpus\".to_string(),\n\n kind: GenericResourceDescriptorKind::Indices(GenericResourceKindIndices {\n\n start: GenericResourceIndex::new(0),\n\n end: GenericResourceIndex::new(count as u32 - 1),\n\n }),\n\n })\n\n }\n\n }\n\n Ok(generic)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 26, "score": 227019.45363953736 }, { "content": "/// Returns (working directory, stdout, stderr)\n\nfn get_task_paths(state: &JobTaskState) -> (CellStruct, CellStruct, CellStruct) {\n\n match state {\n\n JobTaskState::Canceled {\n\n started_data: Some(started_data),\n\n }\n\n | JobTaskState::Running { started_data, .. }\n\n | JobTaskState::Finished { started_data, .. }\n\n | JobTaskState::Failed { started_data, .. 
} => {\n\n let ctx = &started_data.context;\n\n (\n\n ctx.cwd.to_str().unwrap().cell(),\n\n stdio_to_cell(&ctx.stdout),\n\n stdio_to_cell(&ctx.stderr),\n\n )\n\n }\n\n _ => (\"\".cell(), \"\".cell(), \"\".cell()),\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 29, "score": 225785.45827277764 }, { "content": "fn fail_steal<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n task_id: T,\n\n worker_id: W,\n\n target_worker_id: W,\n\n) {\n\n let task_id = task_id.into();\n\n start_stealing(core, task_id, target_worker_id.into());\n\n let mut comm = env::create_test_comm();\n\n on_steal_response(\n\n core,\n\n &mut comm,\n\n worker_id.into(),\n\n StealResponseMsg {\n\n responses: vec![(task_id, StealResponse::Running)],\n\n },\n\n )\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 30, "score": 225087.5124801736 }, { "content": "fn force_reassign<W: Into<WorkerId>, T: Into<TaskId>>(\n\n core: &mut Core,\n\n scheduler: &mut SchedulerState,\n\n task_id: T,\n\n worker_id: W,\n\n) {\n\n // The same as force_assign, but do not expect that task in ready_to_assign array\n\n scheduler.assign(core, task_id.into(), worker_id.into());\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 31, "score": 225087.5124801736 }, { "content": "fn check_worker_tasks_exact(core: &Core, worker_id: u32, tasks: &[TaskId]) {\n\n let worker = core.get_worker_by_id_or_panic(worker_id.into());\n\n assert_eq!(worker.tasks().len(), tasks.len());\n\n for task in tasks {\n\n assert!(worker.tasks().contains(&task));\n\n }\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 32, "score": 223968.44752445965 }, { "content": "fn cancel_tasks<T: Into<TaskId> + Copy>(core: &mut Core, task_ids: &[T]) {\n\n let mut comm = env::create_test_comm();\n\n on_cancel_tasks(\n\n core,\n\n &mut comm,\n\n &task_ids.iter().map(|&v| v.into()).collect::<Vec<_>>(),\n\n );\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 34, "score": 221878.99797827873 }, { "content": "/// Try to find the CPU NUMA configuration.\n\n///\n\n/// Returns a list of NUMA nodes, each node contains a list of assigned CPUs.\n\nfn read_linux_numa() -> anyhow::Result<Vec<Vec<CpuId>>> {\n\n let nodes = parse_range(&std::fs::read_to_string(\n\n \"/sys/devices/system/node/possible\",\n\n )?)?;\n\n let mut numa_nodes: Vec<Vec<CpuId>> = Vec::new();\n\n for numa_index in nodes {\n\n let filename = format!(\"/sys/devices/system/node/node{}/cpulist\", numa_index);\n\n numa_nodes.push(parse_range(&std::fs::read_to_string(filename)?)?);\n\n }\n\n log::debug!(\"Linux numa detection is successful\");\n\n Ok(numa_nodes)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 36, "score": 218582.69559184366 }, { "content": "fn worker_has_task<T: Into<TaskId>>(core: &Core, worker_id: u32, task_id: T) -> bool {\n\n core.get_worker_by_id_or_panic(worker_id.into())\n\n .tasks()\n\n .contains(&task_id.into())\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 38, "score": 216235.42383130218 }, { "content": "pub fn simple_task(args: &[&'static str], id: u64) -> TaskConfigBuilder {\n\n TaskConfigBuilder::default()\n\n .args(simple_args(args))\n\n .id(Some(id))\n\n}\n", "file_path": "crates/tako/src/tests/integration/utils/task.rs", "rank": 39, "score": 216151.47669362463 }, { "content": "pub fn task_with_deps<T: Into<TaskId>>(id: T, deps: &[&Task], n_outputs: u32) -> Task {\n\n TaskBuilder::new(id.into())\n\n 
.simple_deps(deps)\n\n .outputs(n_outputs)\n\n .build()\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/task.rs", "rank": 40, "score": 213094.34655012266 }, { "content": "fn format_worker(id: WorkerId, worker_map: &WorkerMap) -> &str {\n\n worker_map\n\n .get(&id)\n\n .map(|s| s.as_str())\n\n .unwrap_or_else(|| \"N/A\")\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 41, "score": 211676.21217187704 }, { "content": "fn fill_task_started_data(dict: &mut Value, data: StartedTaskData) {\n\n dict[\"started_at\"] = format_datetime(data.start_date);\n\n dict[\"worker\"] = data.worker_id.as_num().into();\n\n dict[\"cwd\"] = data.context.cwd.to_str().unwrap().into();\n\n dict[\"stdout\"] = format_stdio_def(data.context.stdout);\n\n dict[\"stderr\"] = format_stdio_def(data.context.stderr);\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/json.rs", "rank": 42, "score": 211125.07940554217 }, { "content": "fn draw(state: &mut DashboardState, terminal: &mut DashboardTerminal) {\n\n terminal\n\n .draw(|frame| {\n\n let data = state.get_data_source().clone();\n\n let screen = state.get_current_screen_mut();\n\n\n\n screen.update(&data.get());\n\n screen.draw(frame);\n\n })\n\n .expect(\"An error occurred while drawing the dashboard\");\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/dashboard/ui_loop.rs", "rank": 43, "score": 210294.90327126242 }, { "content": "fn insert_resources_into_env(\n\n state: &WorkerState,\n\n task: &Task,\n\n allocation: &ResourceAllocation,\n\n program: &mut ProgramDefinition,\n\n) {\n\n program\n\n .env\n\n .insert(HQ_CPUS.into(), allocation.comma_delimited_cpu_ids().into());\n\n\n\n let resource_map = state.get_resource_map();\n\n\n\n for rq in task.resources.generic_requests() {\n\n let resource_name = resource_map.get_name(rq.resource).unwrap();\n\n program.env.insert(\n\n format!(\"HQ_RESOURCE_REQUEST_{}\", resource_name).into(),\n\n rq.amount.to_string().into(),\n\n );\n\n }\n\n\n", "file_path": "crates/hyperqueue/src/worker/start.rs", "rank": 44, "score": 208922.03013136014 }, { "content": "/// Selects a worker that is able to execute the given task.\n\n///\n\n/// The `workers` Vec is passed from the outside as an optimization, to reuse its allocated buffer.\n\n///\n\n/// If no worker is available, returns `None`.\n\nfn choose_worker_for_task(\n\n task: &Task,\n\n taskmap: &TaskMap,\n\n worker_map: &Map<WorkerId, Worker>,\n\n random: &mut SmallRng,\n\n workers: &mut Vec<WorkerId>,\n\n now: std::time::Instant,\n\n) -> Option<WorkerId> {\n\n workers.clear();\n\n\n\n let mut costs = u64::MAX;\n\n for worker in worker_map.values() {\n\n if !worker.is_capable_to_run(&task.configuration.resources, now) {\n\n continue;\n\n }\n\n\n\n let c = task_transfer_cost(taskmap, task, worker.id);\n\n match c.cmp(&costs) {\n\n Ordering::Less => {\n\n costs = c;\n", "file_path": "crates/tako/src/scheduler/state.rs", "rank": 45, "score": 208731.19782092568 }, { "content": "fn count_active_workers(descriptor: &DescriptorState) -> u64 {\n\n descriptor\n\n .active_allocations()\n\n .map(|allocation| allocation.worker_count)\n\n .sum()\n\n}\n\n\n\n/// Schedule new allocations for the descriptor with the given name.\n\nasync fn schedule_new_allocations(id: DescriptorId, state_ref: &StateRef) {\n\n let (allocations_to_create, workers_per_alloc, mut waiting_tasks, mut available_workers) = {\n\n let state = state_ref.get();\n\n let descriptor = get_or_return!(state.get_autoalloc_state().get_descriptor(id));\n\n let allocs_in_queue = 
descriptor.queued_allocations().count();\n\n\n\n let waiting_tasks = count_available_tasks(&state, descriptor.descriptor.info());\n\n let active_workers = count_active_workers(descriptor);\n\n let info = descriptor.descriptor.info();\n\n let available_workers = match info.max_worker_count() {\n\n Some(max) => (max as u64).saturating_sub(active_workers),\n\n None => u64::MAX,\n", "file_path": "crates/hyperqueue/src/server/autoalloc/process.rs", "rank": 46, "score": 206232.4861547267 }, { "content": "pub fn benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"scheduler\");\n\n\n\n bench_b_level(&mut group);\n\n bench_schedule(&mut group);\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/scheduler.rs", "rank": 47, "score": 201371.07823040505 }, { "content": "pub fn benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"core\");\n\n\n\n bench_remove_single_task(&mut group);\n\n bench_remove_all_tasks(&mut group);\n\n bench_add_task(&mut group);\n\n bench_add_tasks(&mut group);\n\n bench_iterate_tasks(&mut group);\n\n}\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 48, "score": 201371.07823040505 }, { "content": "#[test]\n\nfn test_resources_blocked_workers() {\n\n let mut rt = TestEnv::new();\n\n rt.new_workers(&[4, 8, 2]);\n\n\n\n rt.new_assigned_tasks_cpus(&[&[4, 4, 4, 4, 4]]);\n\n rt.balance();\n\n assert!(rt.worker_load(100).get_n_cpus() >= 4);\n\n assert!(rt.worker_load(101).get_n_cpus() >= 8);\n\n assert_eq!(rt.worker_load(102).get_n_cpus(), 0);\n\n\n\n assert!(!rt.worker(100).is_parked());\n\n assert!(!rt.worker(101).is_parked());\n\n assert!(rt.worker(102).is_parked());\n\n rt.core().sanity_check();\n\n\n\n rt.new_ready_tasks_cpus(&[3]);\n\n rt.schedule();\n\n\n\n assert!(!rt.worker(100).is_parked());\n\n assert!(!rt.worker(101).is_parked());\n", "file_path": "crates/tako/src/tests/test_scheduler.rs", "rank": 49, "score": 200377.6961788248 }, { "content": "fn filter_allocations(allocations: &mut Vec<Allocation>, filter: Option<AllocationStateFilter>) {\n\n if let Some(filter) = filter {\n\n allocations.retain(|allocation| {\n\n let status = &allocation.status;\n\n match filter {\n\n AllocationStateFilter::Queued => matches!(status, AllocationStatus::Queued),\n\n AllocationStateFilter::Running => {\n\n matches!(status, AllocationStatus::Running { .. })\n\n }\n\n AllocationStateFilter::Finished => {\n\n matches!(status, AllocationStatus::Finished { .. })\n\n }\n\n AllocationStateFilter::Failed => matches!(status, AllocationStatus::Failed { .. 
}),\n\n }\n\n })\n\n }\n\n}\n", "file_path": "crates/hyperqueue/src/client/commands/autoalloc.rs", "rank": 50, "score": 199043.6145559643 }, { "content": "pub fn sorted_vec<T: Ord>(mut vec: Vec<T>) -> Vec<T> {\n\n vec.sort();\n\n vec\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/mod.rs", "rank": 51, "score": 198480.69795600412 }, { "content": "fn read_linux_thread_siblings(cpu_id: CpuId) -> anyhow::Result<Vec<CpuId>> {\n\n let filename = format!(\n\n \"/sys/devices/system/cpu/cpu{}/topology/thread_siblings_list\",\n\n cpu_id\n\n );\n\n log::debug!(\"Reading {}\", filename);\n\n parse_range(&std::fs::read_to_string(filename)?)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 52, "score": 198342.09703724273 }, { "content": "fn p_cpu_range(input: &str) -> NomResult<Vec<CpuId>> {\n\n map_res(\n\n tuple((\n\n terminated(p_u32, space0),\n\n opt(terminated(\n\n preceded(tuple((tag(\"-\"), space0)), p_u32),\n\n space0,\n\n )),\n\n )),\n\n |(u, v)| crate::Result::Ok((u..=v.unwrap_or(u)).map(|id| id.into()).collect()),\n\n )\n\n .parse(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 53, "score": 194100.90206665205 }, { "content": "fn p_cpu_ranges(input: &str) -> NomResult<Vec<CpuId>> {\n\n separated_list1(terminated(tag(\",\"), space0), p_cpu_range)(input)\n\n .map(|(a, b)| (a, b.into_iter().flatten().collect()))\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 54, "score": 194100.902066652 }, { "content": "fn p_cpu_list(input: &str) -> NomResult<Vec<CpuId>> {\n\n delimited(\n\n tuple((char('['), space0)),\n\n separated_list1(tuple((char(','), space0)), map(p_u32, |x| x.into())),\n\n tuple((space0, char(']'))),\n\n )(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", "rank": 55, "score": 194100.902066652 }, { "content": "fn parse_range(input: &str) -> anyhow::Result<Vec<CpuId>> {\n\n let parser = terminated(p_cpu_ranges, opt(newline));\n\n consume_all(parser, input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{parse_range, read_linux_numa};\n\n use tako::common::index::AsIdVec;\n\n\n\n #[test]\n\n fn test_parse_range() {\n\n assert_eq!(parse_range(\"10\").unwrap(), vec![10].to_ids());\n\n assert_eq!(parse_range(\"10\\n\").unwrap(), vec![10].to_ids());\n\n assert_eq!(parse_range(\"0-3\\n\").unwrap(), vec![0, 1, 2, 3].to_ids());\n\n assert_eq!(\n\n parse_range(\"111-115\\n\").unwrap(),\n\n vec![111, 112, 113, 114, 115].to_ids()\n\n );\n\n assert_eq!(parse_range(\"2,7, 10\").unwrap(), vec![2, 7, 10].to_ids());\n", "file_path": "crates/hyperqueue/src/worker/hwdetect.rs", "rank": 56, "score": 190723.90656854675 }, { "content": "fn bench_add_task(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\"add task\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n add_tasks(&mut core, task_count);\n\n\n\n let task = create_task(TaskId::new(\n\n (task_count + 1) as <TaskId as ItemId>::IdType,\n\n ));\n\n (core, Some(task))\n\n },\n\n |(ref mut core, task)| {\n\n core.add_task(task.take().unwrap());\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 57, "score": 189157.66776171338 }, { "content": "fn bench_add_tasks(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n 
BenchmarkId::new(\"add tasks\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let core = Core::default();\n\n let tasks: Vec<_> = (0..task_count)\n\n .map(|id| create_task(TaskId::new(id as <TaskId as ItemId>::IdType)))\n\n .collect();\n\n (core, tasks)\n\n },\n\n |(ref mut core, tasks)| {\n\n for task in tasks.drain(..) {\n\n core.add_task(task);\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 58, "score": 189157.66776171338 }, { "content": "fn bench_remove_all_tasks(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\"remove all tasks\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n let tasks: Set<_> = add_tasks(&mut core, task_count).into_iter().collect();\n\n (core, tasks)\n\n },\n\n |(ref mut core, tasks)| {\n\n core.remove_tasks_batched(tasks);\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 59, "score": 189157.66776171338 }, { "content": "fn bench_iterate_tasks(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\"iterate tasks\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n add_tasks(&mut core, task_count);\n\n core\n\n },\n\n |ref mut core| {\n\n let mut sum = 0;\n\n for task in core.task_map().tasks() {\n\n sum += task.id().as_num();\n\n }\n\n black_box(sum);\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 60, "score": 189157.66776171338 }, { "content": "fn bench_remove_single_task(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\"remove a single task\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n add_tasks(&mut core, task_count);\n\n (core, TaskId::new(0))\n\n },\n\n |(ref mut core, task_id)| {\n\n let _ = core.remove_task(*task_id);\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/core.rs", "rank": 61, "score": 186543.44595902297 }, { "content": "fn is_overlapping(mut ranges: Vec<IntRange>) -> bool {\n\n ranges.sort_unstable_by_key(|range| range.start);\n\n let mut ids = Set::new();\n\n for range in ranges {\n\n if range.iter().any(|x| !ids.insert(x)) {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/common/arrayparser.rs", "rank": 62, "score": 186142.61900665873 }, { "content": "pub fn task_running_msg<T: Into<TaskId>>(task_id: T) -> TaskRunningMsg {\n\n TaskRunningMsg {\n\n id: task_id.into(),\n\n context: Default::default(),\n\n }\n\n}\n", "file_path": "crates/tako/src/tests/utils/task.rs", "rank": 63, "score": 179970.40114044692 }, { "content": "pub fn compute_b_level_metric(tasks: &mut TaskMap) {\n\n crawl(tasks, |t| t.get_consumers());\n\n}\n\n\n", "file_path": "crates/tako/src/scheduler/metrics.rs", "rank": 64, "score": 176438.50047716722 }, { "content": "fn handle_key(state: &mut DashboardState, input: Key) -> ControlFlow<anyhow::Result<()>> {\n\n if input == Key::Char('q') {\n\n // Quits 
the dashboard\n\n ControlFlow::Break(Ok(()))\n\n } else {\n\n let screen = state.get_current_screen_mut();\n\n screen.handle_key(input);\n\n ControlFlow::Continue(())\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/dashboard/ui_loop.rs", "rank": 65, "score": 171057.21727189235 }, { "content": "fn p_resource_kind(input: &str) -> NomResult<GenericResourceDescriptorKind> {\n\n alt((\n\n p_kind_indices.context(\"Index resource\"),\n\n p_kind_sum.context(\"Sum resource\"),\n\n ))(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", "rank": 66, "score": 170925.64643961977 }, { "content": "/// Warns the user that an array job does not contain task ID within stdout/stderr path.\n\nfn warn_missing_task_id(opts: &SubmitOpts, task_count: u32) {\n\n let check_path = |path: Option<&StdioArg>, stream: &str| {\n\n let path = path.and_then(|stdio| match &stdio.0 {\n\n StdioDef::File(path) => path.to_str(),\n\n _ => None,\n\n });\n\n if let Some(path) = path {\n\n let placeholders = parse_resolvable_string(path);\n\n if !placeholders.contains(&StringPart::Placeholder(TASK_ID_PLACEHOLDER)) {\n\n log::warn!(\"You have submitted an array job, but the `{}` path does not contain the task ID placeholder.\\n\\\n\n Individual tasks might thus overwrite the file. Consider adding `%{{{}}}` to the `--{}` value.\", stream, TASK_ID_PLACEHOLDER, stream);\n\n }\n\n }\n\n };\n\n\n\n if task_count > 1 {\n\n check_path(opts.stdout.as_ref(), \"stdout\");\n\n check_path(opts.stderr.as_ref(), \"stderr\");\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/commands/submit.rs", "rank": 67, "score": 169326.26934184952 }, { "content": "fn get_ids_and_entries(opts: &SubmitOpts) -> anyhow::Result<(IntArray, Option<Vec<BString>>)> {\n\n let entries = if let Some(ref filename) = opts.each_line {\n\n Some(read_lines(filename)?)\n\n } else if let Some(ref filename) = opts.from_json {\n\n Some(make_entries_from_json(filename)?)\n\n } else {\n\n None\n\n };\n\n\n\n let ids = if let Some(ref entries) = entries {\n\n IntArray::from_range(0, entries.len() as JobTaskCount)\n\n } else if let Some(ref array) = opts.array {\n\n array.clone()\n\n } else {\n\n IntArray::from_id(0)\n\n };\n\n\n\n Ok((ids, entries))\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/commands/submit.rs", "rank": 69, "score": 168156.93918119583 }, { "content": "fn parse_resource_definition(input: &str) -> anyhow::Result<GenericResourceDescriptor> {\n\n consume_all(p_resource_definition, input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::tests::utils::check_parse_error;\n\n use tako::common::index::AsIdVec;\n\n\n\n #[test]\n\n fn test_parse_cpu_def() {\n\n assert_eq!(\n\n parse_cpu_definition(\"4\").unwrap(),\n\n CpuDefinition::Custom(vec![vec![0, 1, 2, 3].to_ids()]),\n\n );\n\n assert_eq!(\n\n parse_cpu_definition(\"2x3\").unwrap(),\n\n CpuDefinition::Custom(vec![vec![0, 1, 2].to_ids(), vec![3, 4, 5].to_ids()]),\n\n );\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", "rank": 70, "score": 166951.49894174474 }, { "content": "pub fn p_resource_definition(input: &str) -> NomResult<GenericResourceDescriptor> {\n\n let parser = separated_pair(\n\n alphanumeric1.context(\"Resource identifier\"),\n\n tuple((multispace0, char('='), multispace0)),\n\n p_resource_kind.context(\"Resource kind (sum or indices)\"),\n\n );\n\n map(parser, |(name, kind)| GenericResourceDescriptor {\n\n name: name.to_string(),\n\n kind,\n\n })(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", 
"rank": 71, "score": 166951.49894174474 }, { "content": "fn create_task_configuration(core_ref: &mut Core, msg: TaskConf) -> TaskConfiguration {\n\n let resources_msg = msg.resources;\n\n let generic_resources = resources_msg\n\n .generic\n\n .into_iter()\n\n .map(|req| {\n\n let resource = core_ref.get_or_create_generic_resource_id(&req.resource);\n\n GenericResourceRequest {\n\n resource,\n\n amount: req.amount,\n\n }\n\n })\n\n .collect();\n\n\n\n let resources = ResourceRequest::new(\n\n resources_msg.cpus,\n\n resources_msg.min_time,\n\n generic_resources,\n\n );\n\n\n", "file_path": "crates/tako/src/server/client.rs", "rank": 72, "score": 166404.8633583091 }, { "content": "fn cancel_tasks_from_callback(\n\n state_ref: &StateRef,\n\n tako_ref: &Backend,\n\n job_id: JobId,\n\n tasks: Vec<TakoTaskId>,\n\n) {\n\n if tasks.is_empty() {\n\n return;\n\n }\n\n let tako_ref = tako_ref.clone();\n\n let state_ref = state_ref.clone();\n\n tokio::task::spawn_local(async move {\n\n let message = FromGatewayMessage::CancelTasks(CancelTasks { tasks });\n\n let response = tako_ref.send_tako_message(message).await.unwrap();\n\n\n\n match response {\n\n ToGatewayMessage::CancelTasksResponse(msg) => {\n\n let mut state = state_ref.get_mut();\n\n let job = state.get_job_mut(job_id).unwrap();\n\n for tako_id in msg.cancelled_tasks {\n", "file_path": "crates/hyperqueue/src/server/state.rs", "rank": 73, "score": 166229.61584428954 }, { "content": "fn check_task_consumers_exact(task: &Task, consumers: &[&Task]) {\n\n let task_consumers = task.get_consumers();\n\n\n\n assert_eq!(task_consumers.len(), consumers.len());\n\n for consumer in consumers {\n\n assert!(task_consumers.contains(&consumer.id));\n\n }\n\n}\n\n\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 74, "score": 165530.59281379194 }, { "content": "fn resource_allocation_to_msg(\n\n allocation: &ResourceAllocation,\n\n resource_map: &ResourceMap,\n\n) -> TaskResourceAllocation {\n\n TaskResourceAllocation {\n\n cpus: allocation.cpus.to_vec(),\n\n generic_allocations: allocation\n\n .generic_allocations\n\n .iter()\n\n .map(|alloc| crate::messages::worker::GenericResourceAllocation {\n\n resource: resource_map\n\n .get_name(alloc.resource)\n\n .unwrap_or(\"unknown\")\n\n .to_string(),\n\n value: match &alloc.value {\n\n GenericResourceAllocationValue::Indices(indices) => {\n\n crate::messages::worker::GenericResourceAllocationValue::Indices(\n\n indices.iter().cloned().collect(),\n\n )\n\n }\n", "file_path": "crates/tako/src/worker/rpc.rs", "rank": 75, "score": 165486.43373397697 }, { "content": "/// Finds all child directories in the given directory.\n\n/// For each directory, tries to parse its name as an integer.\n\n/// Returns the maximum found successfully parsed integer or [`None`] if no integer was found.\n\nfn find_max_id_in_dir(directory: &Path) -> Option<u64> {\n\n if let Ok(entries) = std::fs::read_dir(directory) {\n\n entries\n\n .filter_map(|entry| {\n\n entry.ok().and_then(|entry| {\n\n match entry.metadata().ok().map(|m| m.is_dir()).unwrap_or(false) {\n\n true => entry\n\n .file_name()\n\n .to_str()\n\n .and_then(|f| f.parse::<u64>().ok()),\n\n false => None,\n\n }\n\n })\n\n })\n\n .max()\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/common/serverdir.rs", "rank": 76, "score": 164129.88200976173 }, { "content": "/// Fills placeholder values known on the worker.\n\npub fn fill_placeholders_worker(program: &mut ProgramDefinition) {\n\n let mut placeholders = 
PlaceholderMap::new();\n\n\n\n let task_id = program.env[env_key(HQ_TASK_ID)].to_string();\n\n placeholders.insert(TASK_ID_PLACEHOLDER, task_id.into());\n\n\n\n let instance_id = program.env[env_key(HQ_INSTANCE_ID)].to_string();\n\n placeholders.insert(INSTANCE_ID_PLACEHOLDER, instance_id.into());\n\n\n\n let submit_dir: PathBuf = program.env[env_key(HQ_SUBMIT_DIR)].to_string().into();\n\n resolve_program_paths(placeholders, program, &submit_dir, true);\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/common/placeholders.rs", "rank": 77, "score": 163078.6226548809 }, { "content": "struct TestTaskLauncher;\n\n\n\nimpl TaskLauncher for TestTaskLauncher {\n\n fn build_task(\n\n &self,\n\n state: &WorkerState,\n\n task_id: TaskId,\n\n stop_receiver: Receiver<StopReason>,\n\n ) -> crate::Result<TaskLaunchData> {\n\n let program: ProgramDefinition = {\n\n let task = state.get_task(task_id);\n\n log::debug!(\n\n \"Starting program launcher task_id={} res={:?} alloc={:?} body_len={}\",\n\n task.id,\n\n &task.resources,\n\n task.resource_allocation(),\n\n task.body.len(),\n\n );\n\n rmp_serde::from_slice(&task.body)?\n\n };\n", "file_path": "crates/tako/src/tests/integration/utils/worker.rs", "rank": 78, "score": 161683.81226203573 }, { "content": "pub fn run_task(\n\n state: &mut WorkerState,\n\n state_ref: &WorkerStateRef,\n\n task_id: TaskId,\n\n allocation: ResourceAllocation,\n\n) {\n\n log::debug!(\"Task={} assigned\", task_id);\n\n\n\n assert_eq!(state.get_task(task_id).n_outputs, 0);\n\n let (end_sender, end_receiver) = oneshot::channel();\n\n let task_env = TaskEnv::new(end_sender);\n\n\n\n state.start_task(task_id, task_env, allocation);\n\n\n\n match state.task_launcher.build_task(state, task_id, end_receiver) {\n\n Ok(task_launch_data) => {\n\n let TaskLaunchData {\n\n task_future,\n\n task_context,\n\n } = task_launch_data;\n", "file_path": "crates/tako/src/worker/reactor.rs", "rank": 79, "score": 161049.7572915557 }, { "content": "fn get_task_time(state: &JobTaskState) -> (Option<DateTime<Utc>>, Option<DateTime<Utc>>) {\n\n match state {\n\n JobTaskState::Canceled {\n\n started_data: Some(started_data),\n\n } => (Some(started_data.start_date), None),\n\n JobTaskState::Running { started_data, .. 
} => (Some(started_data.start_date), None),\n\n JobTaskState::Finished {\n\n started_data,\n\n end_date,\n\n ..\n\n }\n\n | JobTaskState::Failed {\n\n started_data,\n\n end_date,\n\n ..\n\n } => (Some(started_data.start_date), Some(*end_date)),\n\n JobTaskState::Canceled { started_data: None } | JobTaskState::Waiting => (None, None),\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 80, "score": 159156.89104693042 }, { "content": "pub fn gather_configuration(opts: WorkerStartOpts) -> anyhow::Result<WorkerConfiguration> {\n\n log::debug!(\"Gathering worker configuration information\");\n\n\n\n let hostname = opts.hostname.unwrap_or_else(|| {\n\n gethostname::gethostname()\n\n .into_string()\n\n .expect(\"Invalid hostname\")\n\n });\n\n\n\n let cpus = match opts.cpus.unpack() {\n\n CpuDefinition::Detect => detect_cpus()?,\n\n CpuDefinition::DetectNoHyperThreading => detect_cpus_no_ht()?,\n\n CpuDefinition::Custom(cpus) => cpus,\n\n };\n\n\n\n let mut generic = if opts.no_detect_resources {\n\n Vec::new()\n\n } else {\n\n detect_generic_resource()?\n\n };\n", "file_path": "crates/hyperqueue/src/worker/start.rs", "rank": 81, "score": 159090.30177190923 }, { "content": "fn p_kind_indices(input: &str) -> NomResult<GenericResourceDescriptorKind> {\n\n map(\n\n delimited(\n\n tuple((tag(\"indices\"), multispace0, char('('), multispace0)),\n\n separated_pair(p_u32, tuple((multispace0, char('-'), multispace0)), p_u32),\n\n tuple((multispace0, char(')'), multispace0)),\n\n ),\n\n |(start, end)| {\n\n GenericResourceDescriptorKind::Indices(GenericResourceKindIndices {\n\n start: start.into(),\n\n end: end.into(),\n\n })\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", "rank": 82, "score": 157990.12762856606 }, { "content": "fn p_kind_sum(input: &str) -> NomResult<GenericResourceDescriptorKind> {\n\n map(\n\n delimited(\n\n tuple((tag(\"sum\"), multispace0, char('('), multispace0)),\n\n p_u64,\n\n tuple((multispace0, char(')'), multispace0)),\n\n ),\n\n |size| GenericResourceDescriptorKind::Sum(GenericResourceKindSum { size }),\n\n )\n\n .parse(input)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/worker/parser.rs", "rank": 83, "score": 157990.12762856606 }, { "content": "#[test]\n\nfn lost_worker_with_running_and_assign_tasks() {\n\n let mut core = Core::default();\n\n create_test_workers(&mut core, &[1, 1, 1]);\n\n submit_example_1(&mut core);\n\n\n\n let t40 = task(40);\n\n let t41 = task(41);\n\n submit_test_tasks(&mut core, vec![t40, t41]);\n\n\n\n start_on_worker(&mut core, 11, 101);\n\n start_on_worker(&mut core, 12, 101);\n\n start_on_worker(&mut core, 40, 101);\n\n start_on_worker(&mut core, 41, 100);\n\n\n\n fail_steal(&mut core, 12, 101, 100);\n\n start_stealing(&mut core, 40, 100);\n\n start_stealing(&mut core, 41, 101);\n\n\n\n core.assert_running(&[12]);\n\n assert_eq!(core.get_task(12.into()).instance_id, 0.into());\n", "file_path": "crates/tako/src/tests/test_reactor.rs", "rank": 84, "score": 155935.8906425797 }, { "content": "fn bench_schedule(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n for worker_count in [1, 8, 16, 32] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\n\n \"schedule\",\n\n format!(\"tasks={}, workers={}\", task_count, worker_count),\n\n ),\n\n &(task_count, worker_count),\n\n |b, &(task_count, worker_count)| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n add_tasks(&mut core, task_count);\n\n\n\n for worker_id in 
0..worker_count {\n\n core.new_worker(create_worker(worker_id as u64));\n\n }\n\n\n\n let scheduler = SchedulerState::new();\n", "file_path": "crates/tako/benches/benchmarks/scheduler.rs", "rank": 85, "score": 152857.9981005051 }, { "content": "fn bench_b_level(c: &mut BenchmarkGroup<WallTime>) {\n\n for task_count in [10, 1_000, 100_000] {\n\n c.bench_with_input(\n\n BenchmarkId::new(\"compute b-level\", task_count),\n\n &task_count,\n\n |b, &task_count| {\n\n b.iter_batched_ref(\n\n || {\n\n let mut core = Core::default();\n\n add_tasks(&mut core, task_count);\n\n core\n\n },\n\n |core| {\n\n compute_b_level_metric(core.task_map_mut());\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n },\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/tako/benches/benchmarks/scheduler.rs", "rank": 86, "score": 152857.9981005051 }, { "content": "/// Formatting\n\npub fn format_job_workers(tasks: &[JobTaskInfo], worker_map: &WorkerMap) -> String {\n\n // BTreeSet is used to both filter duplicates and keep a stable order\n\n let worker_set: BTreeSet<_> = tasks\n\n .iter()\n\n .filter_map(|task| task.state.get_worker())\n\n .collect();\n\n let worker_count = worker_set.len();\n\n\n\n let mut result = worker_set\n\n .into_iter()\n\n .take(MAX_DISPLAYED_WORKERS)\n\n .map(|id| format_worker(id, worker_map))\n\n .collect::<Vec<_>>()\n\n .join(\", \");\n\n\n\n if worker_count > MAX_DISPLAYED_WORKERS {\n\n write!(result, \", … ({} total)\", worker_count).unwrap();\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 87, "score": 152764.06294683815 }, { "content": "// We need to read it as bytes, because not all our users use UTF-8\n\nfn read_lines(filename: &Path) -> anyhow::Result<Vec<BString>> {\n\n log::info!(\"Reading file: {}\", filename.display());\n\n if fs::metadata(filename)?.len() > 100 << 20 {\n\n log::warn!(\"Reading file bigger than 100MB\");\n\n };\n\n let file = std::fs::File::open(filename)?;\n\n let results: Result<Vec<BString>, std::io::Error> = io::BufReader::new(file)\n\n .split(b'\\n')\n\n .map(|x| x.map(BString::from))\n\n .collect();\n\n Ok(results?)\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/commands/submit.rs", "rank": 88, "score": 151669.18282672562 }, { "content": "#[allow(unused)]\n\npub fn get_average_cpu_usage_for_worker(hw_state: &WorkerHwStateMessage) -> f32 {\n\n let num_cpus = hw_state\n\n .state\n\n .worker_cpu_usage\n\n .cpu_per_core_percent_usage\n\n .len();\n\n let cpu_usage_sum_per_core = hw_state\n\n .state\n\n .worker_cpu_usage\n\n .cpu_per_core_percent_usage\n\n .iter()\n\n .copied()\n\n .reduce(|cpu_a, cpu_b| (cpu_a + cpu_b))\n\n .unwrap_or(0.0);\n\n (cpu_usage_sum_per_core / num_cpus as f32) as f32\n\n}\n", "file_path": "crates/hyperqueue/src/dashboard/utils.rs", "rank": 89, "score": 151644.66652496808 }, { "content": "pub fn simple_args(args: &[&'static str]) -> Vec<String> {\n\n args.iter().map(|&v| v.to_string()).collect()\n\n}\n\n\n", "file_path": "crates/tako/src/tests/integration/utils/task.rs", "rank": 90, "score": 151349.88236369746 }, { "content": "pub fn task_status(status: &JobTaskState) -> Status {\n\n match status {\n\n JobTaskState::Waiting => Status::Waiting,\n\n JobTaskState::Running { .. } => Status::Running,\n\n JobTaskState::Finished { .. } => Status::Finished,\n\n JobTaskState::Failed { .. } => Status::Failed,\n\n JobTaskState::Canceled { .. 
} => Status::Canceled,\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/status.rs", "rank": 91, "score": 150966.89256069524 }, { "content": "pub fn create_test_workers(core: &mut Core, cpus: &[u32]) {\n\n for (i, c) in cpus.iter().enumerate() {\n\n let worker_id = WorkerId::new((100 + i) as u32);\n\n\n\n let wcfg = WorkerConfiguration {\n\n resources: ResourceDescriptor::simple(*c),\n\n listen_address: format!(\"1.1.1.{}:123\", i),\n\n hostname: format!(\"test{}\", i),\n\n work_dir: Default::default(),\n\n log_dir: Default::default(),\n\n heartbeat_interval: Duration::from_millis(1000),\n\n send_overview_interval: Some(Duration::from_millis(1000)),\n\n idle_timeout: None,\n\n time_limit: None,\n\n on_server_lost: ServerLostPolicy::Stop,\n\n extra: Default::default(),\n\n };\n\n\n\n let worker = Worker::new(worker_id, wcfg, Default::default());\n\n on_new_worker(core, &mut TestComm::default(), worker);\n\n }\n\n}\n\n\n", "file_path": "crates/tako/src/tests/utils/schedule.rs", "rank": 92, "score": 150659.574992073 }, { "content": "fn make_entries_from_json(filename: &Path) -> anyhow::Result<Vec<BString>> {\n\n log::info!(\"Reading json file: {}\", filename.display());\n\n if fs::metadata(filename)?.len() > 100 << 20 {\n\n log::warn!(\"Reading file bigger then 100MB\");\n\n };\n\n let file = std::fs::File::open(filename)?;\n\n let root = serde_json::from_reader(file)?;\n\n\n\n if let serde_json::Value::Array(values) = root {\n\n values\n\n .iter()\n\n .map(|element| {\n\n serde_json::to_string(element)\n\n .map(BString::from)\n\n .map_err(|e| e.into())\n\n })\n\n .collect()\n\n } else {\n\n anyhow::bail!(\n\n \"{}: The top element of the provided JSON file has to be an array\",\n", "file_path": "crates/hyperqueue/src/client/commands/submit.rs", "rank": 94, "score": 149268.5938515566 }, { "content": "pub fn fill_placeholders_log(value: &mut PathBuf, job_id: JobId, submit_dir: &Path) {\n\n let mut placeholders = PlaceholderMap::new();\n\n insert_submit_data(&mut placeholders, job_id, submit_dir);\n\n *value = resolve(&placeholders, value.to_str().unwrap()).into();\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/common/placeholders.rs", "rank": 95, "score": 148214.7307351839 }, { "content": "/// Handles key press events when the dashboard_ui is active\n\nfn start_key_event_listener(tx: UnboundedSender<DashboardEvent>) -> thread::JoinHandle<()> {\n\n thread::spawn(move || {\n\n let stdin = io::stdin();\n\n for key in stdin.keys().flatten() {\n\n if let Err(err) = tx.send(DashboardEvent::KeyPressEvent(key)) {\n\n eprintln!(\"Error in sending dashboard key: {}\", err);\n\n return;\n\n }\n\n }\n\n })\n\n}\n\n\n\n/// Sends a dashboard event every n milliseconds\n\nasync fn send_event_every(\n\n n_milliseconds: u64,\n\n sender: UnboundedSender<DashboardEvent>,\n\n event_type: DashboardEvent,\n\n) {\n\n let mut tick_duration = tokio::time::interval(Duration::from_millis(n_milliseconds));\n\n loop {\n\n if let Err(e) = sender.send(event_type) {\n\n log::error!(\"Error in producing dashboard events: {}\", e);\n\n return;\n\n }\n\n tick_duration.tick().await;\n\n }\n\n}\n", "file_path": "crates/hyperqueue/src/dashboard/ui_loop.rs", "rank": 96, "score": 148200.3354511086 }, { "content": "fn insert_submit_data<'a>(map: &mut PlaceholderMap<'a>, job_id: JobId, submit_dir: &'a Path) {\n\n map.insert(JOB_ID_PLACEHOLDER, job_id.to_string().into());\n\n map.insert(SUBMIT_DIR_PLACEHOLDER, submit_dir.to_str().unwrap().into());\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/common/placeholders.rs", 
"rank": 97, "score": 146019.77448180996 }, { "content": "fn format_resource_request(rq: &ResourceRequest) -> String {\n\n let mut result = format_cpu_request(&rq.cpus);\n\n for grq in &rq.generic {\n\n result.push_str(&format!(\"\\n{}: {}\", grq.resource, grq.amount))\n\n }\n\n result\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 98, "score": 143066.54108252365 }, { "content": "fn task_status_to_cell(status: Status) -> CellStruct {\n\n match status {\n\n Status::Waiting => \"WAITING\".cell().foreground_color(Some(Color::Cyan)),\n\n Status::Finished => \"FINISHED\".cell().foreground_color(Some(Color::Green)),\n\n Status::Failed => \"FAILED\".cell().foreground_color(Some(Color::Red)),\n\n Status::Running => \"RUNNING\".cell().foreground_color(Some(Color::Yellow)),\n\n Status::Canceled => \"CANCELED\".cell().foreground_color(Some(Color::Magenta)),\n\n }\n\n}\n\n\n", "file_path": "crates/hyperqueue/src/client/output/cli.rs", "rank": 99, "score": 140701.63952252327 } ]
Rust
src/ops/list.rs
wellinthatcase/fast-redis
b8b806989290d7c658884379975f59bfecaa84a5
use crate::*;

#[pymethods]
impl RedisClient {
    #[args(elements="*", no_overwrite="**")]
    #[text_signature = "($self, key, elements, *, no_overwrite)"]
    pub fn rpush(&mut self, key: String, elements: Vec<&PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<usize> {
        let command = nx_x_decider("RPUSH", "X", no_overwrite);
        let mut args = construct_vector(elements.len() + 1, Cow::from(&elements))?;
        args.insert(0, key);
        Ok(route_command(self, &command, Some(args))?)
    }

    #[args(elements="*", no_overwrite="**")]
    #[text_signature = "($self, key, elements, *, no_overwrite)"]
    pub fn lpush(&mut self, key: String, elements: Vec<&PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<usize> {
        let command = nx_x_decider("LPUSH", "X", no_overwrite);
        let mut args = construct_vector(elements.len() + 1, Cow::from(&elements))?;
        args.insert(0, key);
        Ok(route_command(self, &command, Some(args))?)
    }

    #[text_signature = "($self, key, index, /)"]
    pub fn lindex(&mut self, key: &str, index: isize) -> PyResult<String> {
        let ind = index.to_string();
        Ok(route_command(self, "LINDEX", Some(&[key, &ind]))?)
    }

    #[text_signature = "($self, key, element, /)"]
    pub fn linsert(&mut self, key: &str, element: &str) -> PyResult<isize> {
        Ok(route_command(self, "LINSERT", Some(&[key, element]))?)
    }

    #[text_signature = "($self, key, /)"]
    pub fn llen(&mut self, key: &str) -> PyResult<isize> {
        Ok(route_command(self, "LLEN", Some(key))?)
    }

    #[text_signature = "($self, key, /)"]
    pub fn lpop(&mut self, key: &str) -> PyResult<String> {
        Ok(route_command(self, "LPOP", Some(key))?)
    }

    #[text_signature = "($self, key, index, element, /)"]
    pub fn lset(&mut self, key: &str, index: usize, element: &PyAny) -> PyResult<String> {
        let ind = index.to_string();
        let elem = element.to_string();
        Ok(route_command(self, "LSET", Some(&[key, &ind, &elem]))?)
    }

    #[text_signature = "($self, key, beginning, end, /)"]
    pub fn lrange(&mut self, key: &str, beginning: usize, end: usize) -> PyResult<Vec<String>> {
        let start = beginning.to_string();
        let stop = end.to_string();
        Ok(route_command(self, "LRANGE", Some(&[key, &start, &stop]))?)
    }

    #[args(elems="*")]
    #[text_signature = "($self, key, amt, elems, /)"]
    pub fn lrem(&mut self, key: String, amt: usize, elems: Vec<&PyAny>) -> PyResult<usize> {
        let mut arguments = construct_vector(elems.len() + 2, Cow::from(&elems))?;
        arguments.insert(0, amt.to_string());
        arguments.insert(0, key);
        Ok(route_command(self, "LREM", Some(arguments))?)
    }

    #[text_signature = "($self, key, beginning, end, /)"]
    pub fn ltrim(&mut self, key: &str, beginning: isize, end: isize) -> PyResult<usize> {
        let stop = end.to_string();
        let start = beginning.to_string();
        Ok(route_command(self, "LTRIM", Some(&[key, &start, &stop]))?)
    }

    #[text_signature = "($self, key, /)"]
    pub fn rpop(&mut self, key: &str) -> PyResult<String> {
        Ok(route_command(self, "RPOP", Some(key))?)
    }

    #[text_signature = "($self, key, /)"]
    pub fn lelements(&mut self, key: &str) -> PyResult<Vec<String>> {
        let stop = self.llen(key)?.to_string();
        Ok(route_command(self, "LRANGE", Some(&[key, "0", &stop]))?)
    }

    #[text_signature = "($self, source, destination, /)"]
    pub fn rpoplpush(&mut self, source: &str, destination: &str) -> PyResult<String> {
        Ok(route_command(self, "RPOPLPUSH", Some(&[source, destination]))?)
    }
}
use crate::*;

#[pymethods]
impl RedisClient {
    #[args(elements="*", no_overwrite="**")]
    #[text_signature = "($self, key, elements, *, no_overwrite)"]
    pub fn rpush(&mut self, key: String, elements: Vec<&PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<usize> {
        let command = nx_x_decider("RPUSH", "X", no_overwrite);
        let mut args = construct_vector(elements.len() + 1, Cow::from(&elements))?;
        args.insert(0, key);
        Ok(route_command(self, &command, Some(args))?)
    }

    #[args(elements="*", no_overwrite="**")]
    #[text_signature = "($self, key, elements, *, no_overwrite)"]
    pub fn lpush(&mut self, key: String, elements: Vec<&PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<usize> {
        let command = nx_x_decider("LPUSH", "
#[text_signature = "($self, key, index, /)"] pub fn lindex(&mut self, key: &str, index: isize) -> PyResult<String> { let ind = index.to_string(); Ok(route_command(self, "LINDEX", Some(&[key, &ind]))?) } #[text_signature = "($self, key, element, /)"] pub fn linsert(&mut self, key: &str, element: &str) -> PyResult<isize> { Ok(route_command(self, "LINSERT", Some(&[key, element]))?) } #[text_signature = "($self, key, /)"] pub fn llen(&mut self, key: &str) -> PyResult<isize> { Ok(route_command(self, "LLEN", Some(key))?) } #[text_signature = "($self, key, /)"] pub fn lpop(&mut self, key: &str) -> PyResult<String> { Ok(route_command(self, "LPOP", Some(key))?) } #[text_signature = "($self, key, index, element, /)"] pub fn lset(&mut self, key: &str, index: usize, element: &PyAny) -> PyResult<String> { let ind = index.to_string(); let elem = element.to_string(); Ok(route_command(self, "LSET", Some(&[key, &ind, &elem]))?) } #[text_signature = "($self, key, beginning, end, /)"] pub fn lrange(&mut self, key: &str, beginning: usize, end: usize) -> PyResult<Vec<String>> { let start = beginning.to_string(); let stop = end.to_string(); Ok(route_command(self, "LRANGE", Some(&[key, &start, &stop]))?) } #[args(elems="*")] #[text_signature = "($self, key, amt, elems, /)"] pub fn lrem(&mut self, key: String, amt: usize, elems: Vec<&PyAny>) -> PyResult<usize> { let mut arguments = construct_vector(elems.len() + 2, Cow::from(&elems))?; arguments.insert(0, amt.to_string()); arguments.insert(0, key); Ok(route_command(self, "LREM", Some(arguments))?) } #[text_signature = "($self, key, beginning, end, /)"] pub fn ltrim(&mut self, key: &str, beginning: isize, end: isize) -> PyResult<usize> { let stop = end.to_string(); let start = beginning.to_string(); Ok(route_command(self, "LTRIM", Some(&[key, &start, &stop]))?) } #[text_signature = "($self, key, /)"] pub fn rpop(&mut self, key: &str) -> PyResult<String> { Ok(route_command(self, "RPOP", Some(key))?) } #[text_signature = "($self, key, /)"] pub fn lelements(&mut self, key: &str) -> PyResult<Vec<String>> { let stop = self.llen(key)?.to_string(); Ok(route_command(self, "LRANGE", Some(&[key, "0", &stop]))?) } #[text_signature = "($self, source, destination, /)"] pub fn rpoplpush(&mut self, source: &str, destination: &str) -> PyResult<String> { Ok(route_command(self, "RPOPLPUSH", Some(&[source, destination]))?) } }
X", no_overwrite); let mut args = construct_vector(elements.len() + 1, Cow::from(&elements))?; args.insert(0, key); Ok(route_command(self, &command, Some(args))?) }
function_block-function_prefixed
[ { "content": "#[inline]\n\nfn route_command<Args, ReturnType>(inst: &mut RedisClient, cmd: &str, args: Option<Args>) -> PyResult<ReturnType>\n\nwhere \n\n Args: ToRedisArgs,\n\n ReturnType: FromRedisValue\n\n{\n\n let mut call = redis::cmd(cmd);\n\n call.arg(args);\n\n\n\n match call.query(&mut inst.connection) as RedisResult<ReturnType> {\n\n Ok(v) => Ok(v),\n\n Err(v) => {\n\n let detail = Cow::from(v.detail().unwrap_or_else(|| \"Unknown exception!\").to_string());\n\n \n\n match v.kind() {\n\n ErrorKind::ExtensionError => exceptions::TypeError::into(detail),\n\n ErrorKind::TypeError => exceptions::TypeError::into(detail),\n\n ErrorKind::IoError => exceptions::IOError::into(detail),\n\n _ => exceptions::Exception::into(detail)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Converts a `Cow<[&PyAny]>` into a Redis applicable format. \n", "file_path": "src/lib.rs", "rank": 0, "score": 80477.22157076039 }, { "content": "#[inline]\n\nfn construct_vector(capacity: usize, other: Cow<[&PyAny]>) -> PyResult<Vec<String>> {\n\n let mut vector = Vec::with_capacity(capacity);\n\n \n\n vector.extend(other.iter().map(|element| element.to_string()).collect::<Vec<String>>());\n\n\n\n Ok(vector)\n\n}\n\n\n\n/// Decides whether a command should be the NX/X varient.\n\n/// \n\n/// # Arguments \n\n/// * `base` - The original command. Ex, `SET` being the original of `SETNX`. \n\n/// * `additive` - What to add if `no_overwrite` is true. NX/X. \n\n/// * `choice` - The choice the user may of passed. \n\n/// \n\n/// # Returns \n\n/// The corrected command. Whether NX/X or not. \n", "file_path": "src/lib.rs", "rank": 1, "score": 49112.89913588888 }, { "content": "class StringKey(GenericKey):\n\n __doc__ = GenericKey.__doc__\n\n\n\n def __init__(self, *args: List[Union[RedisClient, str]], **attributes: Dict[str, Any]) -> None:\n\n super().__init__(*args, **attributes)\n\n\n\n def append(self, value: str) -> int:\n\n \"\"\"Append a substring to this key's value.\"\"\"\n\n\n\n return self.client.append(self._name, value)\n\n\n\n def bitcount(self, start: int, stop: int) -> int: \n\n \"\"\"Count the number of set bits in a string.\"\"\"\n\n\n\n return self.client.bitcount(self._name, int(start), int(stop))\n\n\n\n def get(self) -> str: \n\n \"\"\"Get the value of this key.\"\"\"\n\n\n\n return self.client.get(self._name)\n\n\n\n def sset(self, value, no_overwrite=True) -> str: \n\n \"\"\"Set the value of this key.\"\"\"\n\n\n\n return self.client.sset(self._name, value, no_overwrite=no_overwrite)\n\n\n\n def getset(self, value: str) -> str: \n\n \"\"\"Set the value of this key and return the old value. Atomic.\"\"\"\n\n\n\n return self.client.getset(self._name, value)\n\n\n\n def decr(self) -> int: \n\n \"\"\"Decrement the value at this key. If applicable.\"\"\"\n\n\n\n return self.client.decr(self._name)\n\n\n\n def decrby(self, value: int) -> int: \n\n \"\"\"Decrement the value at this key by `value`. If applicable.\"\"\"\n\n\n\n return self.client.decrby(self._name, int(value))\n\n\n\n def incr(self) -> int: \n\n \"\"\"Increment the value at this key. If applicable.\"\"\"\n\n\n\n return self.client.incr(self._name)\n\n\n\n def incrby(self, value: int) -> int: \n\n \"\"\"Increment the value at this key by `value`. If applicable.\"\"\"\n\n\n\n return self.client.incrby(self._name, int(value))\n\n\n\n def incrbyfloat(self, value: float) -> float: \n\n \"\"\"Increment the value at this key with a float. 
If the value is already a double-point float.\"\"\"\n\n\n\n return self.client.incrbyfloat(self._name, float(value))\n\n\n\n def getrange(self, start: int, stop: int) -> str: \n\n \"\"\"Substring the value stored at this key. \"\"\"\n\n\n\n return self.client.getrange(self._name, int(start), int(stop))\n\n\n\n def setrange(self, value: str, offset: int) -> int: \n\n \"\"\"Overwrites part of the string stored at key, starting at the specified offset.\"\"\"\n\n\n\n return self.client.setrange(self._name, value, int(offset))\n\n\n\n def strlen(self) -> int: \n\n \"\"\"The length of the string at this key.\"\"\"\n\n\n\n return self.client.strlen(self._name)\n\n\n\n def __len__(self) -> int: \n", "file_path": "suredis/__init__.py", "rank": 2, "score": 43776.01804696122 }, { "content": "#[inline]\n\nfn nx_x_decider(base: &'static str, additive: &'static str, choice: Option<&PyDict>) -> String {\n\n let gil = Python::acquire_gil();\n\n let mut res = String::with_capacity(base.len() + additive.len());\n\n let raw_choice = choice.unwrap_or_else(|| PyDict::new(gil.python())).get_item(\"no_overwrite\"); \n\n\n\n res.push_str(base);\n\n\n\n if let Some(val) = raw_choice {\n\n match val.to_string().to_ascii_lowercase().parse::<bool>() {\n\n Ok(val) => { \n\n if val { res.push_str(additive); } else {};\n\n res\n\n }, \n\n Err(_) => res\n\n }\n\n } else { res }\n\n} \n\n\n\n/// The main client for suredis. \n\n/// \n\n/// Attributes\n\n/// ==========\n\n/// `db` - The DB the connection interacts with. See [SELECT](https://redis.io/commands/select).\n\n/// `url` - The URL you passed to establish the Redis client.\n\n/// `supports_pipelining` - Whether the connection supports pipelines. \n\n/// \n\n/// Note \n\n/// ====\n\n/// 1. Unsupported Redis operations can be accessed with the `manual` method.\n\n/// 2. It is preferred to prefix your URL with `redis://`.\n", "file_path": "src/lib.rs", "rank": 3, "score": 43604.74857894087 }, { "content": "#[pymodule]\n\nfn suredis(_py: Python, module: &PyModule) -> PyResult<()> {\n\n module.add_class::<RedisClient>()?;\n\n Ok(())\n\n}", "file_path": "src/lib.rs", "rank": 4, "score": 28340.503655286557 }, { "content": "//! Implementation of string operations for the client. \n\nuse crate::*; \n\n\n\n#[pymethods]\n\nimpl RedisClient {\n\n /// If key already exists and is a string, this command appends the value at the end of the string. \n\n /// If key does not exist it is created and set as an empty string, \n\n /// so APPEND will be similar to SET in this special case.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The key.\n\n /// \n\n /// `value` - The value to append to the key. \n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.set(\"key\", \"hello_\") # Set the key. 
\n", "file_path": "src/ops/string.rs", "rank": 5, "score": 27846.414642433465 }, { "content": " /// client = RedisClient(\"url\")\n\n /// client.mset({\"key\": \"hi\", \"key2\": \"bye\"}, no_overwrite=False)\n\n /// ```\n\n /// \n\n /// Simple String Reply\n\n /// ===================\n\n /// `\"OK\"`: Since mset cannot fail.\n\n /// \n\n /// Time Complexity \n\n /// ===============\n\n /// `O(n)` : Where n = the number of keys to set.\n\n /// \n\n /// [Read about MSET in the Redis documentation.](https://redis.io/commands/mset)\n\n #[args(no_overwrite=\"**\")]\n\n #[text_signature = \"($self, keys, /)\"]\n\n pub fn mset(&mut self, keys: HashMap<&str, &PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<String> {\n\n let command = nx_x_decider(\"MSET\", \"NX\", no_overwrite);\n\n\n\n let mut arguments = Vec::with_capacity(keys.len() * 2);\n\n\n", "file_path": "src/ops/string.rs", "rank": 6, "score": 27843.309953301323 }, { "content": " /// client = RedisClient(\"url\")\n\n /// client.set(\"my_key\", \"hello world!\", no_overwrite=True)\n\n /// ```\n\n /// \n\n /// Simple String Reply\n\n /// ===================\n\n /// `\"OK\"`: SET was executed correctly. \n\n /// \n\n /// [Read about SET in the Redis documentation.](https://redis.io/commands/set)\n\n #[args(no_overwrite=\"**\")]\n\n #[text_signature = \"($self, name, value, *, no_overwrite=True)\"]\n\n pub fn sset(&mut self, key: &str, value: &PyAny, no_overwrite: Option<&PyDict>) -> PyResult<String> {\n\n let val = value.to_string();\n\n let command = nx_x_decider(\"SET\", \"NX\", no_overwrite);\n\n Ok(route_command(self, &command, Some(&[key, &val]))?)\n\n }\n\n\n\n /// Atomically sets key to value and returns the old value stored at key. \n\n /// Returns -1 when key exists but does not hold a string value.\n\n /// \n", "file_path": "src/ops/string.rs", "rank": 7, "score": 27843.30039840137 }, { "content": " /// ================= \n\n /// The value of the key after the increment.\n\n /// \n\n /// Time Complexity\n\n /// =============== \n\n /// O(1)\n\n /// \n\n /// [Read about INCRBYFLOAT in the Redis documentation.](https://redis.io/commands/incrbyfloat)\n\n #[text_signature = \"($self, key, amount, /)\"]\n\n pub fn incrbyfloat(&mut self, key: &str, amount: f64) -> PyResult<String> {\n\n let amt = amount.to_string();\n\n Ok(route_command(self, \"INCRBYFLOAT\", Some(&[key, &amt]))?)\n\n }\n\n\n\n /// Returns the substring of the string value stored at key, \n\n /// determined by the offsets start and end (both are inclusive). \n\n /// Negative offsets can be used in order to provide an offset starting from the end of the string. \n\n /// So -1 means the last character, -2 the penultimate and so forth.\n\n /// \n\n /// The function handles out of range requests by limiting the resulting range to the actual length of the string.\n", "file_path": "src/ops/string.rs", "rank": 8, "score": 27843.107093684037 }, { "content": " /// \n\n /// Time Complexity\n\n /// =============== \n\n /// O(n) : Where n = the length of the returned string. \n\n /// \n\n /// O(1) : For small strings. 
\n\n /// \n\n /// [Read about GETRANGE in the Redis documentation.](https://redis.io/commands/getrange)\n\n #[text_signature = \"($self, key, beginning, end, /)\"]\n\n pub fn getrange(&mut self, key: &str, beginning: usize, end: usize) -> PyResult<String> {\n\n let start = beginning.to_string();\n\n let stop = end.to_string();\n\n Ok(route_command(self, \"GETRANGE\", Some(&[key, &start, &stop]))?)\n\n }\n\n\n\n /// Returns the values of all specified keys.\n\n /// For every key that does not hold a string value or does not exist, the special value nil is returned. \n\n /// Because of this, the operation never fails.\n\n /// \n\n /// Arguments\n", "file_path": "src/ops/string.rs", "rank": 9, "score": 27842.232404865423 }, { "content": " /// =============\n\n /// The number of bits set to 1. \n\n /// \n\n /// Time Complexity\n\n /// =============== \n\n /// `O(n)` : Where n = the number of set bits in the string.\n\n /// \n\n /// [Read about BITCOUNT in the Redis documentation.](https://redis.io/commands/bitcount)\n\n #[text_signature = \"($self, key, beginning, end, /)\"]\n\n pub fn bitcount(&mut self, key: &str, beginning: isize, end: isize) -> PyResult<usize> {\n\n let start = beginning.to_string();\n\n let stop = end.to_string();\n\n Ok(route_command(self, \"BITCOUNT\", Some(&[key, &start, &stop]))?)\n\n }\n\n\n\n /// Get the value of key. If the key does not exist the special value nil is returned. \n\n /// An empty string is returned if the value stored at key is not a string, because GET only handles string values.\n\n /// \n\n /// Arguments\n\n /// =========\n", "file_path": "src/ops/string.rs", "rank": 10, "score": 27842.18586774489 }, { "content": " /// client = RedisClient(\"url\")\n\n /// client.setrange(\"key\", \"redis!\", \"5\") // if key's value == \"world!\", it now == \"world!redis!\"\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * The length of the string after the operation. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about SETRANGE in the Redis documentation.](https://redis.io/commands/setrange)\n\n #[text_signature = \"($self, key, value, offset/)\"]\n\n pub fn setrange(&mut self, key: &str, value: &PyAny, offset: usize) -> PyResult<usize> {\n\n let val = value.to_string();\n\n let off = offset.to_string();\n\n Ok(route_command(self, \"SETRANGE\", Some(&[key, &val, &off]))?)\n\n }\n\n\n\n /// Returns the length of the string value stored at key. An error is returned when key holds a non-string value.\n\n /// \n", "file_path": "src/ops/string.rs", "rank": 11, "score": 27841.882810177518 }, { "content": " /// # Arguments: \n\n /// * `key` - The name of the key. 
\n\n /// \n\n /// # Example:\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// length = client.strlen(\"key\") # if key's value == \"hey\", this returns 3\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * The length of the string.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about STRLEN in the Redis documentation.](https://redis.io/commands/strlen)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn strlen(&mut self, key: &str) -> PyResult<usize> {\n\n Ok(route_command(self, \"STRLEN\", Some(key))?)\n\n }\n\n}", "file_path": "src/ops/string.rs", "rank": 12, "score": 27841.726834019693 }, { "content": " for (key, value) in keys.iter() {\n\n arguments.push(key.to_string());\n\n arguments.push(value.to_string());\n\n }\n\n\n\n Ok(route_command(self, &command, Some(arguments))?)\n\n }\n\n\n\n /// Set key to hold the string value and set key to timeout after a given number of seconds. \n\n /// This command is equivalent to executing the following commands:\n\n /// * `SET` key value\n\n /// * `EXPIRE` key seconds\n\n /// \n\n /// It is provided as a faster alternative to the given sequence of operations, \n\n /// because this operation is very common when Redis is used as a cache.\n\n /// An error is returned when seconds is invalid.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n", "file_path": "src/ops/string.rs", "rank": 13, "score": 27841.724802342575 }, { "content": " #[text_signature = \"($self, key, /)\"]\n\n pub fn get(&mut self, key: &str) -> PyResult<String> {\n\n Ok(route_command(self, \"GET\", Some(key)).unwrap_or_default())\n\n }\n\n\n\n /// Set key to hold the string value. \n\n /// If key already holds a value, it is overwritten, regardless of its type. \n\n /// Any previous time to live associated with the key is discarded on successful SET operation.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `value` - The string value of the key.\n\n /// \n\n /// `no_overwrite` - Set to False if the key shall be replaced in the case of a duplicate. Otherwise, True.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n", "file_path": "src/ops/string.rs", "rank": 14, "score": 27841.712527197924 }, { "content": " /// The old value of the key. \n\n /// \n\n /// Time Complexity \n\n /// ===============\n\n /// `O(1)`\n\n /// \n\n /// [Read about GETSET in the Redis documentation.](https://redis.io/commands/getset)\n\n #[text_signature = \"($self, key, value, /)\"]\n\n pub fn getset(&mut self, key: &str, value: &PyAny) -> PyResult<String> {\n\n let val = value.to_string();\n\n Ok(route_command(self, \"GETSET\", Some(&[key, &val]))?)\n\n }\n\n\n\n /// Decrements the number stored at key by one. \n\n /// If the key does not exist, it is set to 0 before performing the operation. \n\n /// An error is returned if the key contains a value of the wrong type or contains a string \n\n /// that can not be represented as integer. 
This operation is limited to 64 bit signed integers.\n\n /// \n\n /// Arguments\n\n /// =========\n", "file_path": "src/ops/string.rs", "rank": 15, "score": 27841.702613122223 }, { "content": " /// client = RedisClient(\"url\")\n\n /// client.psetex(\"hello\", \"5000\", \"world!\")\n\n /// ```python\n\n /// \n\n /// # Simple String Reply: \n\n /// * \"OK\": on success.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about PSETEX in the Redis documentation.](https://redis.io/commands/psetex)\n\n #[text_signature = \"($self, key, value, milliseconds, /)\"]\n\n pub fn psetex(&mut self, key: &str, value: &PyAny, milliseconds: usize) -> PyResult<String> {\n\n let val = value.to_string();\n\n let ms = milliseconds.to_string();\n\n Ok(route_command(self, \"PSETEX\", Some(&[key, &ms, &val]))?)\n\n }\n\n\n\n /// Overwrites part of the string stored at key, starting at the specified offset, for the entire length of value. \n\n /// If the offset is larger than the current length of the string at key, the string is padded with zero-bytes \n", "file_path": "src/ops/string.rs", "rank": 16, "score": 27841.657857506914 }, { "content": " /// client.append(\"key\", \"world!\") == 12 # Append \"world!\" to the value.\n\n /// client.get(\"key\") == \"hello_world!\" # Checking out the new value.\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n\n /// The length of the string after the command.\n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)` : Assuming the appended value is small, and the already present value is of any size.\n\n /// \n\n /// [Read about APPEND in the Redis documentation.](https://redis.io/commands/append)\n\n #[text_signature = \"($self, key, value, /)\"]\n\n pub fn append(&mut self, key: &str, value: &PyAny) -> PyResult<usize> {\n\n let val = value.to_string();\n\n Ok(route_command(self, \"APPEND\", Some(&[key, &val]))?)\n\n }\n\n \n\n /// Count the number of set bits (population counting) in a string. \n", "file_path": "src/ops/string.rs", "rank": 17, "score": 27841.646685443106 }, { "content": " /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.incrby(\"key\", 5) // if key == 2, key now == 7\n\n /// ```\n\n /// \n\n /// Integer Reply \n\n /// =============\n\n /// The value of the key after the increment. \n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n /// \n\n /// [Read about INCRBY in the Redis documentation.](https://redis.io/commands/incrby)\n\n #[text_signature = \"($self, key, amount, /)\"]\n\n pub fn incrby(&mut self, key: &str, amount: usize) -> PyResult<isize> {\n\n let amt = amount.to_string();\n\n Ok(route_command(self, \"INCRBY\", Some(&[key, &amt]))?)\n\n } \n", "file_path": "src/ops/string.rs", "rank": 18, "score": 27841.56121366616 }, { "content": " #[text_signature = \"($self, key, value, lifespan, /)\"]\n\n pub fn setex(&mut self, key: &str, value: &PyAny, lifespan: usize) -> PyResult<String> {\n\n let val = value.to_string();\n\n let life = lifespan.to_string();\n\n Ok(route_command(self, \"SETEX\", Some(&[key, &life, &val]))?)\n\n }\n\n\n\n // From here on documentation needs to look like above. \n\n // See # Contribution in the README.\n\n\n\n /// PSETEX works exactly like SETEX with the sole difference that \n\n /// the expire time is specified in milliseconds instead of seconds.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `value` - The value of the key. \n\n /// * `milliseconds` - The TTL in milliseconds. 
\n\n /// \n\n /// # Example: \n\n /// ```python\n", "file_path": "src/ops/string.rs", "rank": 19, "score": 27841.115868784545 }, { "content": " /// \n\n /// Integer Reply \n\n /// =============\n\n /// The value of the key after the decrement, or -1 if an error occurs.\n\n /// \n\n /// Time Complexity\n\n /// =============== \n\n /// `O(1)`\n\n /// \n\n /// [Read about DECRBY in the Redis documentation.](https://redis.io/commands/decrby)\n\n #[text_signature = \"($self, key, amount, /)\"]\n\n pub fn decrby(&mut self, key: &str, amount: usize) -> PyResult<isize> {\n\n let amt = amount.to_string();\n\n Ok(route_command(self, \"DECRBY\", Some(&[key, &amt]))?)\n\n }\n\n\n\n /// Increments the number stored at key by one. If the key does not exist, \n\n /// it is set to 0 before performing the operation. \n\n /// An error is returned if the key contains a value of the wrong type or contains a string \n\n /// that can not be represented as integer. This operation is limited to 64 bit signed integers.\n", "file_path": "src/ops/string.rs", "rank": 20, "score": 27841.044894923638 }, { "content": " /// `O(1)`\n\n /// \n\n /// [Read about INCR in the Redis documentation.](https://redis.io/commands/incr)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn incr(&mut self, key: &str) -> PyResult<isize> {\n\n Ok(route_command(self, \"INCR\", Some(key))?)\n\n }\n\n\n\n /// Increments the number stored at key by increment. \n\n /// If the key does not exist, it is set to 0 before performing the operation. \n\n /// An error is returned if the key contains a value of the wrong type or contains a string \n\n /// that can not be represented as integer. This operation is limited to 64 bit signed integers.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `amount` - The amount to increment by. \n\n /// \n\n /// Example\n", "file_path": "src/ops/string.rs", "rank": 21, "score": 27840.901488555493 }, { "content": " /// `key` - The name of the key to decrement.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.decr(\"key\")\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n\n /// The value of the key after the decrement.\n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n /// \n\n /// [Read about DECR in the Redis documentation.](https://redis.io/commands/decr)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn decr(&mut self, key: &str) -> PyResult<i64> {\n", "file_path": "src/ops/string.rs", "rank": 22, "score": 27839.75270081964 }, { "content": " #[text_signature = \"($self, keys, /)\"]\n\n pub fn mget(&mut self, keys: Vec<&PyAny>) -> PyResult<Vec<String>> {\n\n let skeys = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"MGET\", Some(skeys))?)\n\n }\n\n\n\n /// Sets the given keys to their respective values. \n\n /// MSET replaces existing values with new values, just as regular SET. \n\n /// See MSETNX if you don't want to overwrite existing values.\n\n /// MSET is atomic, so all given keys are set at once. \n\n /// It is not possible for clients to see that some of the keys were updated while others are unchanged.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `keys` - A Dictionary with a `'key': 'value'` mapping.\n\n /// `no_overwrite` - Set to False in the case you want duplicates to be overwitten. Otherwise, True. 
\n\n /// \n\n /// Example \n\n /// =======\n\n /// ```python\n", "file_path": "src/ops/string.rs", "rank": 23, "score": 27839.461095044684 }, { "content": " /// `key` - The name of the key.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// value = client.get(\"key\")\n\n /// ```\n\n /// \n\n /// Bulk String Reply\n\n /// =================\n\n /// The value of the key, typically. \n\n /// \n\n /// An empty string if the key is an improper type or doesn't exist. \n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n /// \n\n /// [Read about GET in the Redis documentation.](https://redis.io/commands/get)\n", "file_path": "src/ops/string.rs", "rank": 24, "score": 27838.738858210192 }, { "content": "\n\n /// Increment the string representing a floating point number stored at key by the specified increment. \n\n /// By using a negative increment value, the result is that the value stored at the key is decremented \n\n /// (by the obvious properties of addition).\n\n /// If the key does not exist, it is set to 0 before performing the operation.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `amount` - The amount to increment by. \n\n /// \n\n /// Example \n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.incrbyfloat(\"key\", 5.3)\n\n /// ```\n\n /// \n\n /// Bulk String Reply\n", "file_path": "src/ops/string.rs", "rank": 25, "score": 27838.51710683883 }, { "content": " /// to make offset fit. Non-existing keys are considered as empty strings, so this command will make sure \n\n /// it holds a string large enough to be able to set value at offset.\n\n /// Note that the maximum offset that you can set is 229 -1 (536870911).\n\n /// As Redis Strings are limited to 512 megabytes. If you need to grow beyond this size, you can use multiple keys.\n\n /// \n\n /// # Warning: \n\n /// When setting the last possible byte and the string value stored at key does not yet hold a string value, \n\n /// or holds a small string value, Redis needs to allocate all intermediate memory which can block the server \n\n /// for some time. On a 2010 MacBook Pro, setting byte number 536870911 (512MB allocation) takes ~300ms, \n\n /// setting byte number 134217728 (128MB allocation) takes ~80ms, setting bit number 33554432 (32MB allocation) \n\n /// takes ~30ms and setting bit number 8388608 (8MB allocation) takes ~8ms. Note that once this first allocation \n\n /// is done, subsequent calls to SETRANGE for the same key will not have the allocation overhead.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `offset` - The starting offset. \n\n /// * `value` - The value to append after the offset. \n\n /// \n\n /// # Example: \n\n /// ```python\n", "file_path": "src/ops/string.rs", "rank": 26, "score": 27838.353828197603 }, { "content": " /// =========\n\n /// `keys` - A list of keys, by name, to get.\n\n /// \n\n /// Example \n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.mget(\"key1\", \"key2\", \"key3\")\n\n /// ```\n\n /// \n\n /// Array Reply \n\n /// ===========\n\n /// The values of all the keys. 
\n\n /// \n\n /// Time Complexity\n\n /// =============== \n\n /// O(n) : Where n = the number of keys to retrieve.\n\n /// \n\n /// [Read about MGET in the Redis documentation.](https://redis.io/commands/mget)\n\n #[args(keys=\"*\")]\n", "file_path": "src/ops/string.rs", "rank": 27, "score": 27838.136584303 }, { "content": " Ok(route_command(self, \"DECR\", Some(key))?)\n\n }\n\n\n\n /// Decrements the number stored at key by the amount.\n\n /// If the key does not exist, it is set to 0 before performing the operation. \n\n /// An error is returned if the key contains a value of the wrong type or contains a string \n\n /// that can not be represented as integer. This operation is limited to 64 bit signed integers.\n\n /// \n\n /// Arguments\n\n /// ========= \n\n /// `key` - The name of the key to decrement.\n\n /// \n\n /// `amount` - The amount to decrement by.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.decrby(\"key\", 3) // if key == 10, key now == 7\n\n /// ```\n", "file_path": "src/ops/string.rs", "rank": 28, "score": 27837.533241223646 }, { "content": " /// `value` - The value of the key. \n\n /// `seconds` - The TTL of the key, in seconds. \n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// # A key named \"hello\", with a lifespan of 5 seconds, with a value of \"world!\"\n\n /// client.setex(\"hello\", \"world!\", 5)\n\n /// ```\n\n /// \n\n /// Simple String Reply\n\n /// ===================\n\n /// `\"OK\"`: on success. \n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n /// \n\n /// [Read about SETEX in the Redis documentation.](https://redis.io/commands/setex)\n", "file_path": "src/ops/string.rs", "rank": 29, "score": 27837.323799895858 }, { "content": " /// By default all the bytes contained in the string are examined. \n\n /// It is possible to specify the counting operation only in an interval \n\n /// passing the additional arguments start and end.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `beginning` - An index of where to start on the string.\n\n /// \n\n /// `end` - An index of where to end on the string. \n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.bitcount(\"key\", 0, 0)\n\n /// ```\n\n /// \n\n /// Integer Reply\n", "file_path": "src/ops/string.rs", "rank": 30, "score": 27836.980178046 }, { "content": " /// \n\n /// There is no overhead for storing the string representation of the integer.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.incr(\"key\")\n\n /// ```\n\n /// \n\n /// Integer Reply \n\n /// =============\n\n /// The value of the key after the increment. \n\n /// \n\n /// Time Complexity \n\n /// ===============\n", "file_path": "src/ops/string.rs", "rank": 31, "score": 27836.434425538588 }, { "content": " /// \n\n /// Arguments \n\n /// =========\n\n /// `key` - The name of the key.\n\n /// \n\n /// `beginning` - The starting index. \n\n /// \n\n /// `end` - The ending index. 
\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"\")\n\n /// client.set(\"key\", \"hello world!\")\n\n /// client.getrange(\"key\", 0, 4) == \"hello\"\n\n /// ```\n\n /// \n\n /// Bulk String Reply\n\n /// ================= \n\n /// The indexed substring. \n", "file_path": "src/ops/string.rs", "rank": 32, "score": 27836.28808074778 }, { "content": " /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `value` - The new value of the key. \n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.set(\"hello\", \"world\")\n\n /// client.get(\"hello\") == \"world\"\n\n ///\n\n /// old = client.getset(\"hello\", \"redis!\")\n\n /// old == \"world\"\n\n /// client.get(\"hello\") == \"redis!\"\n\n /// ```\n\n /// \n\n /// Bulk String Reply\n\n /// =================\n", "file_path": "src/ops/string.rs", "rank": 33, "score": 27835.720276363245 }, { "content": "class GenericKey:\n\n \"\"\"Key class holding specific attributes and operations.\n\n\n\n You can initalize this class if you need to pass a key around. \n\n\n\n Attribute assignment is very dynamic. Any kwarg you pass will become an attribute of that class. \n\n Your `RedisClient` then the `_name` are the only positionals.\n\n My hope with this is that you will only pass as much information as you need with the least restriction. \n\n\n\n Overwriting the `client` or `_name` attributes is forbidden. \n\n \"\"\"\n\n\n\n def __init__(self, client: RedisClient, _name: str, **attributes: Dict[str, Any]) -> None: \n\n try:\n\n client.manual(\"PING\")\n\n except TypeError: \n\n raise ValueError(\"`client` must be an instance.\")\n\n\n\n self._name = _name\n\n self.client = client \n\n\n\n for (name, value) in attributes.items():\n\n if name not in (\"client\", \"_name\"): \n\n setattr(self, name, value)\n\n \n\n def delete(self) -> int:\n\n \"\"\"Delete this key.\"\"\"\n\n\n\n return self.client.delete(self._name)\n\n\n\n def rename(self, new_name: str) -> str: \n\n \"\"\"Rename this key.\"\"\"\n\n\n\n return self.client.rename(self._name, new_name)\n\n\n\n def exists(self) -> int: \n\n \"\"\"Check if this key still exists.\"\"\"\n\n\n\n return self.client.exists(self._name)\n\n\n\n def expire(self, seconds: int) -> int:\n\n \"\"\"Set an expiration for this key.\"\"\"\n\n\n\n return self.client.expire(self._name, int(seconds))\n\n\n\n def expireat(self, unix_timestamp: int) -> int:\n\n \"\"\"Set an expiration for this key with a UNIX timestamp.\"\"\"\n\n\n\n return self.client.expireat(self._name, int(unix_timestamp))\n\n\n\n def move(self, db: int) -> int: \n\n \"\"\"Move this key to another database.\"\"\"\n\n\n\n return self.client.move(self._name, int(db))\n\n\n\n def persist(self) -> int: \n\n \"\"\"Remove the expiration on this key.\"\"\"\n\n\n\n return self.client.persist(self._name)\n\n\n\n def pexpire(self, milliseconds: int) -> int: \n\n \"\"\"Set an expiration for this key in milliseconds.\"\"\"\n\n\n\n return self.client.pexpire(self._name, int(milliseconds))\n\n\n\n def pexpireat(self, unix_timestamp_in_ms: int) -> int: \n\n \"\"\"Set an expiration for this key with a UNIX timestamp in milliseconds.\"\"\"\n\n\n\n return self.client.pexpireat(self._name, int(unix_timestamp_in_ms))\n\n\n\n def pttl(self) -> int: \n\n \"\"\"The time to live for this key in milliseconds. 
If applicable.\"\"\"\n\n\n\n return self.client.pttl(self._name)\n\n\n\n def ttl(self) -> int: \n\n \"\"\"The time to live for this key in seconds. If applicable.\"\"\"\n\n\n\n return self.client.ttl(self._name)\n\n\n\n def keytype(self) -> str: \n\n \"\"\"The type of this key.\"\"\"\n\n\n\n return self.client.keytype(self._name)\n\n\n\n def unlink(self) -> int: \n\n \"\"\"Unlink this key. Asynchronous deletion.\"\"\"\n\n\n\n return self.client.unlink(self._name)\n\n\n\n # I thought this was cool. Might remove it later. \n\n def __matmul__(self, identifier: int) -> int: \n\n \"\"\"`GenericKey @ 1` == `GenericKey.move(1)`.\"\"\"\n", "file_path": "suredis/__init__.py", "rank": 34, "score": 22213.23152357669 }, { "content": "class SetKey(GenericKey):\n\n __doc__ = GenericKey.__doc__\n\n\n\n def __init__(self, *args: List[Union[RedisClient, str]], **attributes: Dict[str, Any]) -> None:\n\n super().__init__(*args, **attributes)\n\n\n\n def sadd(self, *members: List[Any]) -> int: \n\n \"\"\"Add members to this set.\"\"\"\n\n\n\n return self.client.sadd(self._name, *members)\n\n\n\n def scard(self) -> int: \n\n \"\"\"The amount of members in this set.\"\"\"\n\n\n\n return self.client.scard(self._name)\n\n\n\n def sdiff(self, *sets: List[str]) -> List[str]:\n\n \"\"\"Calculate the difference between this set and `sets`.\"\"\"\n\n\n\n return self.client.sdiff(self._name, *sets)\n\n\n\n def sdiffstore(self, destination: str, *sets: List[str]) -> int: \n\n \"\"\"Subtract this set and other sets, and store the result in a key called `destination`.\"\"\"\n\n\n\n return self.client.sdiffstore(self._name, destination, *sets)\n\n\n\n def sinter(self, *sets: List[str]) -> List[str]: \n\n \"\"\"Intersect this set with others.\"\"\"\n\n\n\n return self.client.sinter(self._name, *sets)\n\n\n\n def sinterstore(self, destination: str, *sets: List[str]) -> List[str]: \n\n \"\"\"Same as `sinter`, except the result is stored in a key named `destination`.\"\"\"\n\n\n\n return self.client.sinterstore(self._name, destination, *sets)\n\n\n\n def sismember(self, member: Any) -> int: \n\n \"\"\"Determine if a given value is a member of this set.\"\"\"\n\n\n\n return self.client.sismember(self._name, member)\n\n\n\n def smembers(self) -> List[str]:\n\n \"\"\"Get the members of this set.\"\"\"\n\n\n\n return self.client.smembers(self._name)\n\n\n\n def smove(self, destination: str, member: Any) -> int: \n\n \"\"\"Move a member from this set to `destination` set.\"\"\"\n\n\n\n return self.client.smove(self._name, destination, member)\n\n\n\n def __len__(self) -> int: \n\n return self.scard()\n\n\n\n def __iter__(self) -> List[str]: \n", "file_path": "suredis/__init__.py", "rank": 35, "score": 22211.040209360835 }, { "content": "class ListKey(GenericKey): \n\n __doc__ = GenericKey.__doc__\n\n\n\n def __init__(self, *args: List[Union[RedisClient, str]], **attributes: Dict[str, Any]) -> None:\n\n super().__init__(*args, **attributes)\n\n\n\n def rpush(self, *elements: List[Any], no_overwrite: bool=True) -> int: \n\n \"\"\"Push some elements to the right-side of this list.\"\"\"\n\n\n\n return self.client.rpush(self._name, *elements, no_overwrite=no_overwrite)\n\n\n\n def lpush(self, *elements: List[Any], no_overwrite: bool=True) -> int: \n\n \"\"\"Push some elements to the left-side of this list.\"\"\"\n\n\n\n return self.client.lpush(self._name, *elements, no_overwrite=no_overwrite)\n\n\n\n def lindex(self, index: int) -> str: \n\n \"\"\"Get the element at `index`. 
Empty string if there's no element.\"\"\"\n\n\n\n return self.client.lindex(self._name, int(index))\n\n\n\n def linsert(self, element: Any) -> int: \n\n \"\"\"Inserts `element` in the list stored at key either before or after the reference value pivot.\"\"\"\n\n\n\n return self.client.linsert(self._name, element)\n\n\n\n def llen(self) -> int: \n\n \"\"\"The length of this list.\"\"\"\n\n\n\n return self.client.llen(self._name)\n\n \n\n def lpop(self) -> str: \n\n \"\"\"Remove and return the last element of this list.\"\"\"\n\n\n\n return self.client.lpop(self._name)\n\n\n\n def lset(self, index: int, element: Any) -> int: \n\n \"\"\"Set `element` at `index`.\"\"\"\n\n\n\n return self.client.lset(self._name, int(index), element)\n\n\n\n def lrange(self, start: int, stop: int) -> List[str]: \n\n \"\"\"Get a range of elements from this list.\"\"\"\n\n\n\n return self.client.lrange(self._name, int(start), int(stop))\n\n\n\n def lrem(self, amt: int, *elements: List[Any]) -> int: \n\n \"\"\"Remove elements from the left side of a list. Best suited for duplicate element removal, it seems.\"\"\"\n\n\n\n return self.client.lrem(self._name, int(amt), *elements)\n\n\n\n def ltrim(self, start: int, stop: int) -> int: \n\n \"\"\"Trim a list from the left-side.\"\"\"\n\n\n\n return self.client.ltrim(self._name, int(start), int(stop))\n\n\n\n def rpop(self) -> str: \n\n \"\"\"Remove and return the right-most element on this list.\"\"\"\n\n\n\n return self.client.rpop(self._name)\n\n\n\n def rpoplpush(self, destination: str) -> str: \n\n \"\"\"Remove the last element in a list, prepend it to another list and return it.\"\"\"\n\n\n\n return self.client.rpoplpush(self._name, destination)\n\n\n\n def lelements(self) -> List[str]: \n\n \"\"\"Get the elements of this list.\"\"\"\n\n\n\n return self.client.lelements(self._name)\n\n\n\n def __len__(self) -> int: \n\n return self.llen()\n\n\n\n def __iter__(self) -> List[str]: \n", "file_path": "suredis/__init__.py", "rank": 36, "score": 22211.040209360835 }, { "content": "class HashKey(GenericKey):\n\n __doc__ = GenericKey.__doc__\n\n\n\n def __init__(self, *args: List[Union[RedisClient, str]], **attributes: Dict[str, Any]) -> None:\n\n super().__init__(*args, **attributes)\n\n\n\n def hdel(self, *fields: List[str]) -> int: \n\n \"\"\"Remove the specified fields from the hash stored at this key.\"\"\"\n\n\n\n return self.client.hdel(self._name, *fields)\n\n\n\n def hexists(self, field: str) -> int: \n\n \"\"\"Check if this hash contains the field.\"\"\"\n\n\n\n return self.client.hexists(self._name, field)\n\n\n\n def hget(self, field: str) -> str: \n\n \"\"\"Get the value of this field.\"\"\"\n\n\n\n return self.client.hget(self._name, field)\n\n\n\n def hgetall(self) -> List[str]: \n\n \"\"\"Get a list of fields and their values.\"\"\"\n\n\n\n return self.client.hgetall(self._name)\n\n\n\n def hincrby(self, field: str, value: int) -> int: \n\n \"\"\"Increments the number stored at field in the hash stored at key by `value`.\"\"\"\n\n\n\n return self.client.hincrby(self._name, field, int(value))\n\n\n\n def hincrbyfloat(self, field: str, value: float) -> float: \n\n \"\"\"Same as hincrby, just with a double-point float instead of an integer.\"\"\"\n\n\n\n return self.client.hincrbyfloat(self._name, field, float(value))\n\n\n\n def hkeys(self) -> List[str]:\n\n \"\"\"The fields in this hash.\"\"\"\n\n\n\n return self.client.hkeys(self._name)\n\n\n\n def hlen(self) -> int: \n\n \"\"\"The amount of fields in this hash.\"\"\"\n\n\n\n return 
self.client.hlen(self._name)\n\n\n\n def hmget(self, *fields: List[str]) -> List[str]: \n\n \"\"\"The values in the fields at this key.\"\"\"\n\n\n\n return self.client.hmget(self._name, *fields)\n\n\n\n def hset(self, **fields: Dict[str, Any]) -> int: \n\n \"\"\"Set some fields and values on this hash.\"\"\"\n\n\n\n return self.client.hset(self._name, **fields, no_overwrite=fields.get('no_overwrite', True))\n\n\n\n def hstrlen(self, field: str) -> int: \n\n \"\"\"The length of the value at the field.\"\"\"\n\n\n\n return self.client.hstrlen(self._name, field)\n\n\n\n def hvals(self) -> List[str]: \n\n \"\"\"A list of all the values at this hash.\"\"\"\n\n\n\n return self.client.hvals(self._name)\n\n\n\n def __len__(self) -> int: \n", "file_path": "suredis/__init__.py", "rank": 37, "score": 22211.040209360835 }, { "content": " /// \n\n /// Bulk String Reply\n\n /// ================= \n\n /// The return of the Redis operation. \n\n /// \n\n #[args(args=\"*\")]\n\n #[text_signature = \"($self, cmd, *args)\"]\n\n pub fn manual(&mut self, cmd: &str, args: Vec<&PyAny>) -> PyResult<String> {\n\n let args = construct_vector(args.len(), Cow::from(&args))?;\n\n Ok(route_command(self, cmd, Some(args))?)\n\n }\n\n\n\n /// Delete the specified keys. Keys will be ignored if they do not exist.\n\n /// \n\n /// Use the UNLINK command to delete asynchronously.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `keys` - A list of keys to be deleted by name.\n\n ///\n", "file_path": "src/ops/generic.rs", "rank": 40, "score": 16.605318298532428 }, { "content": "//! Implementation of generic operations for the client. \n\nuse crate::*;\n\n\n\n#[pymethods]\n\nimpl RedisClient {\n\n /// A low-level interface for making more advanced commands to Redis.\n\n /// There's no real reason to use this unless you need access to a command not yet supported by suredis.\n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `cmd` - The command name. \n\n /// \n\n /// `args` - A list of arguments to pass to the command. Passed as rest arguments.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.manual(\"SET\", \"key\", \"value\") # equal to client.set(\"key\", \"value\")\n\n /// ```\n", "file_path": "src/ops/generic.rs", "rank": 41, "score": 14.723978163654765 }, { "content": " /// [Read about SINTERSTORE in the Redis documentation.](https://redis.io/commands/sinter)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, destination, keys, /)\"]\n\n pub fn sinterstore(&mut self, destination: String, keys: Vec<&PyAny>) -> PyResult<Vec<String>> {\n\n let mut args = Vec::with_capacity(keys.len() + 1);\n\n args.push(destination);\n\n args.extend(construct_vector(keys.len(), Cow::from(&keys))?.into_iter());\n\n Ok(route_command(self, \"SINTERSTORE\", Some(args))?)\n\n }\n\n\n\n /// Determine if a given value is a member of a set. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// `member` - The member to check. \n\n /// \n\n /// Example\n\n /// =======\n", "file_path": "src/ops/set.rs", "rank": 42, "score": 14.670803949396543 }, { "content": " /// \n\n /// Integer Reply\n\n /// =============\n\n /// The length of the resulting set. 
\n\n /// \n\n /// [Read about SDIFFSTORE in the Redis documentation.](https://redis.io/commands/sdiffstore)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, destination, keys, /)\"]\n\n pub fn sdiffstore(&mut self, destination: String, keys: Vec<&PyAny>) -> PyResult<usize> {\n\n let mut args = construct_vector(keys.len() + 1, Cow::from(&keys))?;\n\n args.insert(0, destination);\n\n Ok(route_command(self, \"SDIFFSTORE\", Some(args))?)\n\n }\n\n\n\n /// Intersect two or more sets. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `keys` - The sets to intersect. Passed as rest arguments. \n\n /// \n", "file_path": "src/ops/set.rs", "rank": 45, "score": 13.99972439878154 }, { "content": " /// Example\n\n /// =======\n\n /// ```python\n\n /// client.sadd(\"a\", 1, 2, 3, 4)\n\n /// client.sadd(\"b\", 4, 5, 6, 7)\n\n /// client.sinter(\"a\", \"b\") == [\"4\"]\n\n /// ```\n\n /// \n\n /// Array Reply\n\n /// ===========\n\n /// The intersected set, or an empty array if the sets had no members in common. \n\n /// \n\n /// [Read about SINTER in the Redis documentation.](https://redis.io/commands/sinter)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, keys, /)\"]\n\n pub fn sinter(&mut self, keys: Vec<&PyAny>) -> PyResult<Vec<String>> {\n\n let args = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"SINTER\", Some(args))?)\n\n }\n\n\n", "file_path": "src/ops/set.rs", "rank": 48, "score": 13.125032878619168 }, { "content": " /// \n\n /// Simple String Reply\n\n /// ===================\n\n /// `\"OK\"`: The key was renamed.\n\n /// \n\n /// `\"\"`: The key does not exist.\n\n ///\n\n /// Time Complexity\n\n /// =============== \n\n /// `O(1)`\n\n ///\n\n /// [Read about RENAME in the Redis documentation.](https://redis.io/commands/rename)\n\n #[args(no_overwrite=\"**\")]\n\n #[text_signature = \"($self, key, newkey, *, no_overwrite)\"]\n\n pub fn rename(&mut self, key: &str, newkey: &str, no_overwrite: Option<&PyDict>) -> PyResult<String> {\n\n let command = nx_x_decider(\"RENAME\", \"NX\", no_overwrite);\n\n Ok(route_command(self, &command, Some(&[key, newkey])).unwrap_or_default())\n\n }\n\n\n\n /// Returns the remaining time to live of a key that has a timeout. \n", "file_path": "src/ops/generic.rs", "rank": 52, "score": 12.610405031831887 }, { "content": " /// The length of the set after the operation. \n\n /// \n\n /// [Read about SADD in the Redis documentation.](https://redis.io/commands/sadd)\n\n #[args(members=\"*\")]\n\n #[text_signature = \"($self, key, members, /)\"]\n\n pub fn sadd(&mut self, key: String, members: Vec<&PyAny>) -> PyResult<usize> {\n\n let mut mems = construct_vector(members.len() + 1, Cow::from(&members))?;\n\n mems.insert(0, key);\n\n\n\n Ok(route_command(self, \"SADD\", Some(mems))?)\n\n }\n\n\n\n /// Get the amount of members in a set. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// \n\n /// Example\n\n /// =======\n", "file_path": "src/ops/set.rs", "rank": 53, "score": 12.463696977608917 }, { "content": " \n\n args.push(key);\n\n\n\n for (key, value) in fields.iter() {\n\n args.push(key.to_string());\n\n args.push(value.to_string());\n\n } \n\n\n\n Ok(route_command(self, &command, Some(args))?)\n\n }\n\n\n\n /// Returns the string length of the value associated with field in the hash stored at key. \n\n /// If the key or the field do not exist, 0 is returned.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `field` - The name of the field. 
\n\n /// \n\n /// # Example: \n\n /// ```python\n", "file_path": "src/ops/hash.rs", "rank": 55, "score": 12.365939145049591 }, { "content": " /// The number of keys that were unlinked.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)` : For each key removed regardless of its size. \n\n /// \n\n /// `O(n)` : Where n = the number of allocations the deleted objects were composed of. \n\n ///\n\n /// [Read about UNLINK in the Redis documentation.](https://redis.io/commands/unlink)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, *keys)\"]\n\n pub fn unlink(&mut self, keys: Vec<&PyAny>) -> PyResult<usize> {\n\n let args = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"UNLINK\", Some(args))?)\n\n }\n\n}", "file_path": "src/ops/generic.rs", "rank": 56, "score": 11.99646291048182 }, { "content": " /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// keytype = client.keytype(\"key\")\n\n /// ```\n\n /// \n\n /// Simple String Reply\n\n /// ===================\n\n /// The type of the key.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about TYPE in the Redis documentation.](https://redis.io/commands/type)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn keytype(&mut self, key: &str) -> PyResult<String> {\n\n Ok(route_command(self, \"TYPE\", Some(key))?)\n\n }\n\n\n", "file_path": "src/ops/generic.rs", "rank": 57, "score": 11.974097581121713 }, { "content": " /// client = RedisClient(\"url\")\n\n /// fields = {\n\n /// \"name\": \"value\",\n\n /// \"name2\": \"value2\"\n\n /// }\n\n /// client.hset(\"key\", fields, no_overwrite=True)\n\n /// ```\n\n /// \n\n /// # Integer Reply:\n\n /// * The number of fields that were added. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(n) : Where n = the amount of field/value pairs to add. \n\n /// \n\n /// [Read about HSET in the Redis documentation.](https://redis.io/commands/hset)\n\n #[args(no_overwrite=\"**\")]\n\n #[text_signature = \"($self, key, fields, *, no_overwrite)\"]\n\n pub fn hset(&mut self, key: String, fields: HashMap<String, &PyAny>, no_overwrite: Option<&PyDict>) -> PyResult<usize> {\n\n let command = nx_x_decider(\"HSET\", \"NX\", no_overwrite);\n\n let mut args = Vec::with_capacity((fields.len() * 2) + 1);\n", "file_path": "src/ops/hash.rs", "rank": 58, "score": 11.93726660554411 }, { "content": " /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client.sadd(\"key1\", 1, 2, 3, 4, 5)\n\n /// client.sadd(\"key2\", 1, 2, 3)\n\n /// \n\n /// client.sdiff(\"key1\", \"key2\") == [\"4\", \"5\"]\n\n /// ```\n\n /// \n\n /// Array Reply\n\n /// ===========\n\n /// The difference between all of the sets.\n\n /// \n\n /// [Read about SDIFF in the Redis documentation.](https://redis.io/commands/sdiff)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, keys, /)\"]\n\n pub fn sdiff(&mut self, keys: Vec<&PyAny>) -> PyResult<Vec<String>> {\n\n let arguments = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"SDIFF\", Some(arguments))?)\n", "file_path": "src/ops/set.rs", "rank": 59, "score": 11.868092857190529 }, { "content": "//! Implementation of hash operations for the client. \n\nuse crate::*; \n\n\n\n#[pymethods]\n\nimpl RedisClient {\n\n /// Removes the specified fields from the hash stored at key. \n\n /// Specified fields that do not exist within this hash are ignored. 
\n\n /// If key does not exist, it is treated as an empty hash and this command returns 0.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key.\n\n /// * `fields` - The fields to be removed. Passed as rest arguments. \n\n /// \n\n /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hdel(\"key\", \"field1\", \"field2\")\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n", "file_path": "src/ops/hash.rs", "rank": 60, "score": 11.613485864119353 }, { "content": "//! Implementation of set operations for the client. \n\nuse crate::*; \n\n\n\n#[pymethods]\n\nimpl RedisClient {\n\n /// Add one or more members to a set. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key. \n\n /// `members` - The members to add to the set. Passed as rest arguments. \n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client.sadd(\"key\", 1, \"mem 2\", 3, \"mem4\")\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n", "file_path": "src/ops/set.rs", "rank": 61, "score": 11.56064127750311 }, { "content": " /// ```\n\n ///\n\n /// Integer Reply\n\n /// =============\n\n /// The amount of keys that exist in Redis from the passed sequence.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(n)` : Where n = the amount of keys to check. \n\n ///\n\n /// [Read about EXISTS in the Redis documentation.](https://redis.io/commands/exists)\n\n #[args(keys=\"*\")]\n\n #[text_signature = \"($self, keys, /)\"]\n\n pub fn exists(&mut self, keys: Vec<&PyAny>) -> PyResult<usize> {\n\n let args = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"EXISTS\", Some(args))?)\n\n }\n\n\n\n /// Set a timeout on a key. After the timeout expires, the key will be deleted.\n\n /// Keys with this behavior are refeered to as volatile keys in Redis.\n", "file_path": "src/ops/generic.rs", "rank": 62, "score": 11.269637197562167 }, { "content": " #[args(fields=\"*\")]\n\n #[text_signature = \"($self, key, fields, /)\"]\n\n pub fn hmget(&mut self, key: String, fields: Vec<&PyAny>) -> PyResult<Vec<String>> {\n\n let mut arguments = construct_vector(fields.len() + 1, Cow::from(&fields))?;\n\n arguments.insert(0, key);\n\n\n\n Ok(route_command(self, \"HMGET\", Some(arguments))?)\n\n }\n\n\n\n /// Sets field in the hash stored at key to value. \n\n /// If key does not exist, a new key holding a hash is created. \n\n /// If field already exists in the hash, it is overwritten.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `field` - A field name : field value dictionary mapping. \n\n /// * `no_overwrite` - Set to True to avoid overwriting. Otherwise, False. \n\n /// \n\n /// # Example: \n\n /// ```python\n", "file_path": "src/ops/hash.rs", "rank": 64, "score": 11.221593861763344 }, { "content": " ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(n)` : Where n is the number of keys in the database.\n\n ///\n\n /// [Read about KEYS in the Redis documentation.](https://redis.io/commands/keys)\n\n #[text_signature = \"($self, pattern, /)\"]\n\n pub fn keys(&mut self, pattern: &str) -> PyResult<Vec<String>> {\n\n Ok(route_command(self, \"KEYS\", Some(pattern))?)\n\n }\n\n\n\n /// Move the key to another database.\n\n ///\n\n /// Move key from the currently selected database (see SELECT) to the specified destination database. \n\n /// When key already exists in the destination database, or it does not exist in the source database, it does nothing. 
\n\n /// It is possible to use MOVE as a locking primitive because of this.\n\n ///\n\n /// Arguments\n\n /// =========\n\n /// `key` - The key.\n", "file_path": "src/ops/generic.rs", "rank": 65, "score": 11.187474794124281 }, { "content": " /// `-2`: Key does not exist.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about TTL in the Redis documentation.](https://redis.io/commands/ttl)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn ttl(&mut self, key: &str) -> PyResult<isize> {\n\n Ok(route_command(self, \"TTL\", Some(key))?)\n\n }\n\n\n\n /// Returns the string representation of the type of the value stored at key. \n\n /// The different types that can be returned are: string, list, set, zset, hash and stream.\n\n ///\n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key.\n\n /// \n\n /// Example\n", "file_path": "src/ops/generic.rs", "rank": 67, "score": 11.173396964423866 }, { "content": " /// * The number of fields that were removed from the hash, not including specified but non existing fields.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(n) : Where n = the number of fields to be removed.\n\n /// \n\n /// [Read about HDEL in the Redis documentation.](https://redis.io/commands/hdel)\n\n #[args(fields=\"*\")]\n\n #[text_signature = \"($self, key, fields, /)\"]\n\n pub fn hdel(&mut self, key: String, fields: Vec<&PyAny>) -> PyResult<usize> {\n\n let mut arguments = construct_vector(fields.len() + 1, Cow::from(&fields))?;\n\n arguments.insert(0, key);\n\n \n\n Ok(route_command(self, \"HDEL\", Some(arguments))?)\n\n }\n\n\n\n /// Returns if field is an existing field in the hash stored at key.\n\n ///\n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `field`- The value of the field. \n", "file_path": "src/ops/hash.rs", "rank": 68, "score": 11.145217065844307 }, { "content": " /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// key = client.randomkey()\n\n /// ``` \n\n /// \n\n /// Bulk String Reply\n\n /// =================\n\n /// The random key, or an empty string if the database is empty.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about RANDOMKEY on the Redis documentation.](https://redis.io/commands/randomkey)\n\n #[text_signature = \"($self)\"]\n\n pub fn randomkey(&mut self) -> PyResult<String> {\n\n Ok(route_command::<Option<u8>, _>(self, \"RANDOMKEY\", None).unwrap_or_default())\n\n }\n", "file_path": "src/ops/generic.rs", "rank": 69, "score": 10.969670598865429 }, { "content": " #[args(keys=\"*\")]\n\n #[text_signature = \"($self, keys, /)\"]\n\n pub fn delete(&mut self, keys: Vec<&PyAny>) -> PyResult<usize> {\n\n let args = construct_vector(keys.len(), Cow::from(&keys))?;\n\n Ok(route_command(self, \"DEL\", Some(args))?)\n\n }\n\n\n\n /// Check if a key exists. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `keys` - A list of keys to check exists\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.set(\"key\", \"a\")\n\n /// exists = client.exists(\"key\") # Returns 1 since the 1 key provided does exist.\n\n /// exists_more = client.exists(\"key\", \"key2\", \"key3\") # Also returns 1. \n", "file_path": "src/ops/generic.rs", "rank": 71, "score": 10.70084714085616 }, { "content": " /// \n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key with the set. 
\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client.sadd(\"a\", \"hello\", \"world\")\n\n /// client.smembers(\"a\") == [\"hello\", \"world\"]\n\n /// ```\n\n /// \n\n /// Array Reply\n\n /// ===========\n\n /// The members in the set. \n\n /// \n\n /// [Read about SMEMBERS in the Redis documentation.](https://redis.io/commands/smembers)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn smembers(&mut self, key: &str) -> PyResult<Vec<String>> {\n\n Ok(route_command(self, \"SMEMBERS\", Some(key))?)\n", "file_path": "src/ops/set.rs", "rank": 72, "score": 10.693273499262116 }, { "content": " /// \n\n /// # Example: \n\n /// ```\n\n /// client = RedisClient(\"url\")\n\n /// client.hvals(\"key\")\n\n /// ```\n\n /// \n\n /// # Array Reply: \n\n /// A list of the values in the hash. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(n) : Where n = the size of the hash.\n\n /// \n\n /// [Read more about HVALS in the Redis documentation.](https://redis.io/commands/hvals)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn hvals(&mut self, key: &str) -> PyResult<Vec<String>> {\n\n Ok(route_command(self, \"HVALS\", Some(key))?)\n\n }\n\n}", "file_path": "src/ops/hash.rs", "rank": 73, "score": 10.623423181999904 }, { "content": " /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.pexpireat(\"key\", 4294967295)\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n\n /// `1`: The timeout was set.\n\n /// \n\n /// `0`: The timeout was not set.\n\n ///\n\n /// Time Complexity\n\n /// `O(1)`\n\n ///\n\n /// [Read about PEXPIREAT in the Redis documentation.](https://redis.io/commands/pexpireat)\n\n #[text_signature = \"($self, key, timeout, /)\"]\n\n pub fn pexpireat(&mut self, key: &str, timeout: usize) -> PyResult<i8> {\n\n let time = timeout.to_string();\n\n Ok(route_command(self, \"PEXPIREAT\", Some(&[key, &time]))?)\n\n }\n", "file_path": "src/ops/generic.rs", "rank": 75, "score": 10.16385491895078 }, { "content": " /// client.hkeys(\"key\")\n\n /// ```\n\n /// \n\n /// # Array Reply: \n\n ///* A list of fields in the hash, or an empty list when key does not exist.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(n) : Where n = the size of the hash.\n\n /// \n\n /// [Read about HKEYS in the Redis documentation.](https://redis.io/commands/hkeys)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn hkeys(&mut self, key: &str) -> PyResult<Vec<String>> {\n\n Ok(route_command(self, \"HKEYS\", Some(key))?)\n\n }\n\n\n\n /// Returns the number of fields contained in the hash stored at key.\n\n /// \n\n /// # Arguments:\n\n /// * `key` - The name of the key. 
\n\n /// \n", "file_path": "src/ops/hash.rs", "rank": 76, "score": 10.161412338122737 }, { "content": " /// `O(1)`\n\n ///\n\n /// [Read about PEXPIRE in the Redis documentation.](https://redis.io/commands/pexpire)\n\n #[text_signature = \"($self, key, timeout, /)\"]\n\n pub fn pexpire(&mut self, key: &str, timeout: usize) -> PyResult<u8> {\n\n let time = timeout.to_string();\n\n Ok(route_command(self, \"PEXPIRE\", Some(&[key, &time]))?)\n\n }\n\n\n\n /// Has the same effect and semantic as EXPIREAT, \n\n /// but the UNIX time at which the key will expire is specified in milliseconds instead of seconds.\n\n ///\n\n /// Arguments\n\n /// =========\n\n /// `key` - The key.\n\n /// \n\n /// `timeout` - The expire time of the key in milliseconds.\n\n /// \n\n /// Example\n\n /// =======\n", "file_path": "src/ops/generic.rs", "rank": 77, "score": 10.155551818228748 }, { "content": " /// client = RedisClient(\"url\")\n\n /// client.hstrlen(\"key\", \"field\")\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * The length of the string at the field, or 0 if the field was not found.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HSTRLEN in the Redis Documentation.](https://redis.io/commands/hstrlen)\n\n #[text_signature = \"($self, key, field, /)\"]\n\n pub fn hstrlen(&mut self, key: &str, field: &str) -> PyResult<usize> {\n\n Ok(route_command(self, \"HSTRLEN\", Some(&[key, field]))?)\n\n }\n\n\n\n /// Returns all values in the hash stored at key.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n", "file_path": "src/ops/hash.rs", "rank": 79, "score": 10.003990312458177 }, { "content": " /// ```python \n\n /// client.sadd(\"a\", 1, 2, 3, 4)\n\n /// client.sismember(\"a\", 5) == 0 # False\n\n /// client.sismember(\"a\", 2) == 1 # True\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n\n /// `1`: The member is in the set. \n\n /// \n\n /// `0`: The member is not in the set, or the key was not a set.\n\n /// \n\n /// [Read about SISMEMBER in the Redis documentation.](https://redis.io/commands/sismember)\n\n #[text_signature = \"($self, key, member, /)\"]\n\n pub fn sismember(&mut self, key: &str, member: &PyAny) -> PyResult<u8> {\n\n let mem = member.to_string();\n\n Ok(route_command(self, \"SISMEMBER\", Some(&[key, &mem]))?)\n\n }\n\n\n\n /// Get all of the members in a set. \n", "file_path": "src/ops/set.rs", "rank": 80, "score": 9.908025555325425 }, { "content": " /// * The value of the field after the increment. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HINCRBY in the Redis documentation.](https://redis.io/commands/hincrby)\n\n #[text_signature = \"($self, key, field, amount, /)\"]\n\n pub fn hincrbyfloat(&mut self, key: &str, field: &str, amount: f64) -> PyResult<f64> {\n\n let amt = amount.to_string();\n\n Ok(route_command(self, \"HINCRBYFLOAT\", Some(&[key, field, &amt]))?)\n\n }\n\n\n\n /// Returns all field names in the hash stored at key.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. 
\n\n /// \n\n /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n", "file_path": "src/ops/hash.rs", "rank": 81, "score": 9.751961136846823 }, { "content": " /// [Read about MOVE in the Redis documentation.](https://redis.io/commands/move)\n\n #[text_signature = \"($self, key, db, /)\"]\n\n pub fn r#move(&mut self, key: &str, db: u8) -> PyResult<u8> {\n\n let id = db.to_string();\n\n Ok(route_command(self, \"MOVE\", Some(&[key, &id]))?)\n\n }\n\n\n\n /// Remove the existing timeout on a key, turning the key from volatile (a key with an expire set) \n\n /// to persistent (a key that will never expire as no timeout is associated).\n\n ///\n\n /// Arguments\n\n /// =========\n\n /// `key` - The name of the key.\n\n /// \n\n /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.persist(\"key\") # Remove the expiration on a key. \n\n /// ```\n", "file_path": "src/ops/generic.rs", "rank": 83, "score": 9.657202564203086 }, { "content": " /// =============\n\n /// `1`: The timeout was set.\n\n /// \n\n /// `0`: The timeout was not set. Input was not an integer, key doesn't exist, etc.\n\n /// \n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about EXPIRE in the Redis documentation.](https://redis.io/commands/expire)\n\n #[text_signature = \"($self, key, seconds, /)\"]\n\n pub fn expire(&mut self, key: &str, seconds: usize) -> PyResult<u8> {\n\n let time = seconds.to_string();\n\n Ok(route_command(self, \"EXPIRE\", Some(&[key, &time]))?)\n\n }\n\n\n\n /// Set a timeout on a key with a UNIX timestamp. After the timeout expires, the key will be deleted.\n\n /// Keys with this behavior are refeered to as volatile keys in Redis.\n\n ///\n\n /// EXPIREAT has the same effect and semantic as EXPIRE, \n", "file_path": "src/ops/generic.rs", "rank": 84, "score": 9.470499404357826 }, { "content": " /// `-1`: Key exists, but has no expiration set.\n\n /// \n\n /// `-2`: Key does not exist.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about PTTL in the Redis documentation.](https://redis.io/commands/pttl)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn pttl(&mut self, key: &str) -> PyResult<i64> {\n\n Ok(route_command(self, \"PTTL\", Some(key))?)\n\n }\n\n\n\n /// Return a random key name. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// None\n\n /// \n", "file_path": "src/ops/generic.rs", "rank": 85, "score": 9.361395673128772 }, { "content": " /// \n\n /// `0`: The timeout was not set. Invalid UNIX timestamp, key doesn't exist, etc.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about EXPIREAT in the Redis documenation.](https://redis.io/commands/expireat)\n\n #[text_signature = \"($self, key, timestamp, /)\"]\n\n pub fn expireat(&mut self, key: &str, timestamp: usize) -> PyResult<u8> {\n\n let time = timestamp.to_string();\n\n Ok(route_command(self, \"EXPIREAT\", Some(&[key, &time]))?)\n\n }\n\n\n\n /// Return all the keys matching the passed pattern.\n\n /// While the time complexity is O(n), the constant times are quite fast. 
~40ms for a 1 million key database.\n\n ///\n\n /// Arguments\n\n /// =========\n\n /// `pattern` - The pattern to search by.\n", "file_path": "src/ops/generic.rs", "rank": 86, "score": 9.335881234012682 }, { "content": " /// # Time Complexity: \n\n /// * O(n) : Where n = the size of the hash.\n\n /// \n\n /// [Read about HGETALL in the Redis documentation.](https://redis.io/commands/hgetall)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn hgetall(&mut self, key: &str) -> PyResult<Vec<String>> {\n\n Ok(route_command(self, \"HGETALL\", Some(key))?)\n\n }\n\n\n\n /// Increments the number stored at field in the hash stored at key by increment. \n\n /// If key does not exist, a new key holding a hash is created. \n\n /// If field does not exist the value is set to 0 before the operation is performed.\n\n /// The range of values supported is limited to 64 bit signed integers.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `field` - The name of the field. \n\n /// * `amount` - The amount to increment by. \n\n /// \n\n /// # Example: \n", "file_path": "src/ops/hash.rs", "rank": 87, "score": 9.047949740664132 }, { "content": " pub fn hget(&mut self, key: &str, field: &str) -> PyResult<String> {\n\n Ok(route_command(self, \"HGET\", Some(&[key, field]))?)\n\n }\n\n\n\n /// Returns all fields and values of the hash stored at key. \n\n /// In the returned value, every field name is followed by its value, \n\n /// so the length of the reply is twice the size of the hash.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// \n\n /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hgetall(\"key\")\n\n /// ```\n\n /// \n\n /// # Array Reply: \n\n /// * A list of fields and their values stored in the hash, or an empty list when key does not exist.\n\n /// \n", "file_path": "src/ops/hash.rs", "rank": 88, "score": 9.033594858113046 }, { "content": " /// ```python\n\n /// client.sadd(\"key\", 1, 2, 3, 4, 5, 9)\n\n /// client.scard(\"key\") == 6\n\n /// ```\n\n /// \n\n /// Integer Reply\n\n /// =============\n\n /// The amount of members in the set. \n\n /// \n\n /// [Read about SCARD in the Redis documentation.](https://redis.io/commands/scard)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn scard(&mut self, key: &str) -> PyResult<usize> {\n\n Ok(route_command(self, \"SCARD\", Some(key))?)\n\n }\n\n\n\n /// Subtract two or more sets. \n\n /// \n\n /// Arguments\n\n /// =========\n\n /// `keys` - Key names of all the sets. Passed as rest arguments. \n", "file_path": "src/ops/set.rs", "rank": 89, "score": 8.947171856126307 }, { "content": " /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hincrby(\"key\", \"field\", 5)\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * The value of the field after the increment. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HINCRBY in the Redis documentation.](https://redis.io/commands/hincrby)\n\n #[text_signature = \"($self, key, field, amount, /)\"]\n\n pub fn hincrby(&mut self, key: &str, field: &str, amount: i64) -> PyResult<isize> {\n\n let amt = amount.to_string();\n\n Ok(route_command(self, \"HINCRBY\", Some(&[key, field, &amt]))?)\n\n }\n\n\n\n /// Increment the specified field of a hash stored at key, and representing a floating point number, \n\n /// by the specified increment. 
If the increment value is negative, \n", "file_path": "src/ops/hash.rs", "rank": 90, "score": 8.935803365146441 }, { "content": " ///\n\n /// Integer Reply\n\n /// =============\n\n /// `1`: The timeout was removed.\n\n /// \n\n /// `0`: The timeout was not removed.\n\n ///\n\n /// Time Complexity\n\n /// ===============\n\n /// `O(1)`\n\n ///\n\n /// [Read about PERSIST in the Redis documentation.](https://redis.io/commands/persist)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn persist(&mut self, key: &str) -> PyResult<i8> {\n\n Ok(route_command(self, \"PERSIST\", Some(key))?)\n\n }\n\n\n\n /// Works exactly like EXPIRE but the time to live of the key is specified in milliseconds instead of seconds.\n\n ///\n\n /// Arguments\n", "file_path": "src/ops/generic.rs", "rank": 91, "score": 8.699933087417469 }, { "content": " /// \n\n /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hexists(\"key\", \"field\")\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * 1: The hash contains the field. \n\n /// * 0: The hash does not contain the field, or the key does not exist. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HEXISTS in the Redis documentation.](https://redis.io/commands/hexists)\n\n #[text_signature = \"($self, key, field, /)\"]\n\n pub fn hexists(&mut self, key: &str, field: &str) -> PyResult<u8> {\n\n Ok(route_command(self, \"HEXISTS\", Some(&[key, field]))?)\n\n }\n\n\n", "file_path": "src/ops/hash.rs", "rank": 92, "score": 8.32881678787959 }, { "content": " /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hlen(\"key\")\n\n /// ```\n\n /// \n\n /// # Integer Reply: \n\n /// * The amount of fields on the key. \n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HLEN in the Redis documentation.](https://redis.io/commands/hlen)\n\n #[text_signature = \"($self, key, /)\"]\n\n pub fn hlen(&mut self, key: &str) -> PyResult<usize> {\n\n Ok(route_command(self, \"HLEN\", Some(key))?)\n\n }\n\n\n\n /// Returns the values associated with the specified fields in the hash stored at key.\n\n /// For every field that does not exist in the hash, a nil value is returned. \n", "file_path": "src/ops/hash.rs", "rank": 93, "score": 8.22174489028864 }, { "content": " /// \n\n /// Integer Reply\n\n /// =============\n\n /// `1`: The member was moved. \n\n /// \n\n /// `0`: The member does not exist. \n\n /// \n\n /// [Read about SMOVE in the Redis documentation.](https://redis.io/commands/smove)\n\n #[text_signature = \"($self, source, destination, member, /)\"]\n\n pub fn smove(&mut self, source: &str, destination: &str, member: &PyAny) -> PyResult<u8> {\n\n let mem = member.to_string();\n\n Ok(route_command(self, \"SMOVE\", Some(&[source, destination, &mem]))?)\n\n }\n\n}", "file_path": "src/ops/set.rs", "rank": 94, "score": 8.004257457141781 }, { "content": " def lindex(self, index: int) -> str: \n\n \"\"\"Get the element at `index`. Empty string if there's no element.\"\"\"\n\n\n", "file_path": "suredis/__init__.py", "rank": 95, "score": 7.184559973581946 }, { "content": "# Notice\n\nSuredis has been archived. \n\nThe code is brisk & rusty (heh) as well to being poorly representative of what I intend on writing in regards to quality.\n\n\n\nThough this did cement one thing, Rust is awesome. 
\n\n\n\n# suredis\n\nA speedy & simplistic library at runtime for an incredibly straightforward Redis interface.\n\n\n\nThe bar is to make all supported Redis operations take under 1ms to complete on localhost. Which to my knowledge, is achieved for now. \n\n\n\nAn exception may be when you pass *a lot* of arguments to a command.\n\n\n\n# Coverage\n\nsuredis strongly supports the following:\n\n - Commands inside of the list, hash, key, set, generic, and string types. \n\n\n\nsuredis partially supports the following: \n\n - Advanced commands. Mostly through the `manual` method as of now.\n\n\n\nsuredis has plans to implement or enhance the following (in no particular order):\n\n - Async.\n\n - Sorted Sets.\n\n - Transactions.\n\n - Advanced commands. \n\n - Python dictionaries. \n\n - Pipeline interfaces.\n\n - More object orientation.\n\n\n\nImplementation is driven by what superficially will be used most practically in a general purpose case.\n\n\n\n# Build Requirements\n\nAll versions you use must be compatible with the versions listed here:\n\n - Rust, 1.39\n\n - Cargo, 1.44.1\n\n - \"redis\" crate, 0.16.0\n\n - \"pyo3\" crate, 0.11.1\n\n - Python, (CPython) 3.5\n\n - \"maturin\" package, 0.8.2\n\n \n\n# Building\n\nsuredis is not yet released, but, you can still build it using:\n\n - `git clone https://github.com/wellinthatcase/suredis`\n\n - `pip install maturin`\n\n - `RUSTFLAGS=\"--emit=asm\"` (Optional, slower compilation time, but more LLVM optimization.)\n\n - `cargo build --release && maturin develop --release`\n\n\n\nMake sure you have a virtual environment activated to use maturin in the build process.\n\nAlso, suredis is 1.92 GB (2,067,718,144 bytes) after compilation (if you decide to build source). Make sure you have enough disk space. \n\n\n", "file_path": "README.md", "rank": 96, "score": 6.790578266988845 }, { "content": " /// Example\n\n /// =======\n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.set(\"key\", \"hello world!\") # Set the key. \n\n /// client.delete(\"key\") == 1 # Delete the key. \n\n /// client.delete(\"key1\", \"key2\", \"key3\") == 0 # None of the keys exist. \n\n /// ```\n\n /// \n\n /// Time Complexity \n\n /// ===============\n\n /// `O(n)` : Where n = the number of keys that will be removed. \n\n /// \n\n /// `O(m)` : Where m = the number of elements in the list, set, sorted set, or hash. **If applicable**. \n\n ///\n\n /// Integer Reply\n\n /// =============\n\n /// The amount of keys deleted.\n\n /// \n\n /// [Read about DEL in the Redis documentation.](https://redis.io/commands/del)\n", "file_path": "src/ops/generic.rs", "rank": 98, "score": 6.191748049611235 }, { "content": " /// Returns the value associated with field in the hash stored at key.\n\n /// \n\n /// # Arguments: \n\n /// * `key` - The name of the key. \n\n /// * `field` - The name of the field. \n\n /// \n\n /// # Example: \n\n /// ```python\n\n /// client = RedisClient(\"url\")\n\n /// client.hget(\"key\", \"field\")\n\n /// ```\n\n /// \n\n /// # Bulk String Reply: \n\n /// * The value associated with field, or nil when field is not present in the hash or key does not exist.\n\n /// \n\n /// # Time Complexity: \n\n /// * O(1)\n\n /// \n\n /// [Read about HGET in the Redis documentation.](https://redis.io/commands/hget)\n\n #[text_signature = \"($self, key, field, /)\"]\n", "file_path": "src/ops/hash.rs", "rank": 99, "score": 6.075298764511056 } ]
Rust
actions/balance-notification-registration/src/lib.rs
HugoByte/aurras
fcc03684f4ed56ce949c5a1db8764508e0feb3f9
extern crate serde_json; use actions_common::Context; use chesterfield::sync::{Client, Database}; use serde_derive::{Deserialize, Serialize}; use serde_json::{Error, Value}; mod types; use std::collections::HashMap; use types::{Address, Response, Topic}; #[cfg(test)] use actions_common::Config; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] struct Input { __ow_method: String, #[serde(default = "empty_string")] __ow_query: String, #[serde(default = "empty_string")] address: String, balance_filter_db: String, db_name: String, db_url: String, event_registration_db: String, #[serde(default = "empty_string")] token: String, #[serde(default = "empty_string")] topic: String, } fn empty_string() -> String { String::new() } struct Action { params: Input, context: Option<Context>, } impl Action { pub fn new(params: Input) -> Self { Action { params, context: None, } } #[cfg(test)] pub fn init(&mut self, config: &Config) { let db = self.connect_db(&self.params.db_url, &self.params.db_name); self.context = Some(Context::new(db, Some(config))); } #[cfg(not(test))] pub fn init(&mut self) { let db = self.connect_db(&self.params.db_url, &self.params.db_name); self.context = Some(Context::new(db, None)); } fn connect_db(&self, db_url: &String, db_name: &String) -> Database { let client = Client::new(db_url).unwrap(); let db = client.database(db_name).unwrap(); if !db.exists().unwrap() { db.create().unwrap(); } db } pub fn get_context(&mut self) -> &Context { self.context.as_mut().expect("Action not Initialized!") } pub fn method(&self) -> String { self.params.__ow_method.clone() } pub fn get_event_sources(&self) -> Result<Value, Error> { let db = self.connect_db(&self.params.db_url, &self.params.event_registration_db); let context = Context::new(db, None); let list: Response = serde_json::from_value( context.get_list(&self.params.db_url, &self.params.event_registration_db)?, )?; Ok(serde_json::json!({ "statusCode": 200, "headers": { "Content-Type": "application/json" }, "body": list.rows })) } pub fn get_address(&mut self, id: &String) -> Result<Value, Error> { self.get_context().get_document(id) } pub fn add_address(&self, token: &str, topic: &str, address: &str) -> Result<String, Error> { let db = self.connect_db(&self.params.db_url, &self.params.balance_filter_db); let context = Context::new(db, None); if context.get_document(topic).is_err() { context.insert_document( &serde_json::json!({ "filters": {} }), Some(topic.to_string()), )?; } let mut doc: Topic = serde_json::from_value(context.get_document(topic)?)?; doc.filters.insert( address.to_string(), Address { token: token.to_string(), }, ); context.update_document(&topic, &doc.rev, &serde_json::to_value(doc.clone())?) 
} } pub fn main(args: Value) -> Result<Value, Error> { let input = serde_json::from_value::<Input>(args)?; let mut action = Action::new(input); #[cfg(not(test))] action.init(); match action.method().as_ref() { "post" => { let id = action.add_address( &action.params.token, &action.params.topic, &action.params.address, )?; return Ok(serde_json::json!({ "statusCode": 200, "headers": { "Content-Type": "application/json" }, "body": { "success": true } })) } "get" => return action.get_event_sources(), method => { return Err(format!("method not supported document {}", method)) .map_err(serde::de::Error::custom) } } } #[cfg(test)] mod tests { use super::*; use actions_common::mock_containers::CouchDB; use tokio; use tokio::time::{sleep, Duration}; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Source { name: String, trigger: String, } impl Source { pub fn new(name: String, trigger: String) -> Self { Source { name, trigger } } } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Row<T> { rows: Vec<View<T>>, } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct View<T> { doc: T, } #[tokio::test] async fn add_address_pass() { let config = Config::new(); let couchdb = CouchDB::new("admin".to_string(), "password".to_string()) .await .unwrap(); sleep(Duration::from_millis(5000)).await; let url = format!("http://admin:password@localhost:{}", couchdb.port()); let topic = "1234".to_string(); let address = "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw".to_string(); let token = "1".to_string(); let mut action = Action::new(Input { db_url: url, db_name: "test".to_string(), __ow_method: "post".to_string(), __ow_query: "".to_string(), address: "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw".to_string(), balance_filter_db: "balance_filter_db".to_string(), event_registration_db: "".to_string(), token: "1".to_string(), topic: "418a8b8c-02b8-11ec-9a03-0242ac130003".to_string(), }); action.init(&config); let db = action.connect_db(&action.params.db_url, &action.params.balance_filter_db); let context = Context::new(db, None); action.add_address(&token, &topic, &address).unwrap(); let doc: Topic = serde_json::from_value(context.get_document(&topic).unwrap()).unwrap(); let mut filters = HashMap::new(); filters.insert( address.clone(), Address { token: "1".to_string(), }, ); let expected = Topic { id: doc.id.clone(), rev: doc.rev.clone(), filters, }; assert_eq!(doc, expected); couchdb.delete().await.expect("Stopping Container Failed"); } #[ignore] #[should_panic] #[tokio::test] async fn get_event_sources_pass() { let config = Config::new(); let couchdb = CouchDB::new("admin".to_string(), "password".to_string()) .await .unwrap(); sleep(Duration::from_millis(5000)).await; let url = format!("http://admin:password@localhost:{}", couchdb.port()); let topic = "1234".to_string(); let address = "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw".to_string(); let token = "1".to_string(); let mut action = Action::new(Input { db_url: url.clone(), db_name: "test".to_string(), __ow_method: "post".to_string(), __ow_query: "".to_string(), address: "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw".to_string(), balance_filter_db: "balance_filter_db".to_string(), event_registration_db: "event_registration_db".to_string(), token: "1".to_string(), topic: "418a8b8c-02b8-11ec-9a03-0242ac130003".to_string(), }); action.init(&config); let event_registration_db = action.connect_db(&action.params.db_url, &action.params.event_registration_db); let event_registration_db_context = 
Context::new(event_registration_db, None); event_registration_db_context .insert_document( &serde_json::json!({ "name": "polkadot", "trigger": "trigger" }), Some("event_id".to_string()), ) .unwrap(); let doc: Source = serde_json::from_value( event_registration_db_context .get_document(&"event_id".to_string()) .unwrap(), ) .unwrap(); let sources: Row<Source> = serde_json::from_value( event_registration_db_context .get_list(&url.clone(), &action.params.event_registration_db) .unwrap(), ) .unwrap(); let expected: View<Source> = View { doc: Source { ..doc }, }; assert_eq!(sources.rows, vec![expected]); couchdb.delete().await.expect("Stopping Container Failed"); } }
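The registration action above appears to follow the OpenWhisk web-action convention: `__ow_method` carries the HTTP verb and selects the branch taken in `main` ("post" registers an address filter, "get" lists the registered event sources). The snippet below is only an illustrative sketch of what a POST payload for it might look like: the field names come from the `Input` struct above, while the database URL, database names, and device token are placeholder assumptions, not values from any real deployment.

```rust
// Hypothetical payload for the "post" branch of main(); only the field names
// are taken from the Input struct above -- every value is a placeholder.
use serde_json::json;

fn main() {
    let payload = json!({
        "__ow_method": "post",
        "__ow_query": "",
        "db_url": "http://admin:password@localhost:5984", // assumed local CouchDB
        "db_name": "registrations",
        "balance_filter_db": "balance_filter_db",
        "event_registration_db": "event_registration_db",
        "topic": "418a8b8c-02b8-11ec-9a03-0242ac130003",
        "address": "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw",
        "token": "example-fcm-device-token"
    });
    println!("{}", serde_json::to_string_pretty(&payload).unwrap());
}
```

On the "get" branch the same action ignores most of these fields and simply returns the documents stored in `event_registration_db`.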
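`add_address` above does a get-or-create followed by a revision-checked update, using the same chesterfield CouchDB client that the shared `Context` helper (excerpted in the supporting snippets below) wraps. The sketch below shows that pattern in isolation, assuming a reachable CouchDB at a placeholder URL; the document shape mirrors the `Topic`/`Address` types, but `upsert_filter` itself is a hypothetical helper, not code from this repository.

```rust
// Sketch of the get-or-create-then-update flow used by add_address().
// Assumes the chesterfield sync client and a reachable CouchDB instance;
// upsert_filter() is illustrative only.
use chesterfield::sync::Client;
use serde_json::{json, Value};

fn upsert_filter(db_url: &str, db_name: &str, topic: &str, address: &str, token: &str) {
    let db = Client::new(db_url).unwrap().database(db_name).unwrap();
    if !db.exists().unwrap() {
        db.create().unwrap();
    }
    // First registration for this topic: seed an empty "filters" document.
    if db.get(topic).send::<Value>().is_err() {
        db.insert(&json!({ "filters": {} }), Some(topic.to_string()))
            .send()
            .unwrap();
    }
    // Re-read to pick up the current _rev, add the address -> token filter,
    // and write the document back against that revision.
    let mut doc = db.get(topic).send::<Value>().unwrap().into_inner().unwrap();
    let rev = doc["_rev"].as_str().unwrap().to_owned();
    doc["filters"][address] = json!({ "token": token });
    db.update(&doc, topic, rev.as_str()).send().unwrap();
}

fn main() {
    // Placeholder values only.
    upsert_filter(
        "http://admin:password@localhost:5984",
        "balance_filter_db",
        "418a8b8c-02b8-11ec-9a03-0242ac130003",
        "15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw",
        "example-fcm-device-token",
    );
}
```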
[ { "content": "pub fn main(args: Value) -> Result<Value, Error> {\n\n let input = serde_json::from_value::<Input>(args)?;\n\n let mut action = Action::new(input);\n\n\n\n // TODO: Fix\n\n #[cfg(not(test))]\n\n action.init();\n\n\n\n let filtered_topics = action.filter_topics();\n\n let filtered_address = action.filter_address(filtered_topics);\n\n action.invoke_trigger(filtered_address)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use actions_common::mock_containers::CouchDB;\n\n use tokio;\n\n use tokio::time::{sleep, Duration};\n\n\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 0, "score": 178187.02422367816 }, { "content": "#[cfg(test)]\n\nfn client() -> reqwest::Client {\n\n reqwest::Client::new()\n\n}\n\n\n", "file_path": "actions/common/src/types/context.rs", "rank": 1, "score": 172955.27026408704 }, { "content": "#[cfg(not(test))]\n\nfn client() -> reqwest::blocking::Client {\n\n reqwest::blocking::Client::builder()\n\n .timeout(None)\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "actions/common/src/types/context.rs", "rank": 2, "score": 165419.2129888485 }, { "content": "pub fn main(args: Value) -> Result<Value, Error> {\n\n let input = serde_json::from_value::<Input>(args)?;\n\n let mut action = Action::new(input);\n\n let event_id = action.generate_event_id();\n\n #[cfg(not(test))]\n\n action.init();\n\n let trigger = serde_json::from_value::<Trigger>(action.register_trigger(&event_id)?)?;\n\n action.register_source(&event_id, &trigger.name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use actions_common::mock_containers::CouchDB;\n\n use serde_json::json;\n\n use tokio;\n\n use tokio::time::{sleep, Duration};\n\n\n\n #[tokio::test]\n\n async fn register_source_pass() {\n", "file_path": "actions/event-registration/src/lib.rs", "rank": 3, "score": 153694.54595258986 }, { "content": "pub fn main(args: Value) -> Result<Value, Error> {\n\n let input = serde_json::from_value::<Input>(args)?;\n\n let action = Action::new(input);\n\n let response = action.send_notification(&serde_json::json!({\n\n \"notification\": {\n\n \"title\": action.params.message.title,\n\n \"body\": action.params.message.body\n\n },\n\n \"to\": action.params.token,\n\n \"direct_boot_ok\" : true\n\n }))?;\n\n match response.status() {\n\n StatusCode::OK => Ok(serde_json::json!({\n\n \"action\": \"success\"\n\n })),\n\n error => Err(format!(\"failed to push notification {:?}\", error))\n\n .map_err(serde::de::Error::custom),\n\n }\n\n}\n\n\n", "file_path": "actions/push-notification/src/lib.rs", "rank": 4, "score": 153694.5459525899 }, { "content": "pub fn main(args: Value) -> Result<Value, Error> {\n\n // TODO: Use processor for each event source to process event to generic format as the event receiver will be generic for all event source\n\n let input = serde_json::from_value::<Input>(args)?;\n\n let mut action = Action::new(input);\n\n\n\n #[cfg(not(test))]\n\n action.init();\n\n action.process_event(&action.parse_event_data()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_event_pass() {\n\n let action = Action::new(Input {\n\n brokers: vec![\"172.17.0.1:9092\".to_string()],\n\n event: \"{\\\"section\\\": \\\"balances\\\", \\\"method\\\": \\\"Transfer\\\", \\\"data\\\": [{\\\"AccountId\\\":\\\"148fP7zCq1JErXCy92PkNam4KZNcroG9zbbiPwMB1qehgeT4\\\"},{\\\"AccountId\\\":\\\"13bbv2rNzAKuT2oSkFJyHUJAmPVbBYNQbRQ95xW3sQBGffHa\\\"},{\\\"Balance\\\":\\\"24682100255\\\"}]}\".to_string(),\n\n topic: 
\"7231ea34-7bc2-44e8-8601-c8cceb78f8c3\".to_string(),\n", "file_path": "actions/event-receiver/src/lib.rs", "rank": 5, "score": 153694.54595258992 }, { "content": "#[cfg(not(test))]\n\nfn invoke_client(\n\n request: reqwest::blocking::RequestBuilder,\n\n) -> Result<reqwest::blocking::Response, reqwest::Error> {\n\n request.send()\n\n}\n\n\n\nimpl Context {\n\n pub fn new(db: Database, config: Option<&Config>) -> Self {\n\n let api_key = if env::var(\"__OW_API_KEY\").is_ok() {\n\n env::var(\"__OW_API_KEY\").unwrap()\n\n } else {\n\n match config {\n\n Some(config) => config.api_key.clone(),\n\n None => \"test:test\".to_string(),\n\n }\n\n };\n\n let auth: Vec<&str> = api_key.split(\":\").collect();\n\n let host = if env::var(\"__OW_API_HOST\").is_ok() {\n\n env::var(\"__OW_API_HOST\").unwrap()\n\n } else {\n", "file_path": "actions/common/src/types/context.rs", "rank": 6, "score": 152889.0637552298 }, { "content": "pub fn main(args: Value) -> Result<Value, Error> {\n\n let input = serde_json::from_value::<Input>(args)?;\n\n let mut action = Action::new(input);\n\n\n\n #[cfg(not(test))]\n\n action.init();\n\n let parsed_event = action.parse_event_data()?;\n\n action.produce_event(parsed_event)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_event_data_pass() {\n\n let input = serde_json::from_value::<Input>(serde_json::json!({\n\n \"topic\": \"topic\",\n\n \"brokers\": [\"172.17.0.1:9092\"],\n\n \"event_producer_trigger\": \"produce_event\",\n", "file_path": "actions/substrate-event-processor/src/lib.rs", "rank": 8, "score": 150652.0972349435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Input {\n\n messages: Vec<Message>,\n\n push_notification_trigger: String,\n\n db_name: String,\n\n db_url: String,\n\n}\n\n\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 9, "score": 150115.70532011305 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Doc {\n\n name: String,\n\n trigger: String,\n\n}\n", "file_path": "actions/balance-notification-registration/src/types/response.rs", "rank": 10, "score": 143086.50229226984 }, { "content": "#[cfg(test)]\n\nfn invoke_client(request: reqwest::RequestBuilder) -> Result<reqwest::Response, reqwest::Error> {\n\n let handle = Handle::current();\n\n tokio::task::block_in_place(move || handle.block_on(async { request.send().await }))\n\n}\n\n\n", "file_path": "actions/common/src/types/context.rs", "rank": 11, "score": 141666.94066327033 }, { "content": "struct Action {\n\n params: Input,\n\n context: Option<Context>,\n\n}\n\n\n\nimpl Action {\n\n pub fn new(params: Input) -> Self {\n\n Action {\n\n params,\n\n context: None,\n\n }\n\n }\n\n #[cfg(test)]\n\n pub fn init(&mut self, config: &Config) {\n\n let db = self.connect_db(&self.params.db_url, &self.params.db_name);\n\n self.context = Some(Context::new(db, Some(config)));\n\n }\n\n\n\n #[cfg(not(test))]\n\n pub fn init(&mut self) {\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 12, "score": 126503.5069753965 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Input {\n\n name: String,\n\n db_name: String,\n\n db_url: String,\n\n feed: String,\n\n brokers: Vec<String>\n\n}\n\n\n", "file_path": "actions/event-registration/src/lib.rs", "rank": 13, "score": 120945.46197115429 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Input {\n\n token: String,\n\n message: Message,\n\n api_key: String,\n\n}\n\n\n", 
"file_path": "actions/push-notification/src/lib.rs", "rank": 14, "score": 120945.46197115429 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Input {\n\n brokers: Vec<String>,\n\n event: String,\n\n topic: String,\n\n #[serde(rename = \"eventProcessor\")]\n\n event_processor: String,\n\n}\n\n\n", "file_path": "actions/event-receiver/src/lib.rs", "rank": 15, "score": 120945.46197115429 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Input {\n\n topic: String,\n\n brokers: Vec<String>,\n\n event_producer_trigger: String,\n\n event: Event,\n\n}\n\n\n", "file_path": "actions/substrate-event-processor/src/lib.rs", "rank": 17, "score": 118414.14947527542 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Topic {\n\n #[serde(skip_serializing, rename(deserialize = \"_id\"))]\n\n pub id: String,\n\n #[serde(skip_serializing, rename(deserialize = \"_rev\"))]\n\n pub rev: String,\n\n pub filters: HashMap<String, Address>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Address {\n\n pub token: String,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Row<T> {\n\n rows: Vec<View<T>>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct View<T> {\n\n doc: T,\n\n}\n", "file_path": "actions/balance-filter/src/types/topic.rs", "rank": 19, "score": 110238.7337394373 }, { "content": "mod message;\n\nmod source;\n\nmod topic;\n\npub use message::{Deposit, Message, Payload};\n\npub use source::Source;\n\npub use topic::{Address, Topic};\n", "file_path": "actions/balance-filter/src/types/mod.rs", "rank": 20, "score": 110068.5087415124 }, { "content": "struct Action {\n\n params: Input,\n\n}\n\n\n\nimpl Action {\n\n pub fn new(params: Input) -> Self {\n\n Action { params }\n\n }\n\n\n\n pub fn send_notification(&self, payload: &Value) -> Result<reqwest::blocking::Response, Error> {\n\n let client = reqwest::blocking::Client::new();\n\n client\n\n .post(\"https://fcm.googleapis.com/fcm/send\")\n\n .bearer_auth(self.params.api_key.clone())\n\n .json(payload)\n\n .send()\n\n .map_err(serde::de::Error::custom)\n\n }\n\n}\n\n\n", "file_path": "actions/push-notification/src/lib.rs", "rank": 21, "score": 97333.26362643775 }, { "content": "struct Action {\n\n params: Input,\n\n context: Option<Context>,\n\n}\n\n\n\nimpl Action {\n\n pub fn new(params: Input) -> Self {\n\n Action {\n\n params,\n\n context: None,\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn init(&mut self, config: &Config) {\n\n let db = self.connect_db(&self.params.db_url, &self.params.db_name);\n\n self.context = Some(Context::new(db, Some(config)));\n\n }\n\n\n\n #[cfg(not(test))]\n", "file_path": "actions/event-registration/src/lib.rs", "rank": 22, "score": 97333.26362643775 }, { "content": "struct Action {\n\n params: Input,\n\n context: Option<Context>,\n\n}\n\n\n\nimpl Action {\n\n pub fn new(params: Input) -> Self {\n\n Action {\n\n params,\n\n context: None,\n\n }\n\n }\n\n #[cfg(test)]\n\n pub fn init(&mut self, config: &Config) {\n\n let db = self.connect_db(&\"http://localhost:5984\".to_string(), &\"test\".to_string());\n\n self.context = Some(Context::new(db, Some(config)));\n\n }\n\n\n\n #[cfg(not(test))]\n\n pub fn init(&mut self) {\n", "file_path": "actions/event-receiver/src/lib.rs", "rank": 23, "score": 97333.26362643775 }, { "content": 
"struct Action {\n\n params: Input,\n\n context: Option<Context>,\n\n}\n\n\n\nimpl Action {\n\n pub fn new(params: Input) -> Self {\n\n Action {\n\n params,\n\n context: None,\n\n }\n\n }\n\n #[cfg(test)]\n\n pub fn init(&mut self, config: &Config) {\n\n let db = self.connect_db(&\"http://localhost:5984\".to_string(), &\"test\".to_string());\n\n self.context = Some(Context::new(db, Some(config)));\n\n }\n\n\n\n #[cfg(not(test))]\n\n pub fn init(&mut self) {\n", "file_path": "actions/substrate-event-processor/src/lib.rs", "rank": 24, "score": 95848.94251482101 }, { "content": "mod context;\n\nmod trigger;\n\npub use context::{Context, Config};\n\npub use trigger::Trigger;\n", "file_path": "actions/common/src/types/mod.rs", "rank": 26, "score": 83557.75507764913 }, { "content": "fn get_unused_port() -> Result<u16, std::io::Error> {\n\n let listener = TcpListener::bind(\"localhost:0\")?;\n\n let port = listener.local_addr()?.port();\n\n Ok(port)\n\n}\n", "file_path": "actions/common/src/mock/couchdb_test_container.rs", "rank": 27, "score": 82839.72382956604 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::mock::mock_containers::CouchDB;\n\n use chesterfield::sync::Client;\n\n use serde_derive::{Deserialize, Serialize};\n\n use std::collections::HashMap;\n\n use tokio;\n\n use tokio::time::{sleep, Duration};\n\n use wiremock::matchers::{method, path};\n\n use wiremock::{Mock, MockServer, ResponseTemplate};\n\n\n\n #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n pub struct Topic {\n\n #[serde(skip_serializing, rename(deserialize = \"_id\"))]\n\n pub id: String,\n\n #[serde(skip_serializing, rename(deserialize = \"_rev\"))]\n\n pub rev: String,\n\n pub filters: HashMap<String, Address>,\n\n }\n", "file_path": "actions/common/src/types/context.rs", "rank": 28, "score": 80996.45184287986 }, { "content": " };\n\n }\n\n\n\n pub fn get_document(&self, id: &str) -> Result<Value, Error> {\n\n match self.db.get(id).send::<Value>() {\n\n Ok(v) => return Ok(v.into_inner().unwrap()),\n\n Err(err) => {\n\n return Err(format!(\"error fetching document {}: {:?}\", id, err))\n\n .map_err(serde::de::Error::custom)\n\n }\n\n }\n\n }\n\n\n\n pub fn get_list(&self, db_url: &str, db_name: &str) -> Result<Value, Error> {\n\n let client = client();\n\n let url = format!(\"{}/{}/_all_docs?include_docs=true\", db_url, db_name);\n\n if let Ok(response) = invoke_client(client.get(url.clone())) {\n\n return match response.status() {\n\n StatusCode::OK => {\n\n #[cfg(not(test))]\n", "file_path": "actions/common/src/types/context.rs", "rank": 29, "score": 80994.64606969086 }, { "content": "\n\n #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n pub struct Address {\n\n pub token: String,\n\n pub trigger: String,\n\n }\n\n\n\n #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n pub struct Row<T> {\n\n rows: Vec<View<T>>,\n\n }\n\n\n\n #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n pub struct View<T> {\n\n doc: T,\n\n }\n\n\n\n fn connect_db(db_url: &String, db_name: &String) -> Database {\n\n let client = Client::new(db_url).unwrap();\n\n let db = client.database(db_name).unwrap();\n", "file_path": "actions/common/src/types/context.rs", "rank": 30, "score": 80994.25026902613 }, { "content": " };\n\n Context {\n\n host,\n\n db,\n\n name,\n\n namespace,\n\n user: auth[0].to_string(),\n\n pass: auth[1].to_string(),\n\n }\n\n }\n\n\n\n pub fn invoke_action(&self, name: &str, value: &Value) -> Result<Value, Error> {\n\n let client = client();\n\n 
let url = format!(\n\n \"{}/api/v1/namespaces/{}/actions/{}\",\n\n self.host, self.namespace, name\n\n );\n\n let response = invoke_client(\n\n client\n\n .post(url.clone())\n", "file_path": "actions/common/src/types/context.rs", "rank": 31, "score": 80993.8221459248 }, { "content": " }),\n\n Some(topic.clone()),\n\n )\n\n .unwrap();\n\n\n\n let mut doc: Topic = serde_json::from_value(context.get_document(&topic).unwrap()).unwrap();\n\n\n\n doc.filters.insert(\n\n topic.clone(),\n\n Address {\n\n token: \"1\".to_string(),\n\n trigger: \"1\".to_string(),\n\n },\n\n );\n\n context\n\n .update_document(\n\n &topic,\n\n &doc.rev,\n\n &serde_json::to_value(doc.clone()).unwrap(),\n\n )\n", "file_path": "actions/common/src/types/context.rs", "rank": 32, "score": 80991.71659457617 }, { "content": " .unwrap();\n\n\n\n let doc: Topic = serde_json::from_value(context.get_document(&topic).unwrap()).unwrap();\n\n let mut expected = HashMap::new();\n\n expected.insert(\n\n topic.clone(),\n\n Address {\n\n token: \"1\".to_string(),\n\n trigger: \"1\".to_string(),\n\n },\n\n );\n\n assert_eq!(doc.filters, expected);\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn get_list_pass() {\n\n let config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n", "file_path": "actions/common/src/types/context.rs", "rank": 33, "score": 80991.50385200024 }, { "content": " .unwrap();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let context = Context::new(connect_db(&url, &\"test\".to_string()), Some(&config));\n\n let topic = \"1234\".to_string();\n\n\n\n context\n\n .insert_document(\n\n &serde_json::json!({\n\n \"filters\": {}\n\n }),\n\n Some(topic.clone()),\n\n )\n\n .unwrap();\n\n\n\n let mut doc: Topic = serde_json::from_value(context.get_document(&topic).unwrap()).unwrap();\n\n\n\n doc.filters.insert(\n\n topic.clone(),\n\n Address {\n", "file_path": "actions/common/src/types/context.rs", "rank": 34, "score": 80990.29684140148 }, { "content": "use chesterfield::sync::Database;\n\nuse reqwest;\n\nuse reqwest::StatusCode;\n\n\n\nuse super::Trigger;\n\nuse serde_json::{to_value, Error, Value};\n\nuse std::env;\n\n\n\n#[cfg(test)]\n\nuse tokio::runtime::Handle;\n\n\n\n#[derive(new, Debug, Clone)]\n\npub struct Config {\n\n #[new(value = r#\"\"test:test\".to_string()\"#)]\n\n pub api_key: String,\n\n #[new(value = r#\"\"http://172.17.0.1:8888\".to_string()\"#)]\n\n pub host: String,\n\n #[new(value = r#\"\"action\".to_string()\"#)]\n\n pub name: String,\n\n #[new(value = r#\"\"guest\".to_string()\"#)]\n", "file_path": "actions/common/src/types/context.rs", "rank": 35, "score": 80989.65322127476 }, { "content": " );\n\n let response = invoke_client(\n\n client\n\n .put(url.clone())\n\n .basic_auth(self.user.clone(), Some(self.pass.clone()))\n\n .json(value),\n\n )\n\n .map_err(serde::de::Error::custom)?;\n\n match response.status().is_success() {\n\n true => to_value(Trigger::new(name.to_string(), url)),\n\n false => Err(format!(\n\n \"failed to create trigger {} {:?}\",\n\n name,\n\n response.error_for_status()\n\n ))\n\n .map_err(serde::de::Error::custom),\n\n }\n\n }\n\n\n\n pub fn update_document(&self, id: &str, rev: &str, doc: &Value) -> Result<String, Error> {\n", "file_path": "actions/common/src/types/context.rs", "rank": 36, "score": 80989.16909791813 }, { "content": " }\n\n }\n\n\n\n // 
TODO: Fix return\n\n pub fn create_rule(&self, name: &str, trigger: &str, action: &str) -> Result<Value, Error> {\n\n let client = client();\n\n let url = format!(\n\n \"{}/api/v1/namespaces/{}/rules/{}?overwrite=true\",\n\n self.host, self.namespace, name\n\n );\n\n let response = invoke_client(\n\n client\n\n .put(url.clone())\n\n .basic_auth(self.user.clone(), Some(self.pass.clone()))\n\n .json(&serde_json::json!({\n\n \"status\": \"\",\n\n \"action\": format!(\"/{}/{}\",self.namespace, action),\n\n \"trigger\": format!(\"/{}/{}\",self.namespace, trigger)\n\n })),\n\n )\n", "file_path": "actions/common/src/types/context.rs", "rank": 37, "score": 80987.1682102409 }, { "content": " token: \"1\".to_string(),\n\n trigger: \"1\".to_string(),\n\n },\n\n );\n\n context\n\n .update_document(\n\n &topic,\n\n &doc.rev,\n\n &serde_json::to_value(doc.clone()).unwrap(),\n\n )\n\n .unwrap();\n\n let doc: Topic = serde_json::from_value(context.get_document(&topic).unwrap()).unwrap();\n\n let docs: Row<Topic> =\n\n serde_json::from_value(context.get_list(&url, &\"test\".to_string()).unwrap()).unwrap();\n\n let expected: View<Topic> = View {\n\n doc: Topic { ..doc },\n\n };\n\n assert_eq!(docs.rows, vec![expected]);\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n", "file_path": "actions/common/src/types/context.rs", "rank": 38, "score": 80986.84494982315 }, { "content": " .basic_auth(self.user.clone(), Some(self.pass.clone()))\n\n .json(value),\n\n )\n\n .map_err(serde::de::Error::custom)?;\n\n match response.status().is_success() {\n\n true => Ok(serde_json::json!({\n\n \"success\": true\n\n })),\n\n false => Err(format!(\n\n \"failed to invoke action {} {:?}\",\n\n name,\n\n response.error_for_status()\n\n ))\n\n .map_err(serde::de::Error::custom),\n\n }\n\n }\n\n\n\n pub fn invoke_trigger(&self, name: &str, value: &Value) -> Result<Value, Error> {\n\n let client = client();\n\n let url = format!(\n", "file_path": "actions/common/src/types/context.rs", "rank": 39, "score": 80986.295601255 }, { "content": " match config {\n\n Some(config) => config.host.clone(),\n\n None => \"host.docker.internal\".to_string(),\n\n }\n\n };\n\n let name = if env::var(\"__OW_ACTION_NAME\").is_ok() {\n\n env::var(\"__OW_ACTION_NAME\").unwrap()\n\n } else {\n\n match config {\n\n Some(config) => config.name.clone(),\n\n None => \"action\".to_string(),\n\n }\n\n };\n\n let namespace = if env::var(\"__OW_NAMESPACE\").is_ok() {\n\n env::var(\"__OW_NAMESPACE\").unwrap()\n\n } else {\n\n match config {\n\n Some(config) => config.namespace.clone(),\n\n None => \"guest\".to_string(),\n\n }\n", "file_path": "actions/common/src/types/context.rs", "rank": 40, "score": 80985.79232359538 }, { "content": " match self.db.update(doc, id, rev).send() {\n\n Ok(r) => return Ok(r.id),\n\n Err(err) => {\n\n return Err(format!(\"error updating document {}: {:?}\", doc, err))\n\n .map_err(serde::de::Error::custom)\n\n }\n\n };\n\n }\n\n\n\n pub fn get_auth_key(&self) -> String {\n\n format!(\"{}:{}\", self.user, self.pass)\n\n }\n\n\n\n pub fn insert_document(&self, doc: &Value, id: Option<String>) -> Result<String, Error> {\n\n match self.db.insert(doc, id).send() {\n\n Ok(r) => return Ok(r.id),\n\n Err(err) => {\n\n return Err(format!(\"error creating document {}: {:?}\", doc, err))\n\n .map_err(serde::de::Error::custom)\n\n }\n", "file_path": "actions/common/src/types/context.rs", "rank": 41, "score": 80985.58243803072 }, { "content": " config.namespace, topic\n\n )))\n\n 
.respond_with(ResponseTemplate::new(200))\n\n .mount(&mock_server)\n\n .await;\n\n config.host = mock_server.uri();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let context = Context::new(connect_db(&url, &\"test\".to_string()), Some(&config));\n\n context.create_rule(&topic, \"trigger\", \"action\").unwrap();\n\n let received_requests = mock_server.received_requests().await;\n\n assert!(received_requests.is_some());\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn invoke_trigger_pass() {\n\n let mut config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n", "file_path": "actions/common/src/types/context.rs", "rank": 42, "score": 80984.74006413227 }, { "content": " if !db.exists().unwrap() {\n\n db.create().unwrap();\n\n }\n\n db\n\n }\n\n\n\n #[tokio::test]\n\n async fn update_document_pass() {\n\n let topic = \"1234\".to_string();\n\n let config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let context = Context::new(connect_db(&url, &\"test\".to_string()), Some(&config));\n\n context\n\n .insert_document(\n\n &serde_json::json!({\n\n \"filters\": {}\n", "file_path": "actions/common/src/types/context.rs", "rank": 43, "score": 80983.86646371814 }, { "content": " let context = Context::new(connect_db(&url, &\"test\".to_string()), Some(&config));\n\n context\n\n .create_trigger(&topic, &serde_json::json!({}))\n\n .unwrap();\n\n let received_requests = mock_server.received_requests().await;\n\n assert!(received_requests.is_some());\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn create_rule_pass() {\n\n let topic = \"1234\".to_string();\n\n let mut config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n\n let mock_server = MockServer::start().await;\n\n Mock::given(method(\"PUT\"))\n\n .and(path(format!(\n\n \"/api/v1/namespaces/{}/rules/{}\",\n", "file_path": "actions/common/src/types/context.rs", "rank": 44, "score": 80983.76992542685 }, { "content": " pub namespace: String,\n\n}\n\n\n\npub struct Context {\n\n pub host: String,\n\n pub name: String,\n\n pub namespace: String,\n\n db: Database,\n\n user: String,\n\n pass: String,\n\n}\n\n\n\n#[cfg(not(test))]\n", "file_path": "actions/common/src/types/context.rs", "rank": 45, "score": 80982.5612041057 }, { "content": " .map_err(serde::de::Error::custom)?;\n\n match response.status().is_success() {\n\n true => Ok(serde_json::json!({\n\n \"success\": true\n\n })),\n\n false => Err(format!(\n\n \"failed to create rule {} {:?}\",\n\n name,\n\n response.error_for_status()\n\n ))\n\n .map_err(serde::de::Error::custom),\n\n }\n\n }\n\n\n\n // TODO: Fix return\n\n pub fn create_trigger(&self, name: &str, value: &Value) -> Result<Value, Error> {\n\n let client = client();\n\n let url = format!(\n\n \"{}/api/v1/namespaces/{}/triggers/{}?overwrite=true\",\n\n self.host, self.namespace, name\n", "file_path": "actions/common/src/types/context.rs", "rank": 46, "score": 80981.84231235819 }, { "content": "\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn 
create_trigger_pass() {\n\n let topic = \"1234\".to_string();\n\n let mut config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n\n let mock_server = MockServer::start().await;\n\n Mock::given(method(\"PUT\"))\n\n .and(path(format!(\n\n \"/api/v1/namespaces/{}/triggers/{}\",\n\n config.namespace, topic\n\n )))\n\n .respond_with(ResponseTemplate::new(200))\n\n .mount(&mock_server)\n\n .await;\n\n config.host = mock_server.uri();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n", "file_path": "actions/common/src/types/context.rs", "rank": 47, "score": 80980.85673992187 }, { "content": " \"{}/api/v1/namespaces/{}/triggers/{}?result=true\",\n\n self.host, self.namespace, name\n\n );\n\n let response = invoke_client(\n\n client\n\n .post(url.clone())\n\n .basic_auth(self.user.clone(), Some(self.pass.clone()))\n\n .json(value),\n\n )\n\n .map_err(serde::de::Error::custom)?;\n\n match response.status().is_success() {\n\n true => Ok(serde_json::json!({\n\n \"success\": true\n\n })),\n\n false => Err(format!(\n\n \"failed to invoke trigger {} {:?}\",\n\n name,\n\n response.error_for_status()\n\n ))\n\n .map_err(serde::de::Error::custom),\n", "file_path": "actions/common/src/types/context.rs", "rank": 48, "score": 80979.79334993985 }, { "content": " return response.json().map_err(serde::de::Error::custom);\n\n\n\n #[cfg(test)]\n\n {\n\n let handle = Handle::current();\n\n return tokio::task::block_in_place(move || {\n\n handle.block_on(async { response.json().await })\n\n })\n\n .map_err(serde::de::Error::custom);\n\n }\n\n }\n\n _ => Err(format!(\"error fetching list {}\", db_name))\n\n .map_err(serde::de::Error::custom),\n\n };\n\n };\n\n\n\n Err(format!(\"error fetching list {}\", db_name)).map_err(serde::de::Error::custom)\n\n }\n\n}\n\n\n", "file_path": "actions/common/src/types/context.rs", "rank": 49, "score": 80978.68127308498 }, { "content": " .unwrap();\n\n let mock_server = MockServer::start().await;\n\n Mock::given(method(\"POST\"))\n\n .and(path(format!(\n\n \"/api/v1/namespaces/{}/triggers/{}\",\n\n config.namespace, \"trigger\"\n\n )))\n\n .respond_with(ResponseTemplate::new(200))\n\n .mount(&mock_server)\n\n .await;\n\n config.host = mock_server.uri();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let context = Context::new(connect_db(&url, &\"test\".to_string()), Some(&config));\n\n context\n\n .invoke_trigger(\"trigger\", &serde_json::json!({}))\n\n .unwrap();\n\n let received_requests = mock_server.received_requests().await;\n\n assert!(received_requests.is_some());\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n}\n", "file_path": "actions/common/src/types/context.rs", "rank": 50, "score": 80977.87177041765 }, { "content": "mod message;\n\npub use message::Message;\n", "file_path": "actions/push-notification/src/types/mod.rs", "rank": 51, "score": 80891.02528443836 }, { "content": "mod source;\n\npub use source::Source;", "file_path": "actions/event-registration/src/types/mod.rs", "rank": 52, "score": 80891.02528443835 }, { "content": "mod event;\n\npub use event::Event;", "file_path": "actions/event-receiver/src/types/mod.rs", "rank": 53, "score": 80891.02528443835 }, { "content": "use super::Topic;\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse serde_json::{from_str, Error};\n\n\n\npub type Payload = 
Vec<(Deposit, Topic)>;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Message {\n\n pub topic: String,\n\n pub value: String,\n\n}\n\n\n\n//TODO: Change\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Deposit {\n\n #[serde(rename = \"to\")]\n\n pub address: String,\n\n #[serde(rename = \"value\")]\n\n pub amount: String,\n\n}\n\n\n\nimpl Message {\n\n pub fn parse_value(&self) -> Result<Deposit, Error> {\n\n from_str(&self.value)\n\n }\n\n}\n", "file_path": "actions/balance-filter/src/types/message.rs", "rank": 54, "score": 80338.49602717538 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Source {\n\n name: String,\n\n topic: String,\n\n trigger: String,\n\n}\n", "file_path": "actions/balance-filter/src/types/source.rs", "rank": 55, "score": 80327.79528530699 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Body {\n\n pub topic: String,\n\n pub token: String,\n\n pub address: String,\n\n}\n", "file_path": "actions/balance-notification-registration/src/types/body.rs", "rank": 56, "score": 78605.44436579036 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Topic {\n\n #[serde(rename = \"_id\")]\n\n pub id: String,\n\n #[serde(rename = \"_rev\")]\n\n pub rev: String,\n\n pub filters: HashMap<String, Address>\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Address {\n\n pub token: String\n\n}", "file_path": "actions/balance-notification-registration/src/types/topic.rs", "rank": 57, "score": 78569.96887655628 }, { "content": "mod body;\n\nmod topic;\n\nmod response;\n\npub use body::Body;\n\npub use topic::{Topic, Address};\n\npub use response::Response;", "file_path": "actions/balance-notification-registration/src/types/mod.rs", "rank": 58, "score": 78407.20345644186 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Event {\n\n section: String,\n\n method: String,\n\n data: Vec<HashMap<String, String>>,\n\n}\n\n\n", "file_path": "actions/substrate-event-processor/src/lib.rs", "rank": 59, "score": 69877.94943523398 }, { "content": "const WorkboxWebpackPlugin = require('workbox-webpack-plugin')\n\nconst path = require('path');\n\nmodule.exports = function override(webpackConfig) {\n\n webpackConfig.module.rules.push({\n\n test: /\\.mjs$/,\n\n include: /node_modules/,\n\n type: \"javascript/auto\"\n\n });\n\n webpackConfig.plugins.push(new WorkboxWebpackPlugin.InjectManifest({\n\n swSrc: path.join(process.cwd(), '/src/sw.js'),\n\n swDest: 'firebase-messaging-sw.js',\n\n exclude: [\n\n /\\.map$/,\n\n /manifest$/,\n\n /\\.htaccess$/,\n\n /firebase-messaging-sw\\.js$/,\n\n /sw\\.js$/,\n\n ]\n\n }));\n\n\n\n return webpackConfig;\n", "file_path": "examples/substrate-push-notification/config-overrides.js", "rank": 60, "score": 60997.46611502485 }, { "content": "export const config = {\n\n apiKey: \"xxxxxx\",\n\n authDomain: \"xxxxxx\",\n\n projectId: \"xxxxxx\",\n\n storageBucket: \"xxxxxx\",\n\n messagingSenderId: \"xxxxxx\",\n\n appId: \"xxxxxx\",\n\n measurementId: \"xxxxxx\"\n", "file_path": "examples/substrate-push-notification/src/config/firebase.js", "rank": 61, "score": 58634.09031403881 }, { "content": "const config = { ...configCommon, ...configEnv, 
...envVars, types, firebase };\n", "file_path": "examples/substrate-push-notification/src/config/index.js", "rank": 62, "score": 58634.09031403881 }, { "content": "#[cfg(feature = \"mock_containers\")]\n\nmod couchdb_test_container;\n\n\n\n\n\n#[cfg(feature = \"mock_containers\")]\n\npub mod mock_containers {\n\n pub use super::couchdb_test_container::CouchDB;\n\n}", "file_path": "actions/common/src/mock/mod.rs", "rank": 63, "score": 54245.01233893488 }, { "content": "extern crate serde_json;\n\n\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse serde_json::{Error, Value};\n\nmod types;\n\nuse actions_common::Context;\n\n\n\n#[cfg(test)]\n\nuse actions_common::Config;\n\n\n\nuse chesterfield::sync::{Client, Database};\n\nuse types::{Message, Payload};\n\n\n\n#[cfg(test)]\n\nuse types::{Address, Deposit, Topic};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 64, "score": 53673.396746393264 }, { "content": " let db = self.connect_db(&self.params.db_url, &self.params.db_name);\n\n self.context = Some(Context::new(db, None));\n\n }\n\n\n\n fn connect_db(&self, db_url: &str, db_name: &str) -> Database {\n\n let client = Client::new(db_url).unwrap();\n\n let db = client.database(db_name).unwrap();\n\n if !db.exists().unwrap() {\n\n db.create().unwrap();\n\n }\n\n db\n\n }\n\n\n\n pub fn get_context(&mut self) -> &Context {\n\n self.context.as_mut().expect(\"Action not Initialized!\")\n\n }\n\n\n\n fn filter_topics(&mut self) -> Payload {\n\n let mut payload = vec![];\n\n for message in self.params.messages.clone().iter() {\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 65, "score": 53669.32671560593 }, { "content": " \"filters\": {}\n\n }),\n\n Some(topic.clone()),\n\n )\n\n .unwrap();\n\n\n\n let mut doc: Topic =\n\n serde_json::from_value(action.get_context().get_document(&topic).unwrap()).unwrap();\n\n\n\n doc.filters.insert(\n\n \"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\".to_string(),\n\n Address {\n\n token: \"1\".to_string(),\n\n },\n\n );\n\n\n\n action\n\n .get_context()\n\n .update_document(\n\n &topic,\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 66, "score": 53662.08724723886 }, { "content": " })).unwrap();\n\n\n\n let mut action = Action::new(input);\n\n action.init(&config);\n\n\n\n action\n\n .get_context()\n\n .insert_document(\n\n &serde_json::json!({\n\n \"filters\": {}\n\n }),\n\n Some(topic.clone()),\n\n )\n\n .unwrap();\n\n\n\n let mut doc: Topic =\n\n serde_json::from_value(action.get_context().get_document(&topic).unwrap()).unwrap();\n\n\n\n doc.filters.insert(\n\n \"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\".to_string(),\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 67, "score": 53661.506022679365 }, { "content": " &serde_json::json!({\n\n \"filters\": {}\n\n }),\n\n Some(topic.clone()),\n\n )\n\n .unwrap();\n\n\n\n let mut doc: Topic =\n\n serde_json::from_value(action.get_context().get_document(&topic).unwrap()).unwrap();\n\n\n\n doc.filters.insert(\n\n \"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\".to_string(),\n\n Address {\n\n token: \"1\".to_string(),\n\n },\n\n );\n\n\n\n action\n\n .get_context()\n\n .update_document(\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 68, "score": 53661.47285009567 }, { "content": " Address {\n\n token: \"1\".to_string(),\n\n },\n\n );\n\n\n\n action\n\n .get_context()\n\n .update_document(\n\n &topic,\n\n &doc.rev,\n\n &serde_json::to_value(doc.clone()).unwrap(),\n\n 
)\n\n .unwrap();\n\n let doc: Topic =\n\n serde_json::from_value(action.get_context().get_document(&topic).unwrap()).unwrap();\n\n let expected = vec![(\n\n Deposit {\n\n address: \"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\".to_string(),\n\n amount: \"100.0000\".to_string(),\n\n },\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 69, "score": 53660.96717763624 }, { "content": " pub fn invoke_trigger(&mut self, payload: Payload) -> Result<Value, Error> {\n\n let mut failed_triggers = vec![];\n\n for message in payload.iter() {\n\n let trigger = self.params.push_notification_trigger.clone();\n\n // TODO: Add attributes neccessary for push notification trigger\n\n if self\n\n .get_context()\n\n .invoke_trigger(\n\n &trigger,\n\n &serde_json::json!({\n\n \"token\": message.1.filters.get(&message.0.address).unwrap().token,\n\n \"message\": {\n\n \"title\": \"Amount Recieved!\",\n\n \"body\": message.0.amount\n\n }\n\n }),\n\n )\n\n .is_err()\n\n {\n\n failed_triggers.push(message.0.address.clone());\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 70, "score": 53660.90823504824 }, { "content": " topic: \"1234\".to_string(),\n\n value: serde_json::json!({\n\n \"from\": \"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\",\n\n \"to\":\"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\",\n\n \"value\": \"100.0000\".to_string(),\n\n })\n\n .to_string(),\n\n },\n\n ];\n\n let mut action = Action::new(Input {\n\n push_notification_trigger: \"push_notification\".to_string(),\n\n db_url: url,\n\n db_name: \"test\".to_string(),\n\n messages,\n\n });\n\n action.init(&config);\n\n\n\n action\n\n .get_context()\n\n .insert_document(\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 71, "score": 53659.92275820885 }, { "content": " Topic {\n\n id: doc.id,\n\n rev: doc.rev,\n\n filters: doc.filters,\n\n },\n\n )];\n\n let filtered_topics = action.filter_topics();\n\n assert_eq!(action.filter_address(filtered_topics), expected);\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n\n\n // TODO: This panic because of reqwest blocking in tokio runtime context. 
Should Add sync or async context.\n\n #[ignore]\n\n #[should_panic]\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn invoke_trigger_pass() {\n\n let config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 72, "score": 53659.35965760059 }, { "content": " &topic,\n\n &doc.rev,\n\n &serde_json::to_value(doc.clone()).unwrap(),\n\n )\n\n .unwrap();\n\n let doc: Topic =\n\n serde_json::from_value(action.get_context().get_document(&topic).unwrap()).unwrap();\n\n let expected = vec![(\n\n Deposit {\n\n address: \"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\".to_string(),\n\n amount: \"100.0000\".to_string(),\n\n },\n\n Topic {\n\n id: doc.id,\n\n rev: doc.rev,\n\n filters: doc.filters,\n\n },\n\n )];\n\n assert_eq!(action.filter_topics(), expected);\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 73, "score": 53658.74601659819 }, { "content": " sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let topic = \"1234\".to_string();\n\n let input = serde_json::from_value::<Input>(serde_json::json!({\n\n \"push_notification_trigger\": \"test\",\n\n \"db_name\": \"test\",\n\n \"db_url\": url,\n\n \"messages\": [{\n\n \"topic\":\"418a8b8c-02b8-11ec-9a03-0242ac130003\",\n\n \"value\": \"{\\\"from\\\":\\\"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\\\",\\\"to\\\":\\\"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\\\",\\\"value\\\":1000}\"\n\n }]\n\n })).unwrap();\n\n \n\n let mut action = Action::new(input);\n\n action.init(&config);\n\n\n\n action\n\n .get_context()\n\n .insert_document(\n\n &serde_json::json!({\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 74, "score": 53658.365412737796 }, { "content": " &doc.rev,\n\n &serde_json::to_value(doc.clone()).unwrap(),\n\n )\n\n .unwrap();\n\n let filtered_topics = action.filter_topics();\n\n assert_eq!(\n\n action\n\n .invoke_trigger(action.filter_address(filtered_topics))\n\n .unwrap(),\n\n serde_json::json!({})\n\n );\n\n couchdb.delete().await.expect(\"Stopping Container Failed\");\n\n }\n\n}\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 75, "score": 53654.2453739794 }, { "content": " }\n\n\n\n #[tokio::test]\n\n async fn filter_address_pass() {\n\n let config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", couchdb.port());\n\n let topic = \"418a8b8c-02b8-11ec-9a03-0242ac130003\".to_string();\n\n let messages = vec![\n\n Message {\n\n topic: \"418a8b8c-02b8-11ec-9a03-0242ac130003\".to_string(),\n\n value: serde_json::json!({\n\n \"from\": \"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\",\n\n \"to\":\"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\",\n\n \"value\": \"100.0000\".to_string(),\n\n })\n\n .to_string(),\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 76, "score": 53653.299626279244 }, { "content": " #[tokio::test]\n\n async fn filter_topics_pass() {\n\n let config = Config::new();\n\n let couchdb = CouchDB::new(\"admin\".to_string(), \"password\".to_string())\n\n .await\n\n .unwrap();\n\n sleep(Duration::from_millis(5000)).await;\n\n let url = format!(\"http://admin:password@localhost:{}\", 
couchdb.port());\n\n let topic = \"1234\".to_string();\n\n let messages = vec![\n\n Message {\n\n topic: \"mytopic\".to_string(),\n\n value: serde_json::json!({\n\n \"from\": \"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\",\n\n \"to\":\"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\",\n\n \"value\": \"100.0000\".to_string(),\n\n })\n\n .to_string(),\n\n },\n\n Message {\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 77, "score": 53652.94563038514 }, { "content": " if let Ok(topic) = self.get_context().get_document(&message.topic) {\n\n message.parse_value().unwrap();\n\n if message.parse_value().is_ok() {\n\n payload.push((\n\n message.parse_value().unwrap(),\n\n serde_json::from_value(topic).unwrap(),\n\n ))\n\n }\n\n }\n\n }\n\n payload\n\n }\n\n\n\n fn filter_address(&self, payload: Payload) -> Payload {\n\n payload\n\n .into_iter()\n\n .filter(|message| message.1.filters.contains_key(&message.0.address))\n\n .collect()\n\n }\n\n\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 78, "score": 53652.4237845589 }, { "content": " },\n\n Message {\n\n topic: \"418a8b8c-02b8-11ec-9a03-0242ac130003\".to_string(),\n\n value: serde_json::json!({\n\n \"from\": \"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\",\n\n \"to\":\"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\",\n\n \"value\": \"100.0000\".to_string(),\n\n })\n\n .to_string(),\n\n },\n\n ];\n\n\n\n let input = serde_json::from_value::<Input>(serde_json::json!({\n\n \"push_notification_trigger\": \"push_notification\",\n\n \"db_name\": \"test\",\n\n \"db_url\": url,\n\n \"messages\": [{\n\n \"topic\":\"418a8b8c-02b8-11ec-9a03-0242ac130003\",\n\n \"value\": \"{\\\"from\\\":\\\"12o3hWM94g5EoNkEiPibo7WMToM6gKvL8osJCGht9W79iEpf\\\",\\\"to\\\":\\\"15ss3TDX2NLG31ugk6QN5zHhq2MUfiaPhePSjWwht6Dr9RUw\\\",\\\"value\\\":\\\"100.0000\\\"}\"\n\n }]\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 79, "score": 53652.236616485265 }, { "content": " }\n\n }\n\n if !failed_triggers.is_empty() {\n\n return Err(format!(\"error in triggers {}\", failed_triggers.join(\", \")))\n\n .map_err(serde::de::Error::custom);\n\n }\n\n Ok(serde_json::json!({\n\n \"action\": \"success\"\n\n }))\n\n }\n\n}\n\n\n", "file_path": "actions/balance-filter/src/lib.rs", "rank": 80, "score": 53647.2297197267 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Trigger {\n\n pub name: String,\n\n pub url: String\n\n}\n\n\n\nimpl Trigger {\n\n pub fn new(name: String, url: String) -> Self {\n\n Trigger{ name, url }\n\n }\n\n}", "file_path": "actions/common/src/types/trigger.rs", "rank": 81, "score": 52840.60289569611 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Source {\n\n name: String,\n\n trigger: String\n\n}\n\n\n\nimpl Source {\n\n pub fn new(name: String, trigger: String) -> Self {\n\n Source { name, trigger }\n\n }\n\n}\n", "file_path": "actions/event-registration/src/types/source.rs", "rank": 82, "score": 51159.065064177186 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Message {\n\n pub title: String,\n\n pub body: String,\n\n}\n", "file_path": "actions/push-notification/src/types/message.rs", "rank": 83, "score": 51158.48866860638 }, { "content": "// struct Spec {\n\n// url: String,\n\n// version: String\n\n// }\n\n\n\n// struct Field {\n\n\n\n// 
}\n\n\n\npub struct Event {\n\n // source: String,\n\n payload: String, // TODO: encoded\n\n // event_type: String,\n\n // time: String,\n\n // spec: Spec,\n\n // encoding: String, // for serializing and deserializing eg base64, SCALE, BORSH etc\n\n // fields: Vec<Field>\n\n}", "file_path": "actions/event-receiver/src/types/event.rs", "rank": 84, "score": 51150.226842038624 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Response {\n\n pub rows: Vec<Row>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Row {\n\n doc: Doc,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n", "file_path": "actions/balance-notification-registration/src/types/response.rs", "rank": 85, "score": 49577.87672793259 }, { "content": "const types = require('./types.json');\n", "file_path": "examples/substrate-push-notification/src/config/index.js", "rank": 86, "score": 48688.5448245762 }, { "content": "# Configuration\n\n\n\nConfiguration values below are passed as parameters to the script.\n\n\n\n### Openwhisk API Host\n\n\n\nOpenwhisk API Endpoint to where the actions to be deployed\n\n\n\n| Parameter | Default Value |\n\n| :--- | :--- |\n\n| --openwhiskApiHost | `https://localhost:31001` |\n\n\n\n#### usage\n\n\n\n```text\n\n./deploy.sh --openwhiskApiHost https://localhost:31001\n\n```\n\n\n\n### \n\n\n\n### Openwhisk API Key\n\n\n\nOpenwhisk authentication key.\n\n\n\n| Parameter | Default Value |\n\n| :--- | :--- |\n\n| --openwhiskApiKey | 23bc46b1-71f6-4ed5-8c54-816aa4f8c502:123zO3xZCLrMN6v2BKK1dXYFpXlPkccOFqm12CdAsMgRU4VrNZ9lyGVCGuMDGIwP |\n\n\n\n#### usage\n\n\n\n```text\n\n./deploy.sh --openwhiskApiKey 23bc46b1-71f6-4ed5-8c54-816aa4f8c502:123zO3xZCLrMN6v2BKK1dXYFpXlPkccOFqm12CdAsMgRU4VrNZ9lyGVCGuMDGIwP\n\n```\n\n\n\n### \n\n\n\n### Openwhisk Namespace\n\n\n\nOrganization space where the actions to be deployed\n\n\n\n| Parameter | Default Value |\n\n| :--- | :--- |\n\n| --openwhiskNamespace | guest |\n\n\n\n#### usage\n\n\n\n```text\n\n./deploy.sh --openwhiskNamespace guest\n\n```\n\n\n\n###\n\n\n\n### Build Action\n\n\n\nProvide actions to be build, Accepts `none` or name of the actions separated by `,` to be build\n\n\n\n| Parameter | Default Value |\n\n| :--- | :--- |\n\n| --build | push-notification,balance-filter,event-receiver,event-registration,balance-notification-registration,event-producer,kafka-provider-feed,kafka-provider-web,substrate-event-processor |\n\n\n\n#### usage\n\n\n\n```text\n\n./deploy.sh --build push-notification\n\n```\n\n\n\n### Skip Build and Deploy Action\n\n\n\nProvide actions to be skip, Accepts name of the available actions separated by `,`\n\n\n\n| Parameter | Default Value |\n\n| :--- | :--- |\n\n| --skip | none |\n\n\n\n#### usage\n\n\n\n```text\n\n./deploy.sh --skip push-notification,balance-filter\n", "file_path": "docs/configuration.md", "rank": 87, "score": 45540.38772796422 }, { "content": "def validateParams(params):\n\n validatedParams = params.copy()\n\n requiredParams = ['brokers', 'topic', 'value']\n\n missingParams = []\n\n\n\n for requiredParam in requiredParams:\n\n if requiredParam not in params:\n\n missingParams.append(requiredParam)\n\n\n\n if len(missingParams) > 0:\n\n return (False, \"You must supply all of the following parameters: {}\".format(', '.join(missingParams)))\n\n\n\n if isinstance(params['brokers'], str):\n\n # turn it into a List\n\n validatedParams['brokers'] = 
params['brokers'].split(',')\n\n\n\n shuffle(validatedParams['brokers'])\n\n\n\n if 'base64DecodeValue' in params and params['base64DecodeValue'] == True:\n\n try:\n\n validatedParams['value'] = base64.b64decode(params['value']).decode('utf-8')\n\n except:\n\n return (False, \"value parameter is not Base64 encoded\")\n\n\n\n if len(validatedParams['value']) == 0:\n\n return (False, \"value parameter is not Base64 encoded\")\n\n\n\n if 'base64DecodeKey' in params and params['base64DecodeKey'] == True:\n\n try:\n\n validatedParams['key'] = base64.b64decode(params['key']).decode('utf-8')\n\n except:\n\n return (False, \"key parameter is not Base64 encoded\")\n\n\n\n if len(validatedParams['key']) == 0:\n\n return (False, \"key parameter is not Base64 encoded\")\n\n\n", "file_path": "actions/event-producer/event-producer.py", "rank": 88, "score": 45468.33888125497 }, { "content": "const useSubstrate = () => ({ ...useContext(SubstrateContext) });\n", "file_path": "examples/substrate-push-notification/src/substrate-lib/SubstrateContext.js", "rank": 89, "score": 44459.14253708321 }, { "content": "# Integration test\n\n\n\n## Steps\n\n1. While the Aurras system is up and running\n\n2. Make sure [wsk cli](https://github.com/apache/openwhisk-cli) is added to the path\n\n3. Install the [actions](../../../#installation)\n\n4. Register event source using the below command with name as param eg: --name polkadot-balance if connecting to polkadot\n\n\n\n```\n\n./register_event_source.sh --name polkadot-balance\n\n```\n\n5. Get the generated uuid and add as an environment variable to the substrate [event feed](../../../../?q=aurras-event-feed)\n\n6. Connect event feed with a Substrate based chain\n\n7. Navigate to examples/susbtrate-push-notification\n\n8. Add [API configuration](../examples/substrate-push-notification/#api-configuration) and [Firebase Push Notification Configuration](../examples/substrate-push-notification/#push-notification-configuration)\n\n9. Install Node Dependencies using `yarn install`\n\n10. Start susbtrate-push-notification using `yarn start`\n\n> For Brave brower enable `Use Google services for push messaging` using brave://settings/privacy\n\n11. Upon Notification Permission Prompt Click Allow\n\n![Allow Push Notification](../examples/substrate-push-notification/images/Screen-1.png)\n\n12. Select the account for which balance notification to be received \n\n13. Click Register Balance Notification button\n\n14. Select the Event Source\n\n##### Push Notification Token\n\n![Allow Push Notification](../examples/substrate-push-notification/images/Screen-2.png)\n\n15. Click Register\n\n![Allow Push Notification](../examples/substrate-push-notification/images/Screen-3.png)\n\n16. Perform an amount transfer transaction to the registered wallet.\n\n> Make sure substrate-push-notification app is not in foreground\n\n17. 
Verify the push notification received.\n\n![Allow Push Notification](../examples/substrate-push-notification/images/Screen-4.png)\n\n![Allow Push Notification](../examples/substrate-push-notification/images/Screen-5.png)\n", "file_path": "docs/integration-testing.md", "rank": 90, "score": 44410.37217346033 }, { "content": "import firebase from \"firebase\";\n\n\n\nexport const config = {\n\n apiKey: \"xxxxxx\",\n\n authDomain: \"xxxxxx\",\n\n projectId: \"xxxxxx\",\n\n storageBucket: \"xxxxxx\",\n\n messagingSenderId: \"xxxxxx\",\n\n appId: \"xxxxxx\",\n\n measurementId: \"xxxxxx\"\n\n}\n\n\n\nfirebase.initializeApp(config);\n", "file_path": "examples/substrate-push-notification/src/config/firebase.js", "rank": 91, "score": 40223.57538908167 }, { "content": "import configCommon from './common.json';\n\nimport firebase from './firebase';\n\n// Using `require` as `import` does not support dynamic loading (yet).\n\nconst configEnv = require(`./${process.env.NODE_ENV}.json`);\n\nconst types = require('./types.json');\n\n\n\n// Accepting React env vars and aggregating them into `config` object.\n\nconst envVarNames = [\n\n 'REACT_APP_PROVIDER_SOCKET',\n\n 'REACT_APP_DEVELOPMENT_KEYRING'\n\n];\n\nconst envVars = envVarNames.reduce((mem, n) => {\n\n // Remove the `REACT_APP_` prefix\n\n if (process.env[n] !== undefined) mem[n.slice(10)] = process.env[n];\n\n return mem;\n\n}, {});\n\n\n\nconst config = { ...configCommon, ...configEnv, ...envVars, types, firebase };\n\nexport default config;\n", "file_path": "examples/substrate-push-notification/src/config/index.js", "rank": 92, "score": 40223.57538908167 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. 
You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nconst common = require('./lib/common');\n\n\n\n/**\n\n * Feed to listen to Kafka messages\n\n * @param {string} brokers - array of Kafka brokers\n\n * @param {string} topic - topic to subscribe to\n\n * @param {bool} isJSONData - attempt to parse messages as JSON\n\n * @param {bool} isBinaryKey - encode key as Base64\n\n * @param {bool} isBinaryValue - encode message as Base64\n\n * @param {string} endpoint - address to OpenWhisk deployment (expected to be bound at deployment)\n\n */\n\nfunction main(params) {\n\n const endpoint = params.endpoint;\n\n const webActionName = params.web_action;\n\n\n\n var massagedParams = common.massageParamsForWeb(params);\n\n massagedParams.triggerName = common.getTriggerFQN(params.triggerName);\n\n\n\n if (params.lifecycleEvent === 'CREATE') {\n\n return common.createTrigger(endpoint, massagedParams, webActionName);\n\n } else if (params.lifecycleEvent === 'READ') {\n\n return common.getTrigger(endpoint, massagedParams, webActionName);\n\n } else if (params.lifecycleEvent === 'UPDATE') {\n\n return common.updateTrigger(endpoint, massagedParams, webActionName);\n\n } else if (params.lifecycleEvent === 'DELETE') {\n\n return common.deleteTrigger(endpoint, massagedParams, webActionName);\n\n }\n\n\n\n return {\n\n error: 'unsupported lifecycleEvent'\n\n };\n\n}\n\n\n\nexports.main = main;\n", "file_path": "actions/kafka-provider-feed/main.js", "rank": 93, "score": 37048.31388051941 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. 
You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nconst common = require('./lib/common');\n\nconst Database = require('./lib/Database');\n\nvar moment = require('moment');\n\n\n\n/**\n\n * Feed to listen to Kafka messages\n\n * @param {string} brokers - array of Kafka brokers\n\n * @param {string} topic - topic to subscribe to\n\n * @param {bool} isJSONData - attempt to parse messages as JSON\n\n * @param {bool} isBinaryKey - encode key as Base64\n\n * @param {bool} isBinaryValue - encode message as Base64\n\n * @param {string} endpoint - address to OpenWhisk deployment (expected to be bound at deployment)\n\n * @param {string} db_url - URL for the DB, must include authentication (expected to be bound at deployment)\n\n * @param {string} db_name - DB name (expected to be bound at deployment)\n\n */\n\nfunction main(params) {\n\n var promise = new Promise((resolve, reject) => {\n\n // hold off initializing this until definitely needed\n\n var db;\n\n\n\n if (params.__ow_method === \"post\") {\n\n var validatedParams;\n\n return validateParameters(params)\n\n .then(cleanParams => {\n\n validatedParams = cleanParams;\n\n\n\n console.log(`VALIDATED: ${JSON.stringify(validatedParams, null, 2)}`);\n\n db = new Database(params.db_url, params.db_name);\n\n\n\n // do these in parallel!\n\n return Promise.all([\n\n db.ensureTriggerIsUnique(validatedParams.triggerName),\n\n verifyTriggerAuth(validatedParams.triggerURL, params.authKey, true)\n\n ]);\n\n })\n\n .then(() => {\n\n var workers = (params.workers || []);\n\n return db.getTriggerAssignment(workers)\n\n })\n\n .then((worker) => {\n\n validatedParams['worker'] = worker;\n\n return db.recordTrigger(validatedParams);\n\n })\n\n .then(() => {\n\n console.log('successfully wrote the trigger');\n\n resolve(common.webResponse(200, validatedParams.uuid));\n\n })\n\n .catch(error => {\n\n console.log(`Failed to write the trigger ${error}`);\n\n\n\n // defaults to potentially be overridden\n\n var statusCode = 500;\n\n var body = error.toString();\n\n\n\n if(error.validationError) {\n\n statusCode = 400;\n\n body = error.validationError;\n\n } else if(error.authError) {\n\n statusCode = 401;\n\n body = error.authError;\n\n }\n\n\n\n resolve(common.webResponse(statusCode, body));\n\n });\n\n } else if (params.__ow_method === \"get\") {\n\n const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName);\n\n\n\n return verifyTriggerAuth(triggerURL, params.authKey, true)\n\n .then(() => {\n\n db = new Database(params.db_url, params.db_name);\n\n return db.getTrigger(params.triggerName);\n\n })\n\n .then((triggerDoc) => {\n\n var body = {\n\n config: {\n\n triggerName: triggerDoc.triggerName,\n\n topic: triggerDoc.topic,\n\n isJSONData: triggerDoc.isJSONData,\n\n isBinaryValue: triggerDoc.isBinaryValue,\n\n isBinaryKey: triggerDoc.isBinaryKey,\n\n brokers: triggerDoc.brokers\n\n },\n\n status: {\n\n active: triggerDoc.status.active,\n\n dateChanged: moment(triggerDoc.status.dateChanged).utc().valueOf(),\n\n dateChangedISO: moment(triggerDoc.status.dateChanged).utc().format(),\n\n reason: triggerDoc.status.reason\n\n }\n\n }\n\n 
resolve(common.webResponse(200, body, 'application/json'));\n\n })\n\n .catch(error => {\n\n resolve(common.webResponse(500, error.toString()));\n\n });\n\n } else if (params.__ow_method === \"put\") {\n\n const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName);\n\n\n\n return verifyTriggerAuth(triggerURL, params.authKey, true)\n\n .then(() => {\n\n db = new Database(params.db_url, params.db_name);\n\n return db.getTrigger(params.triggerName);\n\n })\n\n .then(triggerDoc => {\n\n if (!triggerDoc.status.active) {\n\n return resolve(common.webResponse(400, `${params.triggerName} cannot be updated because it is disabled`));\n\n }\n\n\n\n return common.performUpdateParameterValidation(params, triggerDoc)\n\n .then(updatedParams => {\n\n return db.disableTrigger(triggerDoc)\n\n .then(() => db.getTrigger(params.triggerName))\n\n .then(doc => db.updateTrigger(doc, updatedParams));\n\n });\n\n })\n\n .then(() => {\n\n console.log('successfully updated the trigger');\n\n resolve(common.webResponse(200, 'updated trigger'));\n\n })\n\n .catch(error => {\n\n console.log(`Failed to update trigger ${error}`);\n\n var statusCode = 500;\n\n var body = error.toString();\n\n\n\n if (error.validationError) {\n\n statusCode = 400;\n\n body = error.validationError;\n\n }\n\n resolve(common.webResponse(statusCode, body));\n\n });\n\n } else if (params.__ow_method === \"delete\") {\n\n const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName);\n\n\n\n return verifyTriggerAuth(triggerURL, params.authKey, false)\n\n .then(() => {\n\n db = new Database(params.db_url, params.db_name);\n\n return db.deleteTrigger(params.triggerName);\n\n })\n\n .then(() => {\n\n console.log('successfully deleted the trigger');\n\n resolve(common.webResponse(200, 'deleted trigger'));\n\n })\n\n .catch(error => {\n\n console.log(`Failed to remove trigger ${error}`);\n\n resolve(common.webResponse(500, error.toString()));\n\n });\n\n } else {\n\n resolve(common.webResponse(400, 'unsupported lifecycleEvent'));\n\n }\n\n });\n\n\n\n return promise;\n\n}\n\n\n\nfunction validateParameters(rawParams) {\n\n var promise = new Promise((resolve, reject) => {\n\n var validatedParams;\n\n\n\n var commonValidationResult = common.performCommonParameterValidation(rawParams);\n\n if(commonValidationResult.validationError) {\n\n reject(commonValidationResult);\n\n return;\n\n } else {\n\n validatedParams = commonValidationResult.validatedParams;\n\n }\n\n\n\n // brokers\n\n if (rawParams.brokers) {\n\n validatedParams.brokers = common.validateBrokerParam(rawParams.brokers);\n\n if (!validatedParams.brokers) {\n\n reject( { validationError: \"You must supply a 'brokers' parameter as an array of Message Hub brokers.\" });\n\n return;\n\n }\n\n } else {\n\n reject( { validationError: \"You must supply a 'brokers' parameter.\" });\n\n return;\n\n }\n\n\n\n validatedParams.isMessageHub = false;\n\n\n\n resolve(validatedParams);\n\n });\n\n\n\n return promise;\n\n}\n\n\n\nfunction verifyTriggerAuth(triggerURL, apiKey, rejectNotFound) {\n\n var auth = apiKey.split(':');\n\n return common.verifyTriggerAuth(triggerURL, { user: auth[0], pass: auth[1] }, rejectNotFound);\n\n}\n\n\n\nexports.main = main;\n", "file_path": "actions/kafka-provider-web/main.js", "rank": 94, "score": 37048.31388051941 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. 
See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n// constructor for DB object - a thin, promise-loving wrapper around nano\n\nmodule.exports = function(dbURL, dbName) {\n\n var nano = require('nano')(dbURL);\n\n this.db = nano.db.use(dbName);\n\n\n\n const designDoc = \"filters\";\n\n const assignmentView = \"by-worker\";\n\n\n\n this.getTrigger = function(triggerFQN) {\n\n return new Promise((resolve, reject) => {\n\n this.db.get(triggerFQN, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n\n\n this.ensureTriggerIsUnique = function(triggerFQN) {\n\n return this.getTrigger(this.db, triggerFQN)\n\n .then(result => {\n\n return Promise.reject('Trigger already exists');\n\n })\n\n .catch(err => {\n\n // turn that frown upside-down!\n\n return true;\n\n });\n\n };\n\n\n\n this.recordTrigger = function(params) {\n\n console.log('recording trigger');\n\n\n\n params['_id'] = params.triggerName;\n\n params['status'] = {\n\n 'active': true,\n\n 'dateChanged': Date.now()\n\n };\n\n console.log(params);\n\n return new Promise((resolve, reject) => {\n\n this.db.insert(params, (err, result) => {\n\n if(err) {\n\n if(err.statusCode && err.statusCode === 409) {\n\n this.getTrigger(params.triggerName)\n\n .then(doc => this.disableTrigger(doc))\n\n .then(() => this.getTrigger(params.triggerName))\n\n .then(doc => this.updateTrigger(params, {_rev: doc._rev}))\n\n .then(result => resolve(result))\n\n .catch(err => reject(err));\n\n } else {\n\n reject(err);\n\n }\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n\n\n this.deleteTrigger = function(triggerFQN) {\n\n return this.getTrigger(triggerFQN)\n\n .then(doc => {\n\n return new Promise((resolve, reject) => {\n\n this.db.destroy(doc._id, doc._rev, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n })\n\n };\n\n\n\n this.getTriggerAssignment = function(workers) {\n\n\n\n return new Promise((resolve, reject) => {\n\n var assignment = workers[0] || 'worker0';\n\n\n\n if (workers.length > 1) {\n\n this.db.view(designDoc, assignmentView, {group: true}, (err, result) => {\n\n if (err) {\n\n reject(err);\n\n } else {\n\n // a map between available workers and their number of assigned triggers\n\n // values will be populated with the results of the assignment view\n\n var counter = {};\n\n workers.forEach(worker => {\n\n counter[worker] = 0;\n\n });\n\n\n\n // update counter values with the number of assigned triggers\n\n // for each worker\n\n result.rows.forEach(row => {\n\n if (row.key in counter) {\n\n counter[row.key] = row.value;\n\n }\n\n });\n\n\n\n // find which of the available workers has the least number of\n\n // assigned triggers\n\n for (availableWorker in counter) {\n\n if (counter[availableWorker] < counter[assignment]) {\n\n 
assignment = availableWorker;\n\n }\n\n }\n\n resolve(assignment);\n\n }\n\n });\n\n } else {\n\n resolve(assignment);\n\n }\n\n });\n\n };\n\n\n\n this.disableTrigger = function(existing) {\n\n return new Promise((resolve, reject) => {\n\n var message = 'Automatically disabled trigger while updating';\n\n var status = {\n\n 'active': false,\n\n 'dateChanged': Date.now(),\n\n 'reason': {'kind': 'AUTO', 'statusCode': undefined, 'message': message}\n\n };\n\n existing.status = status;\n\n this.db.insert(existing, (err, result) => {\n\n if (err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n })\n\n };\n\n\n\n this.updateTrigger = function(existing, params) {\n\n for (var key in params) {\n\n if (params[key] !== undefined) {\n\n existing[key] = params[key];\n\n }\n\n }\n\n var status = {\n\n 'active': true,\n\n 'dateChanged': Date.now()\n\n };\n\n existing.status = status;\n\n\n\n return new Promise((resolve, reject) => {\n\n this.db.insert(existing, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n};\n", "file_path": "actions/kafka-provider-web/lib/Database.js", "rank": 95, "score": 36196.20913010317 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n// constructor for DB object - a thin, promise-loving wrapper around nano\n\nmodule.exports = function(dbURL, dbName) {\n\n var nano = require('nano')(dbURL);\n\n this.db = nano.db.use(dbName);\n\n\n\n const designDoc = \"filters\";\n\n const assignmentView = \"by-worker\";\n\n\n\n this.getTrigger = function(triggerFQN) {\n\n return new Promise((resolve, reject) => {\n\n this.db.get(triggerFQN, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n\n\n this.ensureTriggerIsUnique = function(triggerFQN) {\n\n return this.getTrigger(this.db, triggerFQN)\n\n .then(result => {\n\n return Promise.reject('Trigger already exists');\n\n })\n\n .catch(err => {\n\n // turn that frown upside-down!\n\n return true;\n\n });\n\n };\n\n\n\n this.recordTrigger = function(params) {\n\n console.log('recording trigger');\n\n\n\n params['_id'] = params.triggerName;\n\n params['status'] = {\n\n 'active': true,\n\n 'dateChanged': Date.now()\n\n };\n\n\n\n return new Promise((resolve, reject) => {\n\n this.db.insert(params, (err, result) => {\n\n if(err) {\n\n if(err.statusCode && err.statusCode === 409) {\n\n this.getTrigger(params.triggerName)\n\n .then(doc => this.disableTrigger(doc))\n\n .then(() => this.getTrigger(params.triggerName))\n\n .then(doc => this.updateTrigger(params, {_rev: doc._rev}))\n\n .then(result => resolve(result))\n\n 
.catch(err => reject(err));\n\n } else {\n\n reject(err);\n\n }\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n\n\n this.deleteTrigger = function(triggerFQN) {\n\n return this.getTrigger(triggerFQN)\n\n .then(doc => {\n\n return new Promise((resolve, reject) => {\n\n this.db.destroy(doc._id, doc._rev, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n })\n\n };\n\n\n\n this.getTriggerAssignment = function(workers) {\n\n\n\n return new Promise((resolve, reject) => {\n\n var assignment = workers[0] || 'worker0';\n\n\n\n if (workers.length > 1) {\n\n this.db.view(designDoc, assignmentView, {group: true}, (err, result) => {\n\n if (err) {\n\n reject(err);\n\n } else {\n\n // a map between available workers and their number of assigned triggers\n\n // values will be populated with the results of the assignment view\n\n var counter = {};\n\n workers.forEach(worker => {\n\n counter[worker] = 0;\n\n });\n\n\n\n // update counter values with the number of assigned triggers\n\n // for each worker\n\n result.rows.forEach(row => {\n\n if (row.key in counter) {\n\n counter[row.key] = row.value;\n\n }\n\n });\n\n\n\n // find which of the available workers has the least number of\n\n // assigned triggers\n\n for (availableWorker in counter) {\n\n if (counter[availableWorker] < counter[assignment]) {\n\n assignment = availableWorker;\n\n }\n\n }\n\n resolve(assignment);\n\n }\n\n });\n\n } else {\n\n resolve(assignment);\n\n }\n\n });\n\n };\n\n\n\n this.disableTrigger = function(existing) {\n\n return new Promise((resolve, reject) => {\n\n var message = 'Automatically disabled trigger while updating';\n\n var status = {\n\n 'active': false,\n\n 'dateChanged': Date.now(),\n\n 'reason': {'kind': 'AUTO', 'statusCode': undefined, 'message': message}\n\n };\n\n existing.status = status;\n\n this.db.insert(existing, (err, result) => {\n\n if (err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n })\n\n };\n\n\n\n this.updateTrigger = function(existing, params) {\n\n for (var key in params) {\n\n if (params[key] !== undefined) {\n\n existing[key] = params[key];\n\n }\n\n }\n\n var status = {\n\n 'active': true,\n\n 'dateChanged': Date.now()\n\n };\n\n existing.status = status;\n\n\n\n return new Promise((resolve, reject) => {\n\n this.db.insert(existing, (err, result) => {\n\n if(err) {\n\n reject(err);\n\n } else {\n\n resolve(result);\n\n }\n\n });\n\n });\n\n };\n\n};\n", "file_path": "actions/kafka-provider-feed/lib/Database.js", "rank": 96, "score": 36196.20913010317 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. 
You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nconst request = require('request-promise');\n\n\n\nfunction triggerComponents(triggerName) {\n\n var split = triggerName.split(\"/\");\n\n\n\n return {\n\n namespace: split[1],\n\n triggerName: split[2]\n\n };\n\n}\n\n\n\nfunction addHTTPS(url) {\n\n if (!/^https?\\:\\/\\//.test(url)) {\n\n url = \"https://\" + url;\n\n }\n\n return url;\n\n}\n\n\n\nfunction getTriggerURL(endpoint, triggerName) {\n\n var apiHost = addHTTPS(endpoint);\n\n\n\n var components = triggerComponents(triggerName);\n\n var namespace = components.namespace;\n\n var trigger = components.triggerName;\n\n\n\n var url = `${apiHost}/api/v1/namespaces/${encodeURIComponent(namespace)}/triggers/${encodeURIComponent(trigger)}`;\n\n\n\n return url;\n\n}\n\n\n\nfunction verifyTriggerAuth(triggerURL, auth, rejectNotFound) {\n\n var options = {\n\n method: 'GET',\n\n url: triggerURL,\n\n rejectUnauthorized: false,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'User-Agent': 'whisk'\n\n },\n\n auth: auth\n\n };\n\n\n\n return request(options)\n\n .catch(err => {\n\n if (err.statusCode && err.statusCode === 404 && !rejectNotFound) {\n\n return Promise.resolve()\n\n } else {\n\n console.log(`Trigger auth error: ${JSON.stringify(err)}`);\n\n return Promise.reject({ authError: 'You are not authorized for this trigger.' });\n\n }\n\n });\n\n}\n\n\n\nfunction validateBrokerParam(brokerParam) {\n\n if (isNonEmptyArray(brokerParam)) {\n\n return brokerParam;\n\n } else if (typeof brokerParam === 'string') {\n\n return brokerParam.split(',');\n\n } else {\n\n return undefined;\n\n }\n\n}\n\n\n\nfunction getBooleanFromArgs(args, key) {\n\n return (typeof args[key] !== 'undefined' && args[key] && (args[key] === true || args[key].toString().trim().toLowerCase() === 'true'));\n\n}\n\n\n\nfunction isNonEmptyArray(obj) {\n\n return obj && Array.isArray(obj) && obj.length !== 0;\n\n}\n\n\n\n// Return the trigger FQN with the resolved namespace\n\n// This is required to avoid naming conflicts when using the default namespace \"_\"\n\nfunction getTriggerFQN(triggerName) {\n\n var components = triggerName.split('/');\n\n return `/${process.env['__OW_NAMESPACE']}/${components[2]}`;\n\n}\n\n\n\nfunction massageParamsForWeb(rawParams) {\n\n var massagedParams = Object.assign({ }, rawParams);\n\n\n\n // remove these parameters as they may conflict with bound parameters of the web action\n\n delete massagedParams.endpoint;\n\n delete massagedParams.bluemixServiceName;\n\n delete massagedParams.lifecycleEvent;\n\n\n\n return massagedParams;\n\n}\n\n\n\nfunction getWebActionURL(endpoint, actionName) {\n\n var apiHost = addHTTPS(endpoint);\n\n\n\n return `${apiHost}/api/v1/web/${process.env['__OW_NAMESPACE']}/default/${actionName}`;\n\n}\n\n\n\nfunction createTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'POST',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n body: params,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n headers: {\n\n 'Content-Type': 
'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n console.log(`response ${JSON.stringify(response, null, 2)}`)\n\n return {\n\n uuid: response\n\n };\n\n })\n\n .catch(error => {\n\n console.log(`Error creating trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction deleteTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'DELETE',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n body: params,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n // do not pass the response back to the caller, its contents are secret\n\n return;\n\n }).catch(error => {\n\n console.log(`Error deleting trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction getTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'GET',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n qs: params,\n\n headers: {\n\n 'Accept': 'application/json',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n return response;\n\n })\n\n .catch(error => {\n\n console.log(`Error fetching trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction updateTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'PUT',\n\n url: getWebActionURL(endpoint, actionName),\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n rejectUnauthorized: false,\n\n json: true,\n\n body: params,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n return;\n\n })\n\n .catch(error => {\n\n console.log(`Error updating trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\n// perform parameter validation that is common to both feed actions\n\nfunction performCommonParameterValidation(rawParams) {\n\n var validatedParams = { };\n\n\n\n // topic\n\n if (rawParams.topic && rawParams.topic.length > 0) {\n\n validatedParams.topic = rawParams.topic;\n\n } else {\n\n return { validationError: \"You must supply a 'topic' parameter.\" };\n\n }\n\n\n\n // triggerName\n\n if (rawParams.triggerName) {\n\n validatedParams.triggerName = rawParams.triggerName;\n\n } else {\n\n return { validationError: \"You must supply a 'triggerName' parameter.\" };\n\n }\n\n\n\n validatedParams.isJSONData = getBooleanFromArgs(rawParams, 'isJSONData');\n\n validatedParams.isBinaryValue = getBooleanFromArgs(rawParams, 'isBinaryValue');\n\n\n\n if (validatedParams.isJSONData && validatedParams.isBinaryValue) {\n\n return { validationError: 'isJSONData and isBinaryValue cannot both be enabled.' 
};\n\n }\n\n\n\n // now that everything else is valid, let's add these\n\n validatedParams.isBinaryKey = getBooleanFromArgs(rawParams, 'isBinaryKey');\n\n validatedParams.authKey = rawParams.authKey;\n\n validatedParams.triggerURL = getTriggerURL(rawParams.endpoint, rawParams.triggerName);\n\n\n\n const uuid = require('uuid');\n\n validatedParams.uuid = uuid.v4();\n\n\n\n return { validatedParams: validatedParams };\n\n}\n\n\n\nfunction performUpdateParameterValidation(params, doc) {\n\n return new Promise((resolve, reject) => {\n\n\n\n if (params.isBinaryKey !== undefined || params.isBinaryValue !== undefined || params.isJSONData !== undefined) {\n\n var updatedParams = {\n\n isJSONData: doc.isJSONData,\n\n isBinaryKey: doc.isBinaryKey,\n\n isBinaryValue: doc.isBinaryValue\n\n };\n\n\n\n if (params.isJSONData !== undefined) {\n\n updatedParams.isJSONData = getBooleanFromArgs(params, 'isJSONData');\n\n }\n\n\n\n if (params.isBinaryValue !== undefined) {\n\n updatedParams.isBinaryValue = getBooleanFromArgs(params, 'isBinaryValue');\n\n }\n\n\n\n if (updatedParams.isJSONData && updatedParams.isBinaryValue) {\n\n reject({ validationError: 'isJSONData and isBinaryValue cannot both be enabled.' });\n\n }\n\n\n\n if (params.isBinaryKey !== undefined) {\n\n updatedParams.isBinaryKey = getBooleanFromArgs(params, 'isBinaryKey');\n\n }\n\n resolve(updatedParams);\n\n } else {\n\n // cannot update any other parameters\n\n reject({ validationError: 'At least one of isJsonData, isBinaryKey, or isBinaryValue must be supplied.' });\n\n }\n\n });\n\n}\n\n\n\nfunction webResponse(code, body, contentType = 'text/plain') {\n\n return {\n\n statusCode: code,\n\n headers: {\n\n 'Content-Type': contentType\n\n },\n\n body: body\n\n };\n\n}\n\n\n\nmodule.exports = {\n\n 'createTrigger': createTrigger,\n\n 'deleteTrigger': deleteTrigger,\n\n 'getTrigger': getTrigger,\n\n 'updateTrigger': updateTrigger,\n\n 'getBooleanFromArgs': getBooleanFromArgs,\n\n 'getTriggerFQN': getTriggerFQN,\n\n 'getTriggerURL': getTriggerURL,\n\n 'massageParamsForWeb': massageParamsForWeb,\n\n 'performCommonParameterValidation': performCommonParameterValidation,\n\n 'performUpdateParameterValidation': performUpdateParameterValidation,\n\n 'validateBrokerParam': validateBrokerParam,\n\n 'verifyTriggerAuth': verifyTriggerAuth,\n\n 'webResponse': webResponse\n\n};\n", "file_path": "actions/kafka-provider-web/lib/common.js", "rank": 97, "score": 36195.003873238384 }, { "content": "// Forked from https://github.com/apache/openwhisk-package-kafka\n\n\n\n/*\n\n * Licensed to the Apache Software Foundation (ASF) under one or more\n\n * contributor license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright ownership.\n\n * The ASF licenses this file to You under the Apache License, Version 2.0\n\n * (the \"License\"); you may not use this file except in compliance with\n\n * the License. 
You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nconst request = require('request-promise');\n\n\n\nfunction triggerComponents(triggerName) {\n\n var split = triggerName.split(\"/\");\n\n\n\n return {\n\n namespace: split[1],\n\n triggerName: split[2]\n\n };\n\n}\n\n\n\nfunction addHTTPS(url) {\n\n if (!/^https?\\:\\/\\//.test(url)) {\n\n url = \"https://\" + url;\n\n }\n\n return url;\n\n}\n\n\n\nfunction getTriggerURL(endpoint, triggerName) {\n\n var apiHost = addHTTPS(endpoint);\n\n\n\n var components = triggerComponents(triggerName);\n\n var namespace = components.namespace;\n\n var trigger = components.triggerName;\n\n\n\n var url = `${apiHost}/api/v1/namespaces/${encodeURIComponent(namespace)}/triggers/${encodeURIComponent(trigger)}`;\n\n\n\n return url;\n\n}\n\n\n\nfunction verifyTriggerAuth(triggerURL, auth, rejectNotFound) {\n\n var options = {\n\n method: 'GET',\n\n url: triggerURL,\n\n rejectUnauthorized: false,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'User-Agent': 'whisk'\n\n },\n\n auth: auth\n\n };\n\n\n\n return request(options)\n\n .catch(err => {\n\n if (err.statusCode && err.statusCode === 404 && !rejectNotFound) {\n\n return Promise.resolve()\n\n } else {\n\n console.log(`Trigger auth error: ${JSON.stringify(err)}`);\n\n return Promise.reject({ authError: 'You are not authorized for this trigger.' });\n\n }\n\n });\n\n}\n\n\n\nfunction validateBrokerParam(brokerParam) {\n\n if (isNonEmptyArray(brokerParam)) {\n\n return brokerParam;\n\n } else if (typeof brokerParam === 'string') {\n\n return brokerParam.split(',');\n\n } else {\n\n return undefined;\n\n }\n\n}\n\n\n\nfunction getBooleanFromArgs(args, key) {\n\n return (typeof args[key] !== 'undefined' && args[key] && (args[key] === true || args[key].toString().trim().toLowerCase() === 'true'));\n\n}\n\n\n\nfunction isNonEmptyArray(obj) {\n\n return obj && Array.isArray(obj) && obj.length !== 0;\n\n}\n\n\n\n// Return the trigger FQN with the resolved namespace\n\n// This is required to avoid naming conflicts when using the default namespace \"_\"\n\nfunction getTriggerFQN(triggerName) {\n\n var components = triggerName.split('/');\n\n return `/${process.env['__OW_NAMESPACE']}/${components[2]}`;\n\n}\n\n\n\nfunction massageParamsForWeb(rawParams) {\n\n var massagedParams = Object.assign({ }, rawParams);\n\n\n\n // remove these parameters as they may conflict with bound parameters of the web action\n\n delete massagedParams.endpoint;\n\n delete massagedParams.bluemixServiceName;\n\n delete massagedParams.lifecycleEvent;\n\n\n\n return massagedParams;\n\n}\n\n\n\nfunction getWebActionURL(endpoint, actionName) {\n\n var apiHost = addHTTPS(endpoint);\n\n\n\n return `${apiHost}/api/v1/web/${process.env['__OW_NAMESPACE']}/default/${actionName}`;\n\n}\n\n\n\nfunction createTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'POST',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n body: params,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n headers: {\n\n 'Content-Type': 
'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n console.log(`response ${JSON.stringify(response, null, 2)}`)\n\n return {\n\n uuid: response\n\n };\n\n })\n\n .catch(error => {\n\n console.log(`Error creating trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction deleteTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'DELETE',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n body: params,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n // do not pass the response back to the caller, its contents are secret\n\n return;\n\n }).catch(error => {\n\n console.log(`Error deleting trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction getTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'GET',\n\n url: getWebActionURL(endpoint, actionName),\n\n rejectUnauthorized: false,\n\n json: true,\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n qs: params,\n\n headers: {\n\n 'Accept': 'application/json',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n return response;\n\n })\n\n .catch(error => {\n\n console.log(`Error fetching trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\nfunction updateTrigger(endpoint, params, actionName) {\n\n var options = {\n\n method: 'PUT',\n\n url: getWebActionURL(endpoint, actionName),\n\n auth: {\n\n 'user': params.authKey.split(\":\")[0],\n\n 'pass': params.authKey.split(\":\")[1]\n\n },\n\n rejectUnauthorized: false,\n\n json: true,\n\n body: params,\n\n headers: {\n\n 'Content-Type': 'application/json',\n\n 'Accept': 'text/plain',\n\n 'User-Agent': 'whisk'\n\n }\n\n };\n\n\n\n return request(options)\n\n .then(response => {\n\n return;\n\n })\n\n .catch(error => {\n\n console.log(`Error updating trigger: ${JSON.stringify(error, null, 2)}`);\n\n return Promise.reject(error.response.body);\n\n });\n\n}\n\n\n\n// perform parameter validation that is common to both feed actions\n\nfunction performCommonParameterValidation(rawParams) {\n\n var validatedParams = { };\n\n\n\n // topic\n\n if (rawParams.topic && rawParams.topic.length > 0) {\n\n validatedParams.topic = rawParams.topic;\n\n } else {\n\n return { validationError: \"You must supply a 'topic' parameter.\" };\n\n }\n\n\n\n // triggerName\n\n if (rawParams.triggerName) {\n\n validatedParams.triggerName = rawParams.triggerName;\n\n } else {\n\n return { validationError: \"You must supply a 'triggerName' parameter.\" };\n\n }\n\n\n\n validatedParams.isJSONData = getBooleanFromArgs(rawParams, 'isJSONData');\n\n validatedParams.isBinaryValue = getBooleanFromArgs(rawParams, 'isBinaryValue');\n\n\n\n if (validatedParams.isJSONData && validatedParams.isBinaryValue) {\n\n return { validationError: 'isJSONData and isBinaryValue cannot both be enabled.' 
};\n\n }\n\n\n\n // now that everything else is valid, let's add these\n\n validatedParams.isBinaryKey = getBooleanFromArgs(rawParams, 'isBinaryKey');\n\n validatedParams.authKey = rawParams.authKey;\n\n validatedParams.triggerURL = getTriggerURL(rawParams.endpoint, rawParams.triggerName);\n\n\n\n const uuid = require('uuid');\n\n validatedParams.uuid = uuid.v4();\n\n\n\n return { validatedParams: validatedParams };\n\n}\n\n\n\nfunction performUpdateParameterValidation(params, doc) {\n\n return new Promise((resolve, reject) => {\n\n\n\n if (params.isBinaryKey !== undefined || params.isBinaryValue !== undefined || params.isJSONData !== undefined) {\n\n var updatedParams = {\n\n isJSONData: doc.isJSONData,\n\n isBinaryKey: doc.isBinaryKey,\n\n isBinaryValue: doc.isBinaryValue\n\n };\n\n\n\n if (params.isJSONData !== undefined) {\n\n updatedParams.isJSONData = getBooleanFromArgs(params, 'isJSONData');\n\n }\n\n\n\n if (params.isBinaryValue !== undefined) {\n\n updatedParams.isBinaryValue = getBooleanFromArgs(params, 'isBinaryValue');\n\n }\n\n\n\n if (updatedParams.isJSONData && updatedParams.isBinaryValue) {\n\n reject({ validationError: 'isJSONData and isBinaryValue cannot both be enabled.' });\n\n }\n\n\n\n if (params.isBinaryKey !== undefined) {\n\n updatedParams.isBinaryKey = getBooleanFromArgs(params, 'isBinaryKey');\n\n }\n\n resolve(updatedParams);\n\n } else {\n\n // cannot update any other parameters\n\n reject({ validationError: 'At least one of isJsonData, isBinaryKey, or isBinaryValue must be supplied.' });\n\n }\n\n });\n\n}\n\n\n\nfunction webResponse(code, body, contentType = 'text/plain') {\n\n return {\n\n statusCode: code,\n\n headers: {\n\n 'Content-Type': contentType\n\n },\n\n body: body\n\n };\n\n}\n\n\n\nmodule.exports = {\n\n 'createTrigger': createTrigger,\n\n 'deleteTrigger': deleteTrigger,\n\n 'getTrigger': getTrigger,\n\n 'updateTrigger': updateTrigger,\n\n 'getBooleanFromArgs': getBooleanFromArgs,\n\n 'getTriggerFQN': getTriggerFQN,\n\n 'getTriggerURL': getTriggerURL,\n\n 'massageParamsForWeb': massageParamsForWeb,\n\n 'performCommonParameterValidation': performCommonParameterValidation,\n\n 'performUpdateParameterValidation': performUpdateParameterValidation,\n\n 'validateBrokerParam': validateBrokerParam,\n\n 'verifyTriggerAuth': verifyTriggerAuth,\n\n 'webResponse': webResponse\n\n};\n", "file_path": "actions/kafka-provider-feed/lib/common.js", "rank": 98, "score": 36195.003873238384 }, { "content": "const configEnv = require(`./${process.env.NODE_ENV}.json`);\n", "file_path": "examples/substrate-push-notification/src/config/index.js", "rank": 99, "score": 32977.10858724993 } ]
Rust
src/async/bufwriter.rs
kimhyunkang/rssh
046275ccdee7962230d5598b175e8f9815db605c
use super::buf::AsyncBuf; use super::DEFAULT_BUFSIZE; use std::{cmp, io}; use std::io::Write; use futures::{Async, Poll}; #[derive(Debug)] pub struct AsyncBufWriter<W: Write> { inner: Option<W>, buf: AsyncBuf, panicked: bool } #[derive(Debug)] pub struct IntoInnerError<W>(W, io::Error); impl <W: Write> AsyncBufWriter<W> { pub fn new(inner: W) -> AsyncBufWriter<W> { AsyncBufWriter::with_capacity(DEFAULT_BUFSIZE, inner) } pub fn with_capacity(capacity: usize, inner: W) -> AsyncBufWriter<W> { AsyncBufWriter { inner: Some(inner), buf: AsyncBuf::with_capacity(capacity), panicked: false } } pub fn nb_into_inner(mut self) -> Poll<W, IntoInnerError<AsyncBufWriter<W>>> { match self.nb_flush() { Ok(Async::NotReady) => Ok(Async::NotReady), Ok(Async::Ready(())) => Ok(Async::Ready(self.inner.take().unwrap())), Err(e) => Err(IntoInnerError(self, e)) } } #[inline] fn write_inner(&mut self, buf: &[u8]) -> io::Result<usize> { self.panicked = true; let res = self.inner.as_mut().expect("attempted to write after into_inner called").write(buf); self.panicked = false; res } #[inline] fn flush_inner(&mut self) -> io::Result<()> { self.panicked = true; let res = self.inner.as_mut().expect("attempted to flush after into_inner called").flush(); self.panicked = false; res } pub fn nb_flush_buf(&mut self) -> Poll<(), io::Error> { self.panicked = true; let amt = try_nb!(self.inner.as_mut().expect("attempted to flush after into_inner called").write(self.buf.get_ref())); self.panicked = false; self.buf.consume(amt); if self.buf.is_empty() { Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } pub fn nb_flush(&mut self) -> Poll<(), io::Error> { if !self.buf.is_empty() { if let Async::NotReady = try!(self.nb_flush_buf()) { return Ok(Async::NotReady); } } if self.buf.is_empty() { try_nb!(self.flush_inner()); Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } pub fn nb_write_exact(&mut self, buf: &[u8]) -> Poll<(), io::Error> { if buf.len() > self.buf.capacity() { if !self.buf.is_empty() { if let Async::NotReady = try!(self.nb_flush_buf()) { return Ok(Async::NotReady); } } match self.write_inner(buf) { Ok(amt) => { self.buf.write_all(&buf[amt ..]); Ok(Async::Ready(())) } Err(e) => match e.kind() { io::ErrorKind::WouldBlock => Ok(Async::NotReady), _ => Err(e) } } } else { if self.buf.try_write_all(buf) { Ok(Async::Ready(())) } else { match try!(self.nb_flush_buf()) { Async::NotReady => Ok(Async::NotReady), Async::Ready(()) => if self.buf.try_write_all(buf) { Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } } } } pub fn nb_write<F>(&mut self, len: usize, mut f: F) -> Poll<(), io::Error> where F: FnMut(&mut [u8]) -> () { if !self.buf.try_reserve(len) { match try!(self.nb_flush_buf()) { Async::NotReady => return Ok(Async::NotReady), Async::Ready(()) => self.buf.reserve(len) } } f(&mut self.buf.get_mut()[.. len]); self.buf.fill(len); Ok(Async::Ready(())) } } impl <W: Write> Write for AsyncBufWriter<W> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { if buf.len() > self.buf.reserve_size() { try!(self.nb_flush_buf()); } if self.buf.is_empty() && self.buf.capacity() < buf.len() { self.write_inner(buf) } else { let datasize = cmp::min(self.buf.reserve_size(), buf.len()); self.buf.get_mut()[.. datasize].copy_from_slice(&buf[.. 
datasize]); self.buf.fill(datasize); Ok(datasize) } } fn flush(&mut self) -> io::Result<()> { while let Async::NotReady = try!(self.nb_flush_buf()) { () } self.flush_inner() } } impl <W: Write> Drop for AsyncBufWriter<W> { fn drop(&mut self) { if self.inner.is_some() && !self.panicked { let _r = self.nb_flush_buf(); } } } #[cfg(test)] mod test { use super::*; use std::io::{Cursor, Write}; use futures::Async; #[test] fn nb_write_exact() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hell").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"o, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"wor").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"ld!").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_flush().expect("error!")); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } #[test] fn nb_write_exact_larger_than_buf() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hello, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"world!").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_flush().expect("error!")); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } #[test] fn nb_write_exact_then_flush() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hello, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"world!").expect("error!")); bufwriter.flush().expect("error!"); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } #[test] fn nb_write_then_flush() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write(7, |buf| buf.copy_from_slice(b"Hello, ")).expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write(6, |buf| buf.copy_from_slice(b"world!")).expect("error!")); bufwriter.flush().expect("error!"); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } }
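The `file_code` above implements a non-blocking buffered writer: `nb_flush_buf` hands the buffered bytes to the sink once, consumes whatever the sink accepted, and treats `WouldBlock` as "not ready" rather than as an error. A minimal, self-contained sketch of that retry-on-`WouldBlock` flush idea, using only the standard library (the names `TinyBuf` and `try_flush_once` are illustrative and are not part of the rssh crate), might look like this:

```rust
use std::io::{self, Write};

/// Illustrative stand-in for an outgoing buffer; not part of the rssh crate.
struct TinyBuf {
    data: Vec<u8>,
    pos: usize, // bytes the sink has already accepted
}

impl TinyBuf {
    fn pending(&self) -> &[u8] {
        &self.data[self.pos..]
    }

    /// One flush attempt: Ok(true) once the buffer is drained, Ok(false) if
    /// the sink accepted only part of it or reported WouldBlock.
    fn try_flush_once<W: Write>(&mut self, sink: &mut W) -> io::Result<bool> {
        if self.pending().is_empty() {
            return Ok(true);
        }
        match sink.write(self.pending()) {
            Ok(n) => {
                self.pos += n;
                Ok(self.pending().is_empty())
            }
            // WouldBlock is the non-blocking "not ready" signal; keep the data.
            Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(false),
            Err(e) => Err(e),
        }
    }
}

fn main() -> io::Result<()> {
    let mut buf = TinyBuf { data: b"hello".to_vec(), pos: 0 };
    let mut sink: Vec<u8> = Vec::new();
    // In a real non-blocking loop, each `false` would yield back to the reactor.
    while !buf.try_flush_once(&mut sink)? {}
    assert_eq!(sink, b"hello");
    Ok(())
}
```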
use super::buf::AsyncBuf; use super::DEFAULT_BUFSIZE; use std::{cmp, io}; use std::io::Write; use futures::{Async, Poll}; #[derive(Debug)] pub struct AsyncBufWriter<W: Write> { inner: Option<W>, buf: AsyncBuf, panicked: bool } #[derive(Debug)] pub struct IntoInnerError<W>(W, io::Error); impl <W: Write> AsyncBufWriter<W> { pub fn new(inner: W) -> AsyncBufWriter<W> { AsyncBufWriter::with_capacity(DEFAULT_BUFSIZE, inner) } pub fn with_capacity(capacity: usize, inner: W) -> AsyncBufWriter<W> { AsyncBufWriter { inner: Some(inner), buf: AsyncBuf::with_capacity(capacity), panicked: false } } pub fn nb_into_inner(mut self) -> Poll<W, IntoInnerError<AsyncBufWriter<W>>> { match self.nb_flush() { Ok(Async::NotReady) => Ok(Async::NotReady), Ok(Async::Ready(())) => Ok(Async::Ready(self.inner.take().unwrap())), Err(e) => Err(IntoInnerError(self, e)) } } #[inline] fn write_inner(&mut self, buf: &[u8]) -> io::Result<usize> { self.panicked = true; let res = self.inner.as_mut().expect("attempted to write after into_inner called").write(buf); self.panicked = false; res } #[inline] fn flush_inner(&mut self) -> io::Result<()> { self.panicked = true; let res = self.inner.as_mut().expect("attempted to flush after into_inner called").flush(); self.panicked = false; res } pub fn nb_flush_buf(&mut self) -> Poll<(), io::Error> { self.panicked = true; let amt = try_nb!(self.inner.as_mut().expect("attempted to flush after into_inner called").write(self.buf.get_ref())); self.panicked = false; self.buf.consume(amt); if self.buf.is_empty() { Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } pub fn nb_flush(&mut self) -> Poll<(), io::Error> { if !self.buf.is_empty() { if let Async::NotReady = try!(self.nb_flush_buf()) { return Ok(Async::NotReady); } } if self.buf.is_empty() { try_nb!(self.flush_inner()); Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } pub fn nb_write_exact(&mut self, buf: &[u8]) -> Poll<(), io::Error> { if buf.len() > self.buf.capacity() { if !self.buf.is_empty() { if let Async::NotReady = try!(self.nb_flush_buf()) { return Ok(Async::NotReady); } } match self.write_inner(buf) { Ok(amt) => { self.buf.write_all(&buf[amt ..]); Ok(Async::Ready(())) } Err(e) => match e.kind() { io::ErrorKind::WouldBlock => Ok(Async::NotReady), _ => Err(e) } } } else { if self.buf.try_write_all(buf) { Ok(Async::Ready(())) } else { match try!(self.nb_flush_buf()) { Async::NotReady => Ok(Async::NotReady), Async::Ready(()) => if self.buf.try_write_all(buf) { Ok(Async::Ready(())) } else { Ok(Async::NotReady) } } } } } pub fn nb_write<F>(&mut self, len: usize, mut f: F) -> Poll<(), io::Error> where F: FnMut(&mut [u8]) -> () { if !self.buf.try_reserve(len) { match try!(self.nb_flush_buf()) { Async::NotReady => return Ok(Async::NotReady), Async::Ready(()) => self.buf.reserve(len) } } f(&mut self.buf.get_mut()[.. len]); self.buf.fill(len); Ok(Async::Ready(())) } } impl <W: Write> Write for AsyncBufWriter<W> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { if buf.len() > self.buf.reserve_size() { try!(self.nb_flush_buf()); } if self.buf.is_empty() && self.buf.capacity() < buf.len() { self.write_inner(buf) } else { let datasize = cmp::min(self.buf.reserve_size(), buf.len()); self.buf.get_mut()[.. datasize].copy_from_slice(&buf[.. 
datasize]); self.buf.fill(datasize); Ok(datasize) } } fn flush(&mut self) -> io::Result<()> { while let Async::NotReady = try!(self.nb_flush_buf()) { () } self.flush_inner() } } impl <W: Write> Drop for AsyncBufWriter<W> { fn drop(&mut self) { if self.inner.is_some() && !self.panicked { let _r = self.nb_flush_buf(); } } } #[cfg(test)] mod test { use super::*; use std::io::{Cursor, Write}; use futures::Async; #[test] fn nb_write_exact() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut buf
#[test] fn nb_write_exact_larger_than_buf() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hello, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"world!").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_flush().expect("error!")); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } #[test] fn nb_write_exact_then_flush() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hello, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"world!").expect("error!")); bufwriter.flush().expect("error!"); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } #[test] fn nb_write_then_flush() { let writer = { let buf = vec![0u8; 16]; let writer = Cursor::new(buf); let mut bufwriter = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write(7, |buf| buf.copy_from_slice(b"Hello, ")).expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write(6, |buf| buf.copy_from_slice(b"world!")).expect("error!")); bufwriter.flush().expect("error!"); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); } }
writer = AsyncBufWriter::with_capacity(4, writer); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"Hell").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"o, ").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"wor").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_write_exact(b"ld!").expect("error!")); assert_eq!(Async::Ready(()), bufwriter.nb_flush().expect("error!")); if let Async::Ready(w) = bufwriter.nb_into_inner().expect("error!") { w } else { panic!("not ready"); } }; let wsize = writer.position() as usize; assert_eq!(b"Hello, world!".len(), wsize); let buf = writer.into_inner(); assert_eq!(b"Hello, world!".as_ref(), &buf[.. wsize]); }
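The `middle` span above, like the tests in the surrounding `prefix` and `suffix`, relies on the same in-memory pattern: write through the buffered writer into a `Cursor<Vec<u8>>`, then use `position()` to learn how many bytes reached the sink and `into_inner()` to recover the backing buffer. A standalone sketch of just that `Cursor` pattern, with no rssh types involved:

```rust
use std::io::{Cursor, Write};

fn main() {
    // A 16-byte in-memory sink, as in the tests above.
    let mut writer = Cursor::new(vec![0u8; 16]);
    writer.write_all(b"Hello, world!").unwrap();

    // position() reports how far the cursor advanced, i.e. how many bytes
    // were written; into_inner() returns the backing Vec.
    let wsize = writer.position() as usize;
    let buf = writer.into_inner();
    assert_eq!(&buf[..wsize], b"Hello, world!");
}
```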
function_block-function_prefixed
[ { "content": "pub fn ntoh(buf: &[u8]) -> u32 {\n\n ((buf[0] as u32) << 24) + ((buf[1] as u32) << 16) + ((buf[2] as u32) << 8) + (buf[3] as u32)\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 0, "score": 151752.52172682478 }, { "content": "pub fn compute_pad_len<R: Rng>(payload_len: usize, blk_size: usize, rng: &mut R) -> Result<(u32, u8), ()> {\n\n let min_unit = cmp::max(blk_size, 8);\n\n\n\n // maximum possible pkt_len = 5 byte header + payload_len + 255\n\n let pkt_upperbound = cmp::min(try_add!(payload_len, 5 + 255), try_add!(::std::u32::MAX as usize, 4));\n\n\n\n // pkt_len = 5 byte header + payload_len + pad_len\n\n // pad_len must be 4 bytes or larger\n\n // pkt_len must be 16 bytes or larger, and it must be multiple of max(blk_size, 8)\n\n let pkt_lowerbound = cmp::max(try_add!(payload_len, 5 + 4), cmp::max(min_unit, 16));\n\n\n\n let max_pkt_len = pkt_upperbound - (pkt_upperbound % min_unit);\n\n let min_pkt_len = try_add!(pkt_lowerbound, min_unit - 1) / min_unit * min_unit;\n\n\n\n let except_pad = try_add!(payload_len, 5);\n\n let max_pad_len = try_sub!(max_pkt_len, except_pad);\n\n let min_pad_len = try_sub!(min_pkt_len, except_pad);\n\n\n\n if 4 <= min_pad_len && min_pad_len <= max_pad_len && max_pad_len <= 255 {\n\n let pad_len = \n", "file_path": "src/transport.rs", "rank": 1, "score": 147045.52955190456 }, { "content": "pub fn version_exchange<R, W>(reader: AsyncBufReader<R>, writer: AsyncBufWriter<W>, version_string: &str, comment: &str)\n\n -> Box<Future<Item=(AsyncBufReader<R>, AsyncBufWriter<W>, (String, String)), Error=HandshakeError>>\n\n where R: Read + Send + 'static, W: Write + Send + 'static\n\n{\n\n let v_c = format!(\"SSH-2.0-{} {}\", version_string, comment);\n\n\n\n let mut buf = Vec::with_capacity(256);\n\n buf.write(v_c.as_bytes()).unwrap();\n\n buf.write(b\"\\r\\n\").unwrap();\n\n if buf.len() > 255 {\n\n panic!(\"version string and comment too long\");\n\n }\n\n\n\n let w = write_all(writer, buf).and_then(|(writer, _)| {\n\n flush(writer)\n\n }).map_err(|e| e.into());\n\n \n\n let r = read_until(reader, b'\\n', Vec::with_capacity(256)).map_err(|e| e.into()).and_then(|(reader, buf)| {\n\n if buf.starts_with(b\"SSH-2.0-\") && buf.ends_with(b\"\\r\\n\") {\n\n match str::from_utf8(&buf[.. 
buf.len() - 2]) {\n", "file_path": "src/handshake.rs", "rank": 2, "score": 144125.14817417852 }, { "content": "pub fn client_key_exchange<R, W>(reader: AsyncBufReader<R>, writer: AsyncBufWriter<W>, neg: AlgorithmNegotiation, v_c: String, v_s: String)\n\n -> ClearTransport<R, W, OsRng, ClientKeyExchange>\n\n where R: Read, W: Write\n\n{ \n\n let mut rng = OsRng::new().unwrap();\n\n let i_c = build_kexinit_payload(&neg, &mut rng).unwrap();\n\n let st = AlgorithmExchangeState {\n\n v_c: v_c,\n\n v_s: v_s,\n\n i_c: i_c,\n\n written: false,\n\n res: None\n\n };\n\n let kex = ClientKex::AlgorithmExchange(st);\n\n\n\n ClearTransport::new(reader, writer, rng, ClientKeyExchange { st: kex })\n\n}\n\n\n", "file_path": "src/handshake.rs", "rank": 3, "score": 139377.36757613998 }, { "content": "fn into_mpint(buf: &[u8]) -> Vec<u8> {\n\n if buf.len() == 0 {\n\n Vec::new()\n\n } else if buf[0] <= 0x7f {\n\n buf.into()\n\n } else {\n\n let mut v = Vec::with_capacity(buf.len() + 1);\n\n v.push(0);\n\n v.extend_from_slice(buf);\n\n v\n\n }\n\n}\n\n\n", "file_path": "src/handshake.rs", "rank": 4, "score": 129174.59655245856 }, { "content": "fn from_mpint(data: &[u8]) -> &[u8] {\n\n if data.len() > 0 && data[0] == 0 {\n\n &data[1..]\n\n } else {\n\n data\n\n }\n\n}\n", "file_path": "src/handshake.rs", "rank": 5, "score": 115520.38382236034 }, { "content": "struct UnencryptedTransport<R: Read, W: Write, RNG, T> {\n\n rd: AsyncBufReader<R>,\n\n rd_st: PacketReadState,\n\n wr: AsyncBufWriter<W>,\n\n wr_st: PacketWriteState,\n\n rng: RNG,\n\n st: T,\n\n}\n\n\n\nimpl <R: Read, W: Write, RNG, T> UnencryptedTransport<R, W, RNG, T> {\n\n fn new(rd: AsyncBufReader<R>,\n\n wr: AsyncBufWriter<W>,\n\n rng: RNG,\n\n st: T) -> UnencryptedTransport<R, W, RNG, T>\n\n {\n\n UnencryptedTransport {\n\n rd: rd,\n\n rd_st: PacketReadState::Idle,\n\n wr: wr,\n\n wr_st: PacketWriteState::Idle,\n\n rng: rng,\n\n st: st\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 6, "score": 113384.8675530541 }, { "content": "pub fn hton(n: u32) -> [u8; 4] {\n\n let mut buf = [0u8; 4];\n\n buf[0] = (n >> 24) as u8;\n\n buf[1] = ((n >> 16) & 0xff) as u8;\n\n buf[2] = ((n >> 8) & 0xff) as u8;\n\n buf[3] = (n & 0xff) as u8;\n\n\n\n buf\n\n}\n\n\n\npub struct PacketWriteRequest {\n\n pub payload: Vec<u8>,\n\n pub flush: bool\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 7, "score": 111366.64061070475 }, { "content": "pub fn build_kexinit_payload(neg: &AlgorithmNegotiation, rng: &mut Rng) -> Result<Vec<u8>, HandshakeError> {\n\n let kexinit = serialize(neg).unwrap();\n\n let mut payload = Vec::new();\n\n try!(payload.write(&[SSH_MSG_KEXINIT]));\n\n let mut cookie = [0u8; 16];\n\n rng.fill_bytes(&mut cookie);\n\n try!(payload.write(&cookie));\n\n try!(payload.write(&kexinit));\n\n\n\n Ok(payload)\n\n}\n\n\n", "file_path": "src/handshake.rs", "rank": 8, "score": 109871.48039701076 }, { "content": "pub fn de_bytes<D: de::Deserializer>(d: &mut D) -> Result<Vec<u8>, D::Error> {\n\n struct IntoVisitor;\n\n\n\n impl de::Visitor for IntoVisitor {\n\n type Value = Vec<u8>;\n\n\n\n fn visit_bytes<E>(&mut self, v: &[u8]) -> Result<Vec<u8>, E>\n\n {\n\n Ok(v.into())\n\n }\n\n }\n\n\n\n d.deserialize_bytes(IntoVisitor)\n\n}\n\n\n", "file_path": "src/packet/decoder.rs", "rank": 9, "score": 108863.5918179396 }, { "content": "#[inline]\n\npub fn ser_bytes<S: ser::Serializer, T: AsRef<[u8]>>(val: T, s: &mut S) -> Result<(), S::Error> {\n\n s.serialize_bytes(val.as_ref())\n\n}\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 10, "score": 
99329.26434352885 }, { "content": "fn digest_bytes(ctx: &mut Context, bytes: &[u8]) -> Result<(), HandshakeError> {\n\n let len: u32 = match TryFrom::try_from(bytes.len()) {\n\n Ok(l) => l,\n\n Err(_) => return Err(HandshakeError::KexFailed)\n\n };\n\n\n\n ctx.update(&hton(len));\n\n ctx.update(bytes);\n\n Ok(())\n\n}\n\n\n\nimpl Future for AlgorithmExchangeState {\n\n type Item = KeyExchangeState;\n\n type Error = HandshakeError;\n\n\n\n fn poll(&mut self) -> Poll<KeyExchangeState, HandshakeError> {\n\n match self.res.take() {\n\n Some((neg, ctx)) => {\n\n let ring_rng = rand::SystemRandom::new();\n\n let keygen = agreement::EphemeralPrivateKey::generate(\n", "file_path": "src/handshake.rs", "rank": 11, "score": 97048.79432915758 }, { "content": "pub fn deserialize_msg<T: de::Deserialize>(bytes: &[u8]) -> Result<(u8, T), DecoderError> {\n\n let mut decoder = BinaryDecoder::new(&bytes[1..]);\n\n de::Deserialize::deserialize(&mut decoder).and_then(|x| {\n\n if decoder.is_end_of_data() {\n\n Ok((bytes[0], x))\n\n } else {\n\n Err(DecoderError::TrailingData)\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[derive(Debug, PartialEq, Deserialize)]\n\n pub struct TestStruct {\n\n #[serde(deserialize_with = \"de_bytes\")]\n\n data: Vec<u8>,\n\n flag: bool\n\n }\n\n\n\n #[test]\n\n fn detect_trailing_data() {\n\n let data = b\"\\x00\\x00\\x00\\x04test\\x01trailing\";\n\n let res = deserialize::<TestStruct>(data);\n\n assert_eq!(Err(DecoderError::TrailingData), res);\n\n }\n\n}\n", "file_path": "src/packet/decoder.rs", "rank": 12, "score": 94937.34314134168 }, { "content": "pub fn de_inner<D: de::Deserializer, T: de::Deserialize>(d: &mut D) -> Result<T, D::Error> {\n\n struct NestedVisitor<U> {\n\n _x: PhantomData<U>\n\n };\n\n\n\n impl <U: de::Deserialize> de::Visitor for NestedVisitor<U> {\n\n type Value = U;\n\n\n\n fn visit_bytes<E>(&mut self, v: &[u8]) -> Result<U, E>\n\n where E: de::Error\n\n {\n\n let mut decoder = BinaryDecoder::new(v);\n\n de::Deserialize::deserialize(&mut decoder)\n\n .map_err(|e| de::Error::custom(e.to_string()))\n\n }\n\n }\n\n\n\n let visitor = NestedVisitor { _x: PhantomData };\n\n d.deserialize_bytes(visitor)\n\n}\n\n\n", "file_path": "src/packet/decoder.rs", "rank": 13, "score": 92085.42682640364 }, { "content": "pub fn ser_inner<S: ser::Serializer, T: ser::Serialize>(val: &T, s: &mut S) -> Result<(), S::Error> {\n\n let bytes = match serialize(val) {\n\n Ok(x) => x,\n\n Err(e) => return Err(ser::Error::custom(e.to_string()))\n\n };\n\n\n\n s.serialize_bytes(bytes.as_ref())\n\n}\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 14, "score": 89413.93299817215 }, { "content": "pub fn serialize_msg<T: ser::Serialize>(msg_key: u8, val: &T) -> Result<Vec<u8>, EncoderError> {\n\n use serde::ser::Serializer;\n\n\n\n let mut encoder = BinaryEncoder::new();\n\n try!(encoder.serialize_u8(msg_key));\n\n try!(val.serialize(&mut encoder));\n\n Ok(encoder.buf)\n\n}\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 15, "score": 88345.09466846037 }, { "content": "pub fn deserialize<T: de::Deserialize>(bytes: &[u8]) -> Result<T, DecoderError> {\n\n let mut decoder = BinaryDecoder::new(bytes);\n\n de::Deserialize::deserialize(&mut decoder).and_then(|x| {\n\n if decoder.is_end_of_data() {\n\n Ok(x)\n\n } else {\n\n Err(DecoderError::TrailingData)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/packet/decoder.rs", "rank": 16, "score": 86405.44768029905 }, { "content": "pub fn serialize<T: ser::Serialize>(val: &T) -> Result<Vec<u8>, EncoderError> {\n\n 
let mut encoder = BinaryEncoder::new();\n\n try!(val.serialize(&mut encoder));\n\n Ok(encoder.buf)\n\n}\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 17, "score": 83506.23391300142 }, { "content": "pub trait TransportError : From<io::Error> + From<()> {\n\n fn invalid_header() -> Self;\n\n fn panic(&'static str) -> Self;\n\n}\n\n\n\nmacro_rules! try_add {\n\n ($a:expr, $b:expr) => {\n\n if let Some(x) = $a.checked_add($b) {\n\n x\n\n } else {\n\n return Err(().into());\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! try_sub {\n\n ($a:expr, $b:expr) => {\n\n if let Some(x) = $a.checked_sub($b) {\n\n x\n\n } else {\n\n return Err(().into());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 18, "score": 76153.85409307633 }, { "content": "pub fn de_name_list<D: de::Deserializer, T: Name>(d: &mut D) -> Result<Vec<T>, D::Error> {\n\n struct IntoVisitor<U> {\n\n _x: PhantomData<U>\n\n }\n\n\n\n impl <U: Name> de::Visitor for IntoVisitor<U> {\n\n type Value = Vec<U>;\n\n\n\n fn visit_str<E>(&mut self, v: &str) -> Result<Vec<U>, E>\n\n {\n\n if v.len() == 0 {\n\n Ok(Vec::new())\n\n } else {\n\n Ok(v.split(',').map(|s| s.into()).collect())\n\n }\n\n }\n\n }\n\n\n\n let visitor = IntoVisitor { _x: PhantomData };\n\n d.deserialize_str(visitor)\n", "file_path": "src/packet/decoder.rs", "rank": 19, "score": 74563.8611140262 }, { "content": "#[inline]\n\npub fn ser_name_list<S: ser::Serializer, T: AsRef<str>, V: AsRef<[T]>>(val: V, s: &mut S) -> Result<(), S::Error> {\n\n let mut buf = String::new();\n\n for v in val.as_ref().iter() {\n\n if buf.len() != 0 {\n\n buf.write_str(\",\").unwrap();\n\n }\n\n buf.write_str(v.as_ref()).unwrap();\n\n }\n\n s.serialize_bytes(buf.as_bytes())\n\n}\n", "file_path": "src/packet/encoder.rs", "rank": 20, "score": 66682.1784796697 }, { "content": "struct IntoVisitor<T>(PhantomData<T>);\n\n\n\nimpl <T> de::Visitor for IntoVisitor<T> where T: Name {\n\n type Value = T;\n\n\n\n fn visit_str<E>(&mut self, v: &str) -> Result<T, E>\n\n {\n\n Ok(v.into())\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_name_enum {\n\n ($ty:ident {$($variant:ident => $name:expr),*}) => {\n\n #[allow(non_camel_case_types)]\n\n #[derive(Debug, PartialEq, Clone)]\n\n pub enum $ty {\n\n $($variant),*,\n\n Unknown(String)\n\n }\n\n\n", "file_path": "src/packet/types.rs", "rank": 21, "score": 64777.09051224425 }, { "content": "pub trait AsyncPollRead {\n\n fn async_poll_read(&mut self) -> Async<()>;\n\n}\n\n\n\nimpl <R: Io> AsyncPollRead for AsyncBufReader<R> {\n\n fn async_poll_read(&mut self) -> Async<()> {\n\n self.inner.poll_read()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use std::{cmp, io};\n\n use std::io::{BufRead, Cursor, Read};\n\n use futures::Async;\n\n\n\n pub struct MockAsyncReader {\n\n buf: Vec<u8>,\n", "file_path": "src/async/bufreader.rs", "rank": 22, "score": 61122.107397190266 }, { "content": "pub trait AsyncPacketState: Future {\n\n fn wants_read(&self) -> bool {\n\n false\n\n }\n\n\n\n fn on_read(&mut self, _msg: &[u8]) -> Result<(), Self::Error> {\n\n Ok(())\n\n }\n\n\n\n fn write_packet(&self) -> Option<PacketWriteRequest> {\n\n None\n\n }\n\n\n\n fn on_flush(&mut self) -> Result<(), Self::Error> {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 23, "score": 36591.65458493606 }, { "content": "pub trait Name: de::Deserialize + for<'a> From<&'a str> {\n\n}\n\n\n\nimpl <T> Name for T where T: de::Deserialize + for<'a> From<&'a str> {\n\n}\n\n\n", "file_path": "src/packet/decoder.rs", "rank": 24, "score": 32473.984288509662 }, { "content": " pub fn nb_write_exact(&mut self, buf: &[u8]) -> Poll<(), io::Error> {\n\n self.writer.nb_write_exact(buf)\n\n }\n\n\n\n #[inline]\n\n pub fn nb_flush(&mut self) -> Poll<(), io::Error> {\n\n self.writer.nb_flush()\n\n }\n\n\n\n #[inline]\n\n pub fn nb_flush_buf(&mut self) -> Poll<(), io::Error> {\n\n self.writer.nb_flush_buf()\n\n }\n\n}\n\n\n\nimpl <S: Read + Write> Read for BufferedIo<S> {\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n self.reader.read(buf)\n\n }\n", "file_path": "src/async/io.rs", "rank": 25, "score": 28562.328355130125 }, { "content": "}\n\n\n\nimpl <S: Read + Write> Write for BufferedIo<S> {\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.writer.write(buf)\n\n }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.writer.flush()\n\n }\n\n}\n\n\n\nimpl <S: Read + Write> BufRead for BufferedIo<S> {\n\n #[inline]\n\n fn fill_buf(&mut self) -> io::Result<&[u8]> {\n\n self.reader.fill_buf()\n\n }\n\n\n\n #[inline]\n\n fn consume(&mut self, amt: usize) {\n\n self.reader.consume(amt)\n\n }\n\n}\n", "file_path": "src/async/io.rs", "rank": 26, "score": 28560.93330097849 }, { "content": "use super::bufreader::AsyncBufReader;\n\nuse super::bufwriter::AsyncBufWriter;\n\n\n\nuse std::io;\n\nuse std::io::{BufRead, Read, Write};\n\n\n\nuse futures::Poll;\n\nuse tokio_core::io::{Io, ReadHalf, WriteHalf};\n\n\n\nstatic DEFAULT_BUFSIZE: usize = 4096;\n\n\n\npub struct BufferedIo<S: Read + Write> {\n\n reader: AsyncBufReader<ReadHalf<S>>,\n\n writer: AsyncBufWriter<WriteHalf<S>>,\n\n}\n\n\n\nimpl <S: Read + Write + Io> BufferedIo<S> {\n\n pub fn new(stream: S) -> BufferedIo<S> {\n\n BufferedIo::with_capacity(DEFAULT_BUFSIZE, stream)\n\n }\n", "file_path": "src/async/io.rs", "rank": 27, "score": 28553.93695844317 }, { "content": "\n\n pub fn with_capacity(capacity: usize, stream: S) -> BufferedIo<S> {\n\n let (rd, wr) = stream.split();\n\n BufferedIo {\n\n reader: AsyncBufReader::with_capacity(capacity, rd),\n\n 
writer: AsyncBufWriter::with_capacity(capacity, wr)\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn nb_read_exact(&mut self, n: usize) -> Poll<&[u8], io::Error> {\n\n self.reader.nb_read_exact(n)\n\n }\n\n\n\n #[inline]\n\n pub fn nb_read_until(&mut self, byte: u8, limit: usize) -> Poll<&[u8], io::Error> {\n\n self.reader.nb_read_until(byte, limit)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/async/io.rs", "rank": 28, "score": 28552.897690050264 }, { "content": " true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn write_all(&mut self, buf: &[u8]) {\n\n self.reserve(buf.len());\n\n self.write_buf(buf);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn async_buf_try_write_all() {\n\n let mut buf = AsyncBuf::with_capacity(8);\n", "file_path": "src/async/buf.rs", "rank": 29, "score": 28238.746829528107 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn get_ref(&self) -> &[u8] {\n\n &self.buf[self.pos .. self.cap]\n\n }\n\n\n\n #[inline]\n\n pub fn get_mut(&mut self) -> &mut [u8] {\n\n &mut self.buf[self.cap ..]\n\n }\n\n\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self.data_size() == 0\n\n }\n\n\n\n #[inline]\n\n pub fn capacity(&self) -> usize {\n\n self.buf.len()\n", "file_path": "src/async/buf.rs", "rank": 30, "score": 28232.986265614007 }, { "content": "\n\n let data = &self.buf[self.pos .. self.pos + n];\n\n self.pos += n;\n\n if self.pos >= self.cap {\n\n self.pos = 0;\n\n self.cap = 0;\n\n }\n\n data\n\n }\n\n\n\n #[inline]\n\n fn write_buf(&mut self, buf: &[u8]) {\n\n let new_cap = self.cap + buf.len();\n\n self.buf[self.cap .. new_cap].copy_from_slice(buf);\n\n self.cap = new_cap;\n\n }\n\n\n\n pub fn try_write_all(&mut self, buf: &[u8]) -> bool {\n\n if self.cap + buf.len() <= self.buf.len() {\n\n self.write_buf(buf);\n", "file_path": "src/async/buf.rs", "rank": 31, "score": 28230.85469650034 }, { "content": " cap: 0\n\n }\n\n }\n\n\n\n pub fn try_reserve(&mut self, n: usize) -> bool {\n\n if self.cap + n <= self.buf.len() {\n\n return true;\n\n }\n\n\n\n let data_len = self.cap - self.pos;\n\n if data_len > self.pos || data_len + n > self.buf.len() {\n\n false\n\n } else {\n\n let (dst, src) = self.buf.split_at_mut(self.pos);\n\n dst[.. data_len].copy_from_slice(&src[.. 
data_len]);\n\n self.pos = 0;\n\n self.cap = data_len;\n\n true\n\n }\n\n }\n", "file_path": "src/async/buf.rs", "rank": 32, "score": 28229.41154422664 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn fill(&mut self, n: usize) {\n\n self.cap = cmp::min(self.cap + n, self.buf.len());\n\n }\n\n\n\n #[inline]\n\n pub fn consume(&mut self, n: usize) {\n\n self.pos += n;\n\n if self.pos >= self.cap {\n\n self.pos = 0;\n\n self.cap = 0;\n\n }\n\n }\n\n\n\n pub fn consume_and_get(&mut self, n: usize) -> &[u8] {\n\n if self.pos + n > self.cap {\n\n panic!(\"consuming larger than actual data\");\n\n }\n", "file_path": "src/async/buf.rs", "rank": 33, "score": 28228.254811558298 }, { "content": " let result = serialize::<$tyname>(&$val);\n\n assert_eq!(Ok($bytes.to_vec()), result);\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub use super::decoder::{deserialize, de_inner, de_bytes, de_name_list};\n\n pub use super::encoder::{serialize, ser_inner, ser_bytes, ser_name_list};\n\n pub use serde::bytes::ByteBuf;\n\n pub use serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n #[derive(Debug, PartialEq, Deserialize, Serialize)]\n\n pub struct TestStruct {\n\n pkt_len: u32,\n\n pad_len: u8\n\n }\n\n\n\n #[derive(Debug, PartialEq, Deserialize, Serialize)]\n\n pub struct OuterStruct {\n", "file_path": "src/packet/mod.rs", "rank": 34, "score": 28227.64756716773 }, { "content": "\n\n pub fn reserve(&mut self, n: usize) {\n\n if !self.try_reserve(n) {\n\n let target_cap = self.cap + n;\n\n let mut new_capacity = self.buf.len();\n\n while new_capacity < target_cap {\n\n new_capacity *= 2;\n\n }\n\n self.buf.resize(new_capacity, 0);\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn data_size(&self) -> usize {\n\n self.cap - self.pos\n\n }\n\n\n\n #[inline]\n\n pub fn reserve_size(&self) -> usize {\n\n self.buf.len() - self.cap\n", "file_path": "src/async/buf.rs", "rank": 35, "score": 28227.167833535328 }, { "content": "pub mod buf;\n\npub mod bufwriter;\n\npub mod bufreader;\n\n\n\npub use self::bufwriter::AsyncBufWriter;\n\npub use self::bufreader::AsyncBufReader;\n\n\n\npub static DEFAULT_BUFSIZE: usize = 0x8000;\n", "file_path": "src/async/mod.rs", "rank": 36, "score": 28226.38611030322 }, { "content": "use std::cmp;\n\n\n\nuse super::DEFAULT_BUFSIZE;\n\n\n\n#[derive(Debug)]\n\npub struct AsyncBuf {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n cap: usize\n\n}\n\n\n\nimpl AsyncBuf {\n\n pub fn new() -> AsyncBuf {\n\n AsyncBuf::with_capacity(DEFAULT_BUFSIZE)\n\n }\n\n\n\n pub fn with_capacity(capacity: usize) -> AsyncBuf {\n\n AsyncBuf {\n\n buf: vec![0; capacity],\n\n pos: 0,\n", "file_path": "src/async/buf.rs", "rank": 37, "score": 28226.156477848526 }, { "content": " test_codec!(\n\n prim_string,\n\n \"test\".to_string(),\n\n &[0, 0, 0, 4, b't', b'e', b's', b't']\n\n );\n\n\n\n test_codec!(\n\n plain_struct,\n\n TestStruct { pkt_len: 0x010203, pad_len: 30 },\n\n &[0, 1, 2, 3, 30]\n\n );\n\n\n\n test_codec!(\n\n inner_struct,\n\n OuterStruct {\n\n data: b\"test\".to_vec(),\n\n inner: TestStruct { pkt_len: 0x010203, pad_len: 30 }\n\n },\n\n &[0, 0, 0, 4, b't', b'e', b's', b't', 0, 0, 0, 5, 0, 1, 2, 3, 30]\n\n );\n", "file_path": "src/packet/mod.rs", "rank": 38, "score": 28224.335235543425 }, { "content": " assert_eq!(true, buf.try_write_all(b\"Hello\"));\n\n assert_eq!(b\"Hello\", buf.get_ref());\n\n }\n\n\n\n #[test]\n\n fn async_buf_write_all() {\n\n let mut buf = AsyncBuf::with_capacity(8);\n\n assert_eq!(true, buf.try_write_all(b\"Hello\"));\n\n assert_eq!(false, buf.try_write_all(b\", world!\"));\n\n 
assert_eq!(b\"Hello\", buf.get_ref());\n\n buf.write_all(b\", world!\");\n\n assert_eq!(b\"Hello, world!\", buf.get_ref());\n\n }\n\n\n\n #[test]\n\n fn async_buf_try_reserve() {\n\n let mut buf = AsyncBuf::with_capacity(8);\n\n assert_eq!(0, buf.data_size());\n\n assert_eq!(true, buf.try_write_all(b\"Hello\"));\n\n assert_eq!(5, buf.data_size());\n", "file_path": "src/async/buf.rs", "rank": 39, "score": 28223.988832053037 }, { "content": " assert_eq!(b\"Hell\", buf.consume_and_get(4));\n\n assert_eq!(1, buf.data_size());\n\n assert_eq!(true, buf.try_reserve(7));\n\n assert_eq!(8, buf.capacity());\n\n assert_eq!(b\"o\", buf.get_ref());\n\n assert_eq!(7, buf.get_mut().len());\n\n }\n\n\n\n #[test]\n\n fn async_buf_reserve() {\n\n let mut buf = AsyncBuf::with_capacity(8);\n\n assert_eq!(true, buf.try_write_all(b\"Hello\"));\n\n assert_eq!(b\"He\", buf.consume_and_get(2));\n\n assert_eq!(false, buf.try_reserve(6));\n\n buf.reserve(6);\n\n assert_eq!(b\"llo\", buf.get_ref());\n\n assert!(buf.get_mut().len() >= 6);\n\n }\n\n\n\n #[test]\n\n fn async_buf_consume() {\n\n let mut buf = AsyncBuf::with_capacity(16);\n\n assert_eq!(true, buf.try_write_all(b\"Hello, world!\"));\n\n buf.consume(7);\n\n assert_eq!(b\"world!\", buf.get_ref());\n\n }\n\n}\n", "file_path": "src/async/buf.rs", "rank": 40, "score": 28223.660365728527 }, { "content": "pub mod decoder;\n\npub mod encoder;\n\npub mod types;\n\n\n\npub use self::decoder::{BinaryDecoder, deserialize, deserialize_msg};\n\npub use self::encoder::{BinaryEncoder, serialize, serialize_msg};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n macro_rules! test_codec {\n\n ($mod_name:ident, $val:expr, $bytes:expr) => {\n\n mod $mod_name {\n\n use super::*;\n\n\n\n #[test]\n\n fn decode() {\n\n let result = deserialize($bytes);\n\n assert_eq!(Ok($val), result);\n\n }\n\n\n", "file_path": "src/packet/mod.rs", "rank": 41, "score": 28223.277447461795 }, { "content": " #[serde(deserialize_with = \"de_bytes\", serialize_with = \"ser_bytes\")]\n\n data: Vec<u8>,\n\n #[serde(deserialize_with = \"de_inner\", serialize_with = \"ser_inner\")]\n\n inner: TestStruct\n\n }\n\n\n\n #[derive(Debug, PartialEq, Deserialize, Serialize)]\n\n pub enum TestEnum {\n\n #[serde(rename = \"newtype\")]\n\n NewtypeVariant(u32),\n\n #[serde(rename = \"tuple\")]\n\n TupleVariant(String, String),\n\n #[serde(rename = \"struct\")]\n\n StructVariant { a: u32, b: String },\n\n #[serde(rename = \"bytes-struct\")]\n\n BytesStruct {\n\n #[serde(deserialize_with = \"de_bytes\", serialize_with = \"ser_bytes\")]\n\n a: Vec<u8>,\n\n b: bool\n\n }\n", "file_path": "src/packet/mod.rs", "rank": 42, "score": 28223.0918235562 }, { "content": " f: Vec<u8>\n\n }\n\n\n\n impl Serialize for NameEnum {\n\n fn serialize<S: Serializer>(&self, s: &mut S) -> Result<(), S::Error> {\n\n s.serialize_str(self.as_ref())\n\n }\n\n }\n\n\n\n test_codec!(prim_false, false, &[0]);\n\n test_codec!(prim_true, true, &[1]);\n\n test_codec!(prim_u8, 30u8, &[30]);\n\n test_codec!(prim_u32, 0x010203u32, &[0, 1, 2, 3]);\n\n\n\n test_codec!(\n\n bytebuf<ByteBuf>,\n\n b\"test\".to_vec().into(),\n\n &[0, 0, 0, 4, b't', b'e', b's', b't']\n\n );\n\n\n", "file_path": "src/packet/mod.rs", "rank": 43, "score": 28222.70306572006 }, { "content": " }\n\n\n\n #[derive(Debug, PartialEq, Deserialize, Serialize)]\n\n pub struct EnumWrapper {\n\n #[serde(deserialize_with = \"de_inner\", serialize_with = \"ser_inner\")]\n\n e: TestEnum,\n\n flag: bool\n\n }\n\n\n\n #[derive(Debug, PartialEq)]\n\n pub enum NameEnum {\n\n TestName,\n\n AnotherName,\n\n 
Unknown(String)\n\n }\n\n\n\n impl AsRef<str> for NameEnum {\n\n fn as_ref(&self) -> &str {\n\n match *self {\n\n NameEnum::TestName => \"test-name\",\n", "file_path": "src/packet/mod.rs", "rank": 44, "score": 28222.072140066353 }, { "content": " NameEnum::AnotherName => \"another-name\",\n\n NameEnum::Unknown(ref s) => s.as_ref()\n\n }\n\n }\n\n }\n\n\n\n impl <'r> From<&'r str> for NameEnum {\n\n fn from(s: &str) -> NameEnum {\n\n match s {\n\n \"test-name\" => NameEnum::TestName,\n\n \"another-name\" => NameEnum::AnotherName,\n\n _ => NameEnum::Unknown(s.to_string())\n\n }\n\n }\n\n }\n\n\n\n impl Deserialize for NameEnum {\n\n fn deserialize<D: Deserializer>(d: &mut D) -> Result<NameEnum, D::Error> {\n\n pub use serde::de::Visitor;\n\n\n", "file_path": "src/packet/mod.rs", "rank": 45, "score": 28219.520118554025 }, { "content": " #[test]\n\n fn encode() {\n\n let result = serialize(&$val);\n\n assert_eq!(Ok($bytes.to_vec()), result);\n\n }\n\n }\n\n };\n\n\n\n ($mod_name:ident<$tyname:ident>, $val:expr, $bytes:expr) => {\n\n mod $mod_name {\n\n use super::*;\n\n\n\n #[test]\n\n fn decode() {\n\n let result = deserialize::<$tyname>($bytes);\n\n assert_eq!(Ok($val), result);\n\n }\n\n\n\n #[test]\n\n fn encode() {\n", "file_path": "src/packet/mod.rs", "rank": 46, "score": 28218.84687040784 }, { "content": " 0, 0, 1, 2,\n\n 0, 0, 0, 1, b'x',\n\n ]\n\n );\n\n\n\n test_codec!(\n\n enum_bytes_in_struct,\n\n TestEnum::BytesStruct { a: b\"test\".to_vec(), b: true },\n\n b\"\\x00\\x00\\x00\\x0cbytes-struct\\x00\\x00\\x00\\x04test\\x01\"\n\n );\n\n\n\n test_codec!(\n\n wrapped_enum,\n\n EnumWrapper {\n\n e: TestEnum::StructVariant { a: 0x0102, b: \"x\".to_string() },\n\n flag: true\n\n },\n\n &[\n\n 0, 0, 0, 19,\n\n 0, 0, 0, 6, b's', b't', b'r', b'u', b'c', b't',\n", "file_path": "src/packet/mod.rs", "rank": 47, "score": 28218.201780859396 }, { "content": " struct IntoVisitor;\n\n\n\n impl Visitor for IntoVisitor {\n\n type Value = NameEnum;\n\n\n\n fn visit_str<E>(&mut self, v: &str) -> Result<NameEnum, E>\n\n {\n\n Ok(v.into())\n\n }\n\n }\n\n\n\n d.deserialize_str(IntoVisitor)\n\n }\n\n }\n\n\n\n #[derive(Debug, PartialEq, Deserialize, Serialize)]\n\n pub struct NameListWrapper {\n\n #[serde(deserialize_with = \"de_name_list\", serialize_with = \"ser_name_list\")]\n\n e: Vec<NameEnum>,\n\n #[serde(deserialize_with = \"de_bytes\", serialize_with = \"ser_bytes\")]\n", "file_path": "src/packet/mod.rs", "rank": 48, "score": 28215.753597913303 }, { "content": "\n\n test_codec!(\n\n enum_newtype,\n\n TestEnum::NewtypeVariant(0x0102),\n\n &[0, 0, 0, 7, b'n', b'e', b'w', b't', b'y', b'p', b'e', 0, 0, 1, 2]\n\n );\n\n\n\n test_codec!(\n\n enum_tuple,\n\n TestEnum::TupleVariant(\"a\".to_string(), \"bc\".to_string()),\n\n &[0, 0, 0, 5, b't', b'u', b'p', b'l', b'e',\n\n 0, 0, 0, 1, b'a',\n\n 0, 0, 0, 2, b'b', b'c'\n\n ]\n\n );\n\n\n\n test_codec!(\n\n enum_struct,\n\n TestEnum::StructVariant { a: 0x0102, b: \"x\".to_string() },\n\n &[0, 0, 0, 6, b's', b't', b'r', b'u', b'c', b't',\n", "file_path": "src/packet/mod.rs", "rank": 49, "score": 28215.218121923237 }, { "content": " 0, 0, 1, 2,\n\n 0, 0, 0, 1, b'x',\n\n 1\n\n ]\n\n );\n\n\n\n test_codec!(\n\n empty_name_list,\n\n NameListWrapper {\n\n e: vec![],\n\n f: b\"test-name\".to_vec()\n\n },\n\n b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x09test-name\"\n\n );\n\n\n\n test_codec!(\n\n name_list,\n\n NameListWrapper {\n\n e: vec![NameEnum::TestName, NameEnum::Unknown(\"unknown\".to_string()), NameEnum::AnotherName],\n\n f: b\"test-name\".to_vec()\n\n },\n\n 
b\"\\x00\\x00\\x00\\x1Etest-name,unknown,another-name\\x00\\x00\\x00\\x09test-name\"\n\n );\n\n}\n", "file_path": "src/packet/mod.rs", "rank": 50, "score": 28211.50302555343 }, { "content": "enum PacketWriteState {\n\n Idle,\n\n WritePayload(PacketWriteRequest, u32, u8),\n\n Flush\n\n}\n\n\n\nimpl PacketWriteState {\n\n fn is_idle(&self) -> bool {\n\n match *self {\n\n PacketWriteState::Idle => true,\n\n _ => false\n\n }\n\n }\n\n}\n\n\n\npub struct ClearTransport<R: Read, W: Write, RNG, T>(Option<UnencryptedTransport<R, W, RNG, T>>);\n\n\n\nimpl <R: Read, W: Write, RNG, T> ClearTransport<R, W, RNG, T> {\n\n pub fn new(rd: AsyncBufReader<R>,\n\n wr: AsyncBufWriter<W>,\n\n rng: RNG,\n\n st: T) -> ClearTransport<R, W, RNG, T>\n\n {\n\n ClearTransport(Some(UnencryptedTransport::new(rd, wr, rng, st)))\n\n }\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 51, "score": 25979.962363697803 }, { "content": "}\n\n\n\nimpl <R:Read> BufRead for AsyncBufReader<R> {\n\n #[inline]\n\n fn fill_buf(&mut self) -> io::Result<&[u8]> {\n\n let rsize = try!(self.inner.read(self.buf.get_mut()));\n\n self.buf.fill(rsize);\n\n Ok(self.buf.get_ref())\n\n }\n\n\n\n #[inline]\n\n fn consume(&mut self, amt: usize) {\n\n self.buf.consume(amt)\n\n }\n\n}\n\n\n\nimpl <R:Read> Read for AsyncBufReader<R> {\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n if self.buf.capacity() < buf.len() && self.buf.is_empty() {\n", "file_path": "src/async/bufreader.rs", "rank": 60, "score": 24.31953367774141 }, { "content": " }\n\n } else {\n\n return Ok(Async::NotReady)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl <R, W, RNG, T, V, E> Future for ClearTransport<R, W, RNG, T>\n\n where R: Read, W: Write, RNG: Rng, T: AsyncPacketState + Future<Item=V, Error=E>, E: TransportError\n\n{\n\n type Item = (AsyncBufReader<R>, AsyncBufWriter<W>, V);\n\n type Error = E;\n\n\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n let retval = match *self {\n\n ClearTransport(Some(ref mut inner)) => match try!(inner.poll()) {\n\n Async::NotReady => return Ok(Async::NotReady),\n\n Async::Ready(x) => x\n\n },\n", "file_path": "src/transport.rs", "rank": 61, "score": 24.00999338003777 }, { "content": " AsyncBufReader {\n\n inner: inner,\n\n buf: AsyncBuf::with_capacity(capacity)\n\n }\n\n }\n\n}\n\n\n\nimpl <R: Read> AsyncBufReader<R> {\n\n fn fill_buf_no_eof(&mut self) -> io::Result<()> {\n\n let rsize = try!(self.inner.read(self.buf.get_mut()));\n\n if rsize == 0 {\n\n return Err(io::Error::new(io::ErrorKind::UnexpectedEof, \"Unexpected EOF\"));\n\n }\n\n self.buf.fill(rsize);\n\n Ok(())\n\n }\n\n\n\n pub fn nb_read_exact(&mut self, n: usize) -> Poll<&[u8], io::Error> {\n\n if self.buf.data_size() >= n {\n\n Ok(Async::Ready(self.buf.consume_and_get(n)))\n", "file_path": "src/async/bufreader.rs", "rank": 62, "score": 22.446090461712306 }, { "content": "use super::buf::AsyncBuf;\n\nuse super::DEFAULT_BUFSIZE;\n\n\n\nuse std::{cmp, io};\n\nuse std::io::{BufRead, Read};\n\n\n\nuse futures::{Async, Poll};\n\nuse tokio_core::io::Io;\n\n\n\npub struct AsyncBufReader<R> {\n\n inner: R,\n\n buf: AsyncBuf\n\n}\n\n\n\nimpl <R> AsyncBufReader<R> {\n\n pub fn new(inner: R) -> AsyncBufReader<R> {\n\n AsyncBufReader::with_capacity(DEFAULT_BUFSIZE, inner)\n\n }\n\n\n\n pub fn with_capacity(capacity: usize, inner: R) -> AsyncBufReader<R> {\n", "file_path": "src/async/bufreader.rs", "rank": 63, "score": 22.43970935992061 }, { "content": " pos: usize,\n\n }\n\n\n\n impl Read for MockAsyncReader {\n\n fn read(&mut self, buf: &mut [u8]) -> 
io::Result<usize> {\n\n let datasize = cmp::min(buf.len(), self.buf.len() - self.pos);\n\n if datasize == 0 {\n\n return Err(io::Error::new(io::ErrorKind::WouldBlock, \"data not ready\"));\n\n }\n\n let new_pos = self.pos + datasize;\n\n buf[.. datasize].copy_from_slice(&self.buf[self.pos .. new_pos]);\n\n self.pos = new_pos;\n\n Ok(datasize)\n\n }\n\n }\n\n\n\n fn mock_async_reader(data: &[u8]) -> MockAsyncReader {\n\n MockAsyncReader {\n\n buf: data.to_vec(),\n\n pos: 0\n", "file_path": "src/async/bufreader.rs", "rank": 64, "score": 20.30310983673193 }, { "content": " } else {\n\n self.buf.reserve(n);\n\n if let Err(e) = self.fill_buf_no_eof() {\n\n if let io::ErrorKind::WouldBlock = e.kind() {\n\n return Ok(Async::NotReady);\n\n } else {\n\n return Err(e)\n\n }\n\n }\n\n\n\n if self.buf.data_size() < n {\n\n Ok(Async::NotReady)\n\n } else {\n\n Ok(Async::Ready(self.buf.consume_and_get(n)))\n\n }\n\n }\n\n }\n\n\n\n pub fn nb_read_until(&mut self, byte: u8, limit: usize) -> Poll<&[u8], io::Error> {\n\n if let Some(idx) = self.buf.get_ref().iter().position(|&c| c == byte) {\n", "file_path": "src/async/bufreader.rs", "rank": 66, "score": 19.95791539569425 }, { "content": " return Ok(callback_called);\n\n }\n\n },\n\n PacketWriteState::WritePayload(ref req, pkt_len, pad_len) => {\n\n if pkt_len as usize != req.payload.len() + 1 + pad_len as usize {\n\n return Err(T::Error::panic(\"pkt_len does not match\"));\n\n }\n\n\n\n let async_res = try!(self.wr.nb_write(pkt_len as usize + 4, |buf| {\n\n buf[0] = ((pkt_len >> 24) & 0xff) as u8;\n\n buf[1] = ((pkt_len >> 16) & 0xff) as u8;\n\n buf[2] = ((pkt_len >> 8) & 0xff) as u8;\n\n buf[3] = (pkt_len & 0xff) as u8;\n\n buf[4] = pad_len;\n\n buf[5 .. 5 + req.payload.len()].copy_from_slice(&req.payload);\n\n\n\n let mut rng = thread_rng();\n\n rng.fill_bytes(&mut buf[5 + req.payload.len() ..]);\n\n }));\n\n\n", "file_path": "src/transport.rs", "rank": 67, "score": 19.109484542034913 }, { "content": " _ => panic!(\"Called the same ClearTransport twice\")\n\n };\n\n\n\n let inner = self.0.take().unwrap();\n\n Ok(Async::Ready((inner.rd, inner.wr, retval)))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use rand::thread_rng;\n\n\n\n #[test]\n\n fn test_compute_pad_len() {\n\n let mut rng = thread_rng();\n\n for payload_len in 1 .. 
257 {\n\n if let Ok((pkt_len, pad_len)) = compute_pad_len(payload_len, 0, &mut rng) {\n\n assert!(pkt_len >= 12);\n\n assert_eq!(pkt_len % 8, 4);\n\n assert_eq!(pkt_len as usize, pad_len as usize + payload_len + 1);\n\n } else {\n\n panic!(\"compute_pad_len failed at payload_len = {}\", payload_len);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/transport.rs", "rank": 69, "score": 18.51720948082573 }, { "content": " }\n\n}\n\n\n\nimpl <R, W, RNG, T, V, E> Future for UnencryptedTransport<R, W, RNG, T>\n\n where R: Read, W: Write, RNG: Rng, T: AsyncPacketState + Future<Item=V, Error=E>, E: TransportError\n\n{\n\n type Item = V;\n\n type Error = E;\n\n\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n loop {\n\n let w = try!(self.try_write());\n\n let r = try!(self.try_read());\n\n\n\n if self.wr_st.is_idle() && self.rd_st.is_idle() {\n\n match try!(self.st.poll()) {\n\n Async::Ready(x) => return Ok(Async::Ready(x)),\n\n Async::NotReady => if !w && !r {\n\n return Ok(Async::NotReady);\n\n }\n", "file_path": "src/transport.rs", "rank": 71, "score": 17.8884397731551 }, { "content": " let len = try!(self.parse_u32()) as usize;\n\n if self.buf.len() < self.pos + len {\n\n Err(DecoderError::UnexpectedEOF)\n\n } else {\n\n let old_pos = self.pos;\n\n self.pos += len;\n\n Ok(&self.buf[old_pos .. self.pos])\n\n }\n\n }\n\n\n\n #[inline]\n\n fn is_end_of_data(self) -> bool {\n\n self.buf.len() == self.pos\n\n }\n\n}\n\n\n", "file_path": "src/packet/decoder.rs", "rank": 72, "score": 17.850054752976835 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\nuse std::fmt::Write;\n\n\n\nuse serde::ser;\n\n\n\npub struct BinaryEncoder {\n\n buf: Vec<u8>\n\n}\n\n\n\nimpl BinaryEncoder {\n\n pub fn new() -> BinaryEncoder {\n\n BinaryEncoder { buf: Vec::new() }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum EncoderError {\n\n UnsupportedType(&'static str),\n\n DataTooLarge(usize),\n", "file_path": "src/packet/encoder.rs", "rank": 73, "score": 17.827566075383846 }, { "content": " DecoderError::Serde(ref e) => write!(f, \"Serde: {}\", e)\n\n }\n\n }\n\n}\n\n\n\nimpl From<str::Utf8Error> for DecoderError {\n\n fn from(e: str::Utf8Error) -> DecoderError {\n\n DecoderError::Utf8Error(e)\n\n }\n\n}\n\n\n\nimpl<'a> BinaryDecoder<'a> {\n\n pub fn new<'n>(data: &'n [u8]) -> BinaryDecoder<'n> {\n\n BinaryDecoder { buf: data, pos: 0 }\n\n }\n\n\n\n fn parse_u32(&mut self) -> Result<u32, DecoderError> {\n\n if self.buf.len() < self.pos + 4 {\n\n Err(DecoderError::UnexpectedEOF)\n\n } else {\n", "file_path": "src/packet/decoder.rs", "rank": 74, "score": 17.525910627869052 }, { "content": " rng.gen_range(0, (max_pad_len - min_pad_len) / min_unit + 1) * min_unit + min_pad_len;\n\n let pkt_len = try_add!(pad_len, except_pad) - 4;\n\n Ok((pkt_len as u32, pad_len as u8))\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n\nimpl <R, W, RNG, T> UnencryptedTransport<R, W, RNG, T>\n\n where R: Read, W: Write, RNG: Rng, T: AsyncPacketState, T::Error: TransportError\n\n{\n\n fn try_write(&mut self) -> Result<bool, T::Error> {\n\n let mut callback_called = false;\n\n loop {\n\n let next_state = match self.wr_st {\n\n PacketWriteState::Idle => {\n\n if let Some(req) = self.st.write_packet() {\n\n let (pkt_len, pad_len) = try!(compute_pad_len(req.payload.len(), 0, &mut self.rng));\n\n PacketWriteState::WritePayload(req, pkt_len, pad_len)\n\n } else {\n", "file_path": "src/transport.rs", "rank": 75, "score": 16.775041215929416 }, { "content": " #[inline]\n\n fn serialize_bytes(&mut self, v: &[u8]) -> Result<(), 
EncoderError> {\n\n if v.len() > 0xffffffff {\n\n return Err(EncoderError::DataTooLarge(v.len()))\n\n }\n\n\n\n try!(self.serialize_u32(v.len() as u32));\n\n self.buf.extend_from_slice(v);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn serialize_newtype_variant<T>(&mut self, \n\n name: &'static str,\n\n index: usize,\n\n variant_name: &'static str,\n\n value: T)\n\n -> Result<(), EncoderError>\n\n where T: ser::Serialize\n\n {\n", "file_path": "src/packet/encoder.rs", "rank": 76, "score": 16.638025757079017 }, { "content": "use async::bufreader::AsyncBufReader;\n\nuse async::bufwriter::AsyncBufWriter;\n\n\n\nuse std::{cmp, io};\n\nuse std::io::{Read, Write};\n\n\n\nuse futures::{Async, Future, Poll};\n\nuse rand::{Rng, thread_rng};\n\n\n", "file_path": "src/transport.rs", "rank": 78, "score": 16.29201817418275 }, { "content": " }\n\n\n\n fn write_packet(&self) -> Option<PacketWriteRequest> {\n\n match self.st {\n\n ClientKex::AlgorithmExchange(ref st) => st.write_packet(),\n\n ClientKex::KeyExchange(ref st) => st.write_packet(),\n\n ClientKex::Agreed(ref st) => st.write_packet(),\n\n }\n\n }\n\n\n\n fn on_flush(&mut self) -> Result<(), Self::Error> {\n\n match self.st {\n\n ClientKex::AlgorithmExchange(ref mut st) => st.on_flush(),\n\n ClientKex::KeyExchange(ref mut st) => st.on_flush(),\n\n ClientKex::Agreed(ref mut st) => st.on_flush(),\n\n }\n\n }\n\n}\n\n\n\npub struct AlgorithmExchangeState {\n\n v_c: String,\n\n v_s: String,\n\n i_c: Vec<u8>,\n\n written: bool,\n\n res: Option<(NegotiatedAlgorithm, Context)>\n\n}\n\n\n", "file_path": "src/handshake.rs", "rank": 79, "score": 15.740280495229527 }, { "content": " fn poll(&mut self) -> Poll<SecureContext, HandshakeError> {\n\n if self.new_key_sent && self.new_key_received {\n\n match self.ctx.take() {\n\n Some(ctx) => Ok(Async::Ready(ctx)),\n\n None => panic!(\"Called Agreed::poll() twice\")\n\n }\n\n } else {\n\n Ok(Async::NotReady)\n\n }\n\n }\n\n}\n\n\n\nimpl AsyncPacketState for Agreed {\n\n fn wants_read(&self) -> bool {\n\n !self.new_key_received\n\n }\n\n\n\n fn on_read(&mut self, buf: &[u8]) -> Result<(), HandshakeError> {\n\n if buf == &[SSH_MSG_NEWKEYS] {\n\n self.new_key_received = true;\n", "file_path": "src/handshake.rs", "rank": 80, "score": 15.57331830554082 }, { "content": "use async::bufreader::AsyncBufReader;\n\nuse async::bufwriter::AsyncBufWriter;\n\nuse packet::types::*;\n\nuse packet::{deserialize, serialize, serialize_msg};\n\nuse transport::{AsyncPacketState, ClearTransport, PacketWriteRequest, TransportError, hton};\n\n\n\nuse std::{fmt, io, str};\n\nuse std::convert::TryFrom;\n\nuse std::io::{Read, Write};\n\nuse futures::{Async, Future, Poll};\n\nuse rand::{OsRng, Rng};\n\nuse ring::{agreement, digest, rand, signature};\n\nuse ring::digest::Context;\n\nuse tokio_core::io::{flush, read_until, write_all};\n\nuse untrusted;\n\n\n\nuse ::{SSH_MSG_KEXINIT, SSH_MSG_NEWKEYS, SSH_MSG_KEXDH_INIT, SSH_MSG_KEXDH_REPLY};\n\n\n\n#[derive(Debug)]\n\npub enum HandshakeError {\n", "file_path": "src/handshake.rs", "rank": 81, "score": 15.366462035995937 }, { "content": " Ok(())\n\n } else {\n\n Err(HandshakeError::ExpectedNewKeys)\n\n }\n\n }\n\n\n\n fn write_packet(&self) -> Option<PacketWriteRequest> {\n\n if self.new_key_sent {\n\n None\n\n } else {\n\n Some(PacketWriteRequest { payload: vec![SSH_MSG_NEWKEYS], flush: true })\n\n }\n\n }\n\n\n\n fn on_flush(&mut self) -> Result<(), Self::Error> {\n\n self.new_key_sent = true;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/handshake.rs", "rank": 82, "score": 14.776779075411644 }, 
{ "content": "pub struct KeyContext {\n\n v_c: String,\n\n v_s: String,\n\n i_c: Vec<u8>,\n\n i_s: Vec<u8>,\n\n k_s: Vec<u8>,\n\n e: Vec<u8>,\n\n f: Vec<u8>,\n\n k: Vec<u8>\n\n}\n\n\n\npub enum ClientHandshakeState {\n\n VersionExchange { v_c: String, v_s: Option<String>, end_of_write: bool },\n\n AlgorithmExchange { i_c: String, i_s: Option<String>, end_of_write: bool },\n\n KeyWrite { e: Vec<u8>, flushing: bool },\n\n KexReply\n\n}\n", "file_path": "src/context.rs", "rank": 83, "score": 14.434539594842985 }, { "content": "}\n\n\n\npub struct KeyExchangeState {\n\n neg: NegotiatedAlgorithm,\n\n keyshare: Option<(Context, agreement::EphemeralPrivateKey)>,\n\n e: Vec<u8>,\n\n written: bool,\n\n res: Option<Vec<u8>>\n\n}\n\n\n\nimpl Future for KeyExchangeState {\n\n type Item = Agreed;\n\n type Error = HandshakeError;\n\n\n\n fn poll(&mut self) -> Poll<Agreed, HandshakeError> {\n\n if !self.written {\n\n return Ok(Async::NotReady);\n\n }\n\n\n\n match self.res.take() {\n", "file_path": "src/handshake.rs", "rank": 84, "score": 14.156225575371039 }, { "content": " };\n\n}\n\n\n\nimpl ser::Serializer for BinaryEncoder {\n\n type Error = EncoderError;\n\n type SeqState = ();\n\n type TupleState = ();\n\n type TupleStructState = ();\n\n type TupleVariantState = ();\n\n type MapState = ();\n\n type StructState = ();\n\n type StructVariantState = ();\n\n\n\n #[inline]\n\n fn serialize_bool(&mut self, v: bool) -> Result<(), EncoderError> {\n\n let b = if v { 1 } else { 0 };\n\n self.buf.push(b);\n\n Ok(())\n\n }\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 85, "score": 13.897036627678107 }, { "content": " return self.inner.read(buf);\n\n }\n\n\n\n if self.buf.data_size() < buf.len() {\n\n let rsize = try!(self.inner.read(self.buf.get_mut()));\n\n self.buf.fill(rsize);\n\n }\n\n\n\n let rsize = cmp::min(self.buf.data_size(), buf.len());\n\n buf[.. rsize].copy_from_slice(&self.buf.get_ref()[.. 
rsize]);\n\n self.buf.consume(rsize);\n\n Ok(rsize)\n\n }\n\n}\n\n\n", "file_path": "src/async/bufreader.rs", "rank": 86, "score": 13.775837949713688 }, { "content": "#![feature(proc_macro)]\n\n#![feature(try_from)]\n\n\n\nextern crate futures;\n\nextern crate rand;\n\nextern crate ring;\n\n#[macro_use]\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n#[macro_use]\n\nextern crate tokio_core;\n\nextern crate untrusted;\n\n\n\n#[cfg(test)]\n\nextern crate rustc_serialize;\n\n\n\npub mod async;\n\npub mod handshake;\n\npub mod packet;\n\npub mod transport;\n\n\n\npub const SSH_MSG_KEXINIT: u8 = 20;\n\npub const SSH_MSG_NEWKEYS: u8 = 21;\n\npub const SSH_MSG_KEXDH_INIT: u8 = 30;\n\npub const SSH_MSG_KEXDH_REPLY: u8 = 31;\n", "file_path": "src/lib.rs", "rank": 87, "score": 13.590464826988455 }, { "content": "use std::{fmt, str};\n\nuse std::error::Error;\n\nuse std::marker::PhantomData;\n\n\n\nuse serde::de;\n\n\n\npub struct BinaryDecoder<'a> {\n\n buf: &'a [u8],\n\n pos: usize\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum DecoderError {\n\n UnsupportedType(&'static str),\n\n UnexpectedEOF,\n\n TrailingData,\n\n NonBoolean,\n\n Utf8Error(str::Utf8Error),\n\n Serde(de::value::Error)\n\n}\n", "file_path": "src/packet/decoder.rs", "rank": 88, "score": 13.558393146598888 }, { "content": " })\n\n }\n\n }\n\n\n\n fn on_flush(&mut self) -> Result<(), HandshakeError> {\n\n self.written = true;\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct Agreed {\n\n ctx: Option<SecureContext>,\n\n new_key_received: bool,\n\n new_key_sent: bool\n\n}\n\n\n\nimpl Future for Agreed {\n\n type Item = SecureContext;\n\n type Error = HandshakeError;\n\n\n", "file_path": "src/handshake.rs", "rank": 89, "score": 13.50057905452053 }, { "content": " }\n\n\n\n fn deserialize_u8<V>(&mut self, mut visitor: V) -> Result<V::Value, DecoderError>\n\n where V: de::Visitor\n\n {\n\n visitor.visit_u8(try!(self.parse_u8()))\n\n }\n\n\n\n fn deserialize_bool<V>(&mut self, mut visitor: V) -> Result<V::Value, DecoderError>\n\n where V: de::Visitor\n\n {\n\n match try!(self.parse_u8()) {\n\n 0 => visitor.visit_bool(false),\n\n 1 => visitor.visit_bool(true),\n\n _ => Err(DecoderError::NonBoolean)\n\n }\n\n }\n\n\n\n fn deserialize_str<V>(&mut self, mut visitor: V) -> Result<V::Value, DecoderError>\n\n where V: de::Visitor\n", "file_path": "src/packet/decoder.rs", "rank": 91, "score": 13.23525831790856 }, { "content": "\n\n #[inline]\n\n fn serialize_tuple_end(&mut self, _st: ()) -> Result<(), EncoderError> {\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn serialize_tuple_struct(&mut self,\n\n _name: &'static str,\n\n len: usize)\n\n -> Result<(), EncoderError>\n\n {\n\n self.serialize_tuple(len)\n\n }\n\n\n\n #[inline]\n\n fn serialize_tuple_struct_elt<T>(&mut self, st: &mut (), elt: T)\n\n -> Result<(), EncoderError>\n\n where T: ser::Serialize\n\n {\n", "file_path": "src/packet/encoder.rs", "rank": 92, "score": 13.159276191679414 }, { "content": "\n\n self.wr_st = next_state;\n\n }\n\n }\n\n\n\n fn try_read(&mut self) -> Result<bool, T::Error> {\n\n let mut callback_called = false;\n\n\n\n loop {\n\n let next_state = match self.rd_st {\n\n PacketReadState::Idle => {\n\n if !self.st.wants_read() {\n\n return Ok(callback_called);\n\n }\n\n\n\n if let Async::Ready(buf) = try!(self.rd.nb_read_exact(5)) {\n\n let pkt_len = ntoh(&buf[.. 
4]);\n\n let pad_len = buf[4];\n\n if pkt_len < 12 || pkt_len < (pad_len as u32) + 1 {\n\n return Err(T::Error::invalid_header());\n", "file_path": "src/transport.rs", "rank": 93, "score": 13.070549504491385 }, { "content": " impl_error!(serialize_char(char), \"char\");\n\n impl_error!(serialize_unit(), \"unit\");\n\n impl_error!(serialize_unit_struct(&'static str), \"unit_struct\");\n\n impl_error!(serialize_unit_variant(&'static str, usize, &'static str), \"unit_variant\");\n\n impl_error!(serialize_newtype_struct<T>(&'static str, T), \"newtype_struct\");\n\n impl_error!(serialize_none(), \"none\");\n\n impl_error!(serialize_some<T>(T), \"some\");\n\n impl_error!(serialize_seq(Option<usize>), \"seq\");\n\n impl_error!(serialize_seq_elt<T>(&mut Self::SeqState, T), \"seq_elt\");\n\n impl_error!(serialize_seq_end(Self::SeqState), \"seq_end\");\n\n impl_error!(serialize_seq_fixed_size(usize) -> Self::SeqState, \"seq_fixed_size\");\n\n impl_error!(serialize_map(Option<usize>) -> Self::MapState, \"map\");\n\n impl_error!(serialize_map_key<T>(&mut Self::MapState, T), \"map_key\");\n\n impl_error!(serialize_map_value<T>(&mut Self::MapState, T), \"map_value\");\n\n impl_error!(serialize_map_end(Self::MapState), \"map_end\");\n\n}\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 95, "score": 12.778149450429394 }, { "content": " let v = ((self.buf[self.pos] as u32) << 24)\n\n + ((self.buf[self.pos+1] as u32) << 16)\n\n + ((self.buf[self.pos+2] as u32) << 8)\n\n + self.buf[self.pos+3] as u32;\n\n self.pos += 4;\n\n Ok(v)\n\n }\n\n }\n\n\n\n fn parse_u8(&mut self) -> Result<u8, DecoderError> {\n\n if self.buf.len() < self.pos + 1 {\n\n Err(DecoderError::UnexpectedEOF)\n\n } else {\n\n let v = self.buf[self.pos];\n\n self.pos += 1;\n\n Ok(v)\n\n }\n\n }\n\n\n\n fn parse_bytes(&mut self) -> Result<&[u8], DecoderError> {\n", "file_path": "src/packet/decoder.rs", "rank": 96, "score": 12.704077901757508 }, { "content": " #[inline]\n\n fn serialize_u8(&mut self, v: u8) -> Result<(), EncoderError> {\n\n self.buf.push(v);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn serialize_u32(&mut self, v: u32) -> Result<(), EncoderError> {\n\n self.buf.push((v >> 24) as u8);\n\n self.buf.push((v >> 16) as u8);\n\n self.buf.push((v >> 8) as u8);\n\n self.buf.push(v as u8);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn serialize_str(&mut self, v: &str) -> Result<(), EncoderError> {\n\n self.serialize_bytes(v.as_ref())\n\n }\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 97, "score": 12.607339256964565 }, { "content": "\n\n #[inline]\n\n fn serialize_struct_variant(&mut self,\n\n _name: &'static str,\n\n _index: usize,\n\n variant_name: &'static str,\n\n _len: usize)\n\n -> Result<(), EncoderError>\n\n {\n\n self.serialize_bytes(variant_name.as_ref())\n\n }\n\n\n\n #[inline]\n\n fn serialize_struct_variant_elt<T>(&mut self,\n\n _st: &mut (),\n\n _key: &'static str,\n\n elt: T)\n\n -> Result<(), EncoderError>\n\n where T: ser::Serialize\n\n {\n", "file_path": "src/packet/encoder.rs", "rank": 98, "score": 12.41303133678688 }, { "content": " Serde(String)\n\n}\n\n\n\nimpl fmt::Display for EncoderError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n EncoderError::UnsupportedType(ref e) => write!(f, \"UnsupportedType({})\", e),\n\n EncoderError::DataTooLarge(n) => write!(f, \"DataTooLarge({})\", n),\n\n EncoderError::Serde(ref e) => write!(f, \"Serde({})\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for EncoderError {\n\n fn description(&self) -> &str {\n\n match *self {\n\n 
EncoderError::UnsupportedType(_) => \"UnsupportedType\",\n\n EncoderError::DataTooLarge(_) => \"DataTooLarge\",\n\n EncoderError::Serde(ref e) => e.as_ref(),\n\n }\n", "file_path": "src/packet/encoder.rs", "rank": 99, "score": 12.037259675748977 } ]
Rust
src/tokenizer.rs
crowlKats/rust-urlpattern
1dc0054b693ec652c8e4702a0edffc243ef5cd2c
use derive_more::Display; use crate::error::TokenizerError; use crate::Error; #[derive(Debug, Display, Clone, Eq, PartialEq)] pub enum TokenType { Open, Close, Regexp, Name, Char, EscapedChar, OtherModifier, Asterisk, End, InvalidChar, } #[derive(Debug, Clone)] pub struct Token { pub kind: TokenType, pub index: usize, pub value: String, } #[derive(Debug, Eq, PartialEq)] pub enum TokenizePolicy { Strict, Lenient, } struct Tokenizer { input: Vec<char>, policy: TokenizePolicy, token_list: Vec<Token>, index: usize, next_index: usize, code_point: Option<char>, } impl Tokenizer { #[inline] fn get_next_codepoint(&mut self) { self.code_point = Some(self.input[self.next_index]); self.next_index += 1; } #[inline] fn add_token_with_default_pos_and_len(&mut self, kind: TokenType) { self.add_token_with_default_len(kind, self.next_index, self.index); } #[inline] fn add_token_with_default_len( &mut self, kind: TokenType, next_pos: usize, value_pos: usize, ) { self.add_token(kind, next_pos, value_pos, next_pos - value_pos); } #[inline] fn add_token( &mut self, kind: TokenType, next_pos: usize, value_pos: usize, value_len: usize, ) { let range = value_pos..(value_pos + value_len); let value = self.input[range].iter().collect::<String>(); self.token_list.push(Token { kind, index: self.index, value, }); self.index = next_pos; } fn process_tokenizing_error( &mut self, next_pos: usize, value_pos: usize, error: TokenizerError, ) -> Result<(), Error> { if self.policy == TokenizePolicy::Strict { Err(Error::Tokenizer(error, value_pos)) } else { self.add_token_with_default_len( TokenType::InvalidChar, next_pos, value_pos, ); Ok(()) } } #[inline] fn seek_and_get_next_codepoint(&mut self, index: usize) { self.next_index = index; self.get_next_codepoint(); } } pub fn tokenize( input: &str, policy: TokenizePolicy, ) -> Result<Vec<Token>, Error> { let mut tokenizer = Tokenizer { input: input.chars().collect::<Vec<char>>(), policy, token_list: vec![], index: 0, next_index: 0, code_point: None, }; while tokenizer.index < tokenizer.input.len() { tokenizer.next_index = tokenizer.index; tokenizer.get_next_codepoint(); if tokenizer.code_point == Some('*') { tokenizer.add_token_with_default_pos_and_len(TokenType::Asterisk); continue; } if matches!(tokenizer.code_point, Some('+') | Some('?')) { tokenizer.add_token_with_default_pos_and_len(TokenType::OtherModifier); continue; } if tokenizer.code_point == Some('\\') { if tokenizer.index == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( tokenizer.next_index, tokenizer.index, TokenizerError::IncompleteEscapeCode, )?; continue; } let escaped_index = tokenizer.next_index; tokenizer.get_next_codepoint(); tokenizer.add_token_with_default_len( TokenType::EscapedChar, tokenizer.next_index, escaped_index, ); continue; } if tokenizer.code_point == Some('{') { tokenizer.add_token_with_default_pos_and_len(TokenType::Open); continue; } if tokenizer.code_point == Some('}') { tokenizer.add_token_with_default_pos_and_len(TokenType::Close); continue; } if tokenizer.code_point == Some(':') { let mut name_pos = tokenizer.next_index; let name_start = name_pos; while name_pos < tokenizer.input.len() { tokenizer.seek_and_get_next_codepoint(name_pos); let first_code_point = name_pos == name_start; let valid_codepoint = is_valid_name_codepoint( tokenizer.code_point.unwrap(), first_code_point, ); if !valid_codepoint { break; } name_pos = tokenizer.next_index; } if name_pos <= name_start { tokenizer.process_tokenizing_error( name_start, tokenizer.index, TokenizerError::InvalidName, )?; 
continue; } tokenizer.add_token_with_default_len( TokenType::Name, name_pos, name_start, ); continue; } if tokenizer.code_point == Some('(') { let mut depth = 1; let mut regexp_pos = tokenizer.next_index; let regexp_start = regexp_pos; let mut error = false; while regexp_pos < tokenizer.input.len() { tokenizer.seek_and_get_next_codepoint(regexp_pos); if !tokenizer.code_point.unwrap().is_ascii() || (regexp_pos == regexp_start && tokenizer.code_point == Some('?')) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex( "must not start with ?, and may only contain ascii", ), )?; error = true; break; } if tokenizer.code_point == Some('\\') { if regexp_pos == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::IncompleteEscapeCode, )?; error = true; break; } tokenizer.get_next_codepoint(); if !tokenizer.code_point.unwrap().is_ascii() { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("non ascii character was escaped"), )?; error = true; break; } regexp_pos = tokenizer.next_index; continue; } if tokenizer.code_point == Some(')') { depth -= 1; if depth == 0 { regexp_pos = tokenizer.next_index; break; } } else if tokenizer.code_point == Some('(') { depth += 1; if regexp_pos == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("nested groups not closed"), )?; error = true; break; } let temp_pos = tokenizer.next_index; tokenizer.get_next_codepoint(); if tokenizer.code_point != Some('?') { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("nested groups must start with ?"), )?; error = true; break; } tokenizer.next_index = temp_pos; } regexp_pos = tokenizer.next_index; } if error { continue; } if depth != 0 { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("missing closing )"), )?; continue; } let regexp_len = regexp_pos - regexp_start - 1; if regexp_len == 0 { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("length must be > 0"), )?; continue; } tokenizer.add_token( TokenType::Regexp, regexp_pos, regexp_start, regexp_len, ); continue; } tokenizer.add_token_with_default_pos_and_len(TokenType::Char); } tokenizer.add_token_with_default_len( TokenType::End, tokenizer.index, tokenizer.index, ); Ok(tokenizer.token_list) } #[inline] pub(crate) fn is_valid_name_codepoint(code_point: char, first: bool) -> bool { if first { unic_ucd_ident::is_id_start(code_point) || matches!(code_point, '$' | '_') } else { unic_ucd_ident::is_id_continue(code_point) || matches!(code_point, '$' | '\u{200C}' | '\u{200D}') } }
use derive_more::Display;

use crate::error::TokenizerError;
use crate::Error;

#[derive(Debug, Display, Clone, Eq, PartialEq)]
pub enum TokenType {
  Open,
  Close,
  Regexp,
  Name,
  Char,
  EscapedChar,
  OtherModifier,
  Asterisk,
  End,
  InvalidChar,
}

#[derive(Debug, Clone)]
pub struct Token {
  pub kind: TokenType,
  pub index: usize,
  pub value: String,
}

#[derive(Debug, Eq, PartialEq)]
pub enum TokenizePolicy {
  Strict,
  Lenient,
}

struct Tokenizer {
  input: Vec<char>,
  policy: TokenizePolicy,
  token_list: Vec<Token>,
  index: usize,
  next_index: usize,
  code_point: Option<char>,
}

impl Tokenizer {
  #[inline]
  fn get_next_codepoint(&mut self) {
    self.code_point = Some(self.input[self.next_index]);
    self.next_index += 1;
  }

  #[inline]
  fn add_token_with_default_pos_and_len(&mut self, kind: TokenType) {
    self.add_token_with_default_len(kind, self.next_index, self.index);
  }

  #[inline]
  fn add_token_with_default_len(
    &mut self,
    kind: TokenType,
    next_pos: usize,
    value_pos: usize,
  ) {
    self.add_token(kind, next_pos, value_pos, next_pos - value_pos);
  }

  #[inline]
fn process_tokenizing_error( &mut self, next_pos: usize, value_pos: usize, error: TokenizerError, ) -> Result<(), Error> { if self.policy == TokenizePolicy::Strict { Err(Error::Tokenizer(error, value_pos)) } else { self.add_token_with_default_len( TokenType::InvalidChar, next_pos, value_pos, ); Ok(()) } } #[inline] fn seek_and_get_next_codepoint(&mut self, index: usize) { self.next_index = index; self.get_next_codepoint(); } } pub fn tokenize( input: &str, policy: TokenizePolicy, ) -> Result<Vec<Token>, Error> { let mut tokenizer = Tokenizer { input: input.chars().collect::<Vec<char>>(), policy, token_list: vec![], index: 0, next_index: 0, code_point: None, }; while tokenizer.index < tokenizer.input.len() { tokenizer.next_index = tokenizer.index; tokenizer.get_next_codepoint(); if tokenizer.code_point == Some('*') { tokenizer.add_token_with_default_pos_and_len(TokenType::Asterisk); continue; } if matches!(tokenizer.code_point, Some('+') | Some('?')) { tokenizer.add_token_with_default_pos_and_len(TokenType::OtherModifier); continue; } if tokenizer.code_point == Some('\\') { if tokenizer.index == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( tokenizer.next_index, tokenizer.index, TokenizerError::IncompleteEscapeCode, )?; continue; } let escaped_index = tokenizer.next_index; tokenizer.get_next_codepoint(); tokenizer.add_token_with_default_len( TokenType::EscapedChar, tokenizer.next_index, escaped_index, ); continue; } if tokenizer.code_point == Some('{') { tokenizer.add_token_with_default_pos_and_len(TokenType::Open); continue; } if tokenizer.code_point == Some('}') { tokenizer.add_token_with_default_pos_and_len(TokenType::Close); continue; } if tokenizer.code_point == Some(':') { let mut name_pos = tokenizer.next_index; let name_start = name_pos; while name_pos < tokenizer.input.len() { tokenizer.seek_and_get_next_codepoint(name_pos); let first_code_point = name_pos == name_start; let valid_codepoint = is_valid_name_codepoint( tokenizer.code_point.unwrap(), first_code_point, ); if !valid_codepoint { break; } name_pos = tokenizer.next_index; } if name_pos <= name_start { tokenizer.process_tokenizing_error( name_start, tokenizer.index, TokenizerError::InvalidName, )?; continue; } tokenizer.add_token_with_default_len( TokenType::Name, name_pos, name_start, ); continue; } if tokenizer.code_point == Some('(') { let mut depth = 1; let mut regexp_pos = tokenizer.next_index; let regexp_start = regexp_pos; let mut error = false; while regexp_pos < tokenizer.input.len() { tokenizer.seek_and_get_next_codepoint(regexp_pos); if !tokenizer.code_point.unwrap().is_ascii() || (regexp_pos == regexp_start && tokenizer.code_point == Some('?')) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex( "must not start with ?, and may only contain ascii", ), )?; error = true; break; } if tokenizer.code_point == Some('\\') { if regexp_pos == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::IncompleteEscapeCode, )?; error = true; break; } tokenizer.get_next_codepoint(); if !tokenizer.code_point.unwrap().is_ascii() { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("non ascii character was escaped"), )?; error = true; break; } regexp_pos = tokenizer.next_index; continue; } if tokenizer.code_point == Some(')') { depth -= 1; if depth == 0 { regexp_pos = tokenizer.next_index; break; } } else if tokenizer.code_point == Some('(') { depth += 1; if 
regexp_pos == (tokenizer.input.len() - 1) { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("nested groups not closed"), )?; error = true; break; } let temp_pos = tokenizer.next_index; tokenizer.get_next_codepoint(); if tokenizer.code_point != Some('?') { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("nested groups must start with ?"), )?; error = true; break; } tokenizer.next_index = temp_pos; } regexp_pos = tokenizer.next_index; } if error { continue; } if depth != 0 { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("missing closing )"), )?; continue; } let regexp_len = regexp_pos - regexp_start - 1; if regexp_len == 0 { tokenizer.process_tokenizing_error( regexp_start, tokenizer.index, TokenizerError::InvalidRegex("length must be > 0"), )?; continue; } tokenizer.add_token( TokenType::Regexp, regexp_pos, regexp_start, regexp_len, ); continue; } tokenizer.add_token_with_default_pos_and_len(TokenType::Char); } tokenizer.add_token_with_default_len( TokenType::End, tokenizer.index, tokenizer.index, ); Ok(tokenizer.token_list) } #[inline] pub(crate) fn is_valid_name_codepoint(code_point: char, first: bool) -> bool { if first { unic_ucd_ident::is_id_start(code_point) || matches!(code_point, '$' | '_') } else { unic_ucd_ident::is_id_continue(code_point) || matches!(code_point, '$' | '\u{200C}' | '\u{200D}') } }
fn add_token(
    &mut self,
    kind: TokenType,
    next_pos: usize,
    value_pos: usize,
    value_len: usize,
  ) {
    let range = value_pos..(value_pos + value_len);
    let value = self.input[range].iter().collect::<String>();
    self.token_list.push(Token {
      kind,
      index: self.index,
      value,
    });
    self.index = next_pos;
  }
function_block-full_function
[ { "content": "// Ref: https://wicg.github.io/urlpattern/#escape-a-pattern-string\n\nfn escape_pattern_string(input: &str) -> String {\n\n assert!(input.is_ascii());\n\n let mut result = String::new();\n\n for char in input.chars() {\n\n if matches!(char, '+' | '*' | '?' | ':' | '{' | '}' | '(' | ')' | '\\\\') {\n\n result.push('\\\\');\n\n }\n\n result.push(char);\n\n }\n\n result\n\n}\n", "file_path": "src/component.rs", "rank": 1, "score": 112737.22070395088 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-username\n\npub fn canonicalize_username(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n // Note: unwrap is safe, because this is a HTTP url that supports username.\n\n url.set_username(value).unwrap();\n\n Ok(url.username().to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 2, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-protocol\n\npub fn canonicalize_protocol(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n url::Url::parse(&format!(\"{}://dummy.test\", value))\n\n .map(|url| url.scheme().to_owned())\n\n .map_err(Error::Url)\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 3, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-password\n\npub fn canonicalize_password(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n // Note: unwrap is safe, because this is a HTTP url that supports password.\n\n url.set_password(Some(value)).unwrap();\n\n Ok(url.password().unwrap().to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 4, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-search\n\npub fn canonicalize_search(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n url.set_query(Some(value));\n\n Ok(url.query().unwrap_or(\"\").to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 5, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-search\n\npub fn canonicalize_hash(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n url.set_fragment(Some(value));\n\n Ok(url.fragment().unwrap_or(\"\").to_string())\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum ProcessType {\n\n Pattern,\n\n Url,\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 6, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-pathname\n\npub fn canonicalize_pathname(value: &str) -> Result<String, Error> {\n\n if value.is_empty() || matches!(value, \".\" | \"..\") {\n\n return Ok(value.to_string());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n url.set_path(value);\n\n let mut pathname = url::quirks::pathname(&url);\n\n if !value.starts_with('/') {\n\n pathname = &pathname[1..];\n\n }\n\n Ok(pathname.to_string())\n\n}\n\n\n", "file_path": 
"src/canonicalize_and_process.rs", "rank": 7, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-hostname\n\npub fn canonicalize_hostname(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"http://dummy.test\").unwrap();\n\n url.set_host(Some(value)).map_err(Error::Url)?;\n\n Ok(url::quirks::hostname(&url).to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 8, "score": 101609.78024776261 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-an-ipv6-hostname\n\npub fn canonicalize_ipv6_hostname(value: &str) -> Result<String, Error> {\n\n let valid_ipv6 = value\n\n .chars()\n\n .all(|c| c.is_ascii_hexdigit() || matches!(c, '[' | ']' | ':'));\n\n if !valid_ipv6 {\n\n Err(Error::Url(url::ParseError::InvalidIpv6Address))\n\n } else {\n\n Ok(value.to_ascii_lowercase())\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 9, "score": 98861.34496316603 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-an-opaque-pathname\n\npub fn canonicalize_an_opaque_pathname(value: &str) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n let mut url = url::Url::parse(\"data:dummy,test\").unwrap();\n\n url.set_path(value);\n\n Ok(url::quirks::pathname(&url).to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 10, "score": 98861.34496316603 }, { "content": "pub fn process_match_input(\n\n input: StringOrInit,\n\n base_url_str: Option<&str>,\n\n) -> Result<Option<(crate::UrlPatternMatchInput, Inputs)>, Error> {\n\n let mut inputs = (input.clone(), None);\n\n let init = match input {\n\n StringOrInit::String(url) => {\n\n let base_url = if let Some(base_url) = base_url_str {\n\n match Url::parse(base_url) {\n\n Ok(base_url) => {\n\n inputs.1 = Some(base_url.to_string());\n\n Some(base_url)\n\n }\n\n Err(_) => return Ok(None),\n\n }\n\n } else {\n\n None\n\n };\n\n match Url::options().base_url(base_url.as_ref()).parse(&url) {\n\n Ok(url) => crate::UrlPatternMatchInput::Url(url),\n", "file_path": "src/quirks.rs", "rank": 11, "score": 98513.69459788475 }, { "content": "pub fn parse_match_input(\n\n input: crate::UrlPatternMatchInput,\n\n) -> Option<MatchInput> {\n\n let mut i = MatchInput::default();\n\n match input {\n\n crate::UrlPatternMatchInput::Init(init) => {\n\n if let Ok(apply_result) = init.process(\n\n crate::canonicalize_and_process::ProcessType::Url,\n\n Some(i.protocol),\n\n Some(i.username),\n\n Some(i.password),\n\n Some(i.hostname),\n\n Some(i.port),\n\n Some(i.pathname),\n\n Some(i.search),\n\n Some(i.hash),\n\n ) {\n\n i.protocol = apply_result.protocol.unwrap();\n\n i.username = apply_result.username.unwrap();\n\n i.password = apply_result.password.unwrap();\n", "file_path": "src/quirks.rs", "rank": 12, "score": 98513.69459788475 }, { "content": "/// This function constructs a UrlPattern given a string or UrlPatternInit and\n\n/// optionally a base url.\n\npub fn process_construct_pattern_input(\n\n input: StringOrInit,\n\n base_url: Option<&str>,\n\n) -> Result<crate::UrlPatternInit, Error> {\n\n let init = match input {\n\n StringOrInit::String(pattern) => {\n\n let base_url =\n\n base_url.map(Url::parse).transpose().map_err(Error::Url)?;\n\n crate::UrlPatternInit::parse_constructor_string(&pattern, base_url)?\n\n }\n\n StringOrInit::Init(init) => {\n\n if base_url.is_some() {\n\n return 
Err(Error::BaseUrlWithInit);\n\n }\n\n let base_url = init\n\n .base_url\n\n .map(|s| Url::parse(&s))\n\n .transpose()\n\n .map_err(Error::Url)?;\n\n crate::UrlPatternInit {\n", "file_path": "src/quirks.rs", "rank": 13, "score": 95233.33119805776 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#parse-a-pattern-string\n\npub fn parse_pattern_string<F>(\n\n input: &str,\n\n options: &Options,\n\n encoding_callback: F,\n\n) -> Result<Vec<Part>, Error>\n\nwhere\n\n F: Fn(&str) -> Result<String, Error>,\n\n{\n\n let token_list = crate::tokenizer::tokenize(\n\n input,\n\n crate::tokenizer::TokenizePolicy::Strict,\n\n )?;\n\n\n\n let mut parser = PatternParser {\n\n token_list,\n\n encoding_callback,\n\n segment_wildcard_regexp: options.generate_segment_wildcard_regexp(),\n\n part_list: vec![],\n\n pending_fixed_value: String::new(),\n\n index: 0,\n", "file_path": "src/parser.rs", "rank": 14, "score": 91421.63564971095 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#canonicalize-a-port\n\npub fn canonicalize_port(\n\n value: &str,\n\n mut protocol: Option<&str>,\n\n) -> Result<String, Error> {\n\n if value.is_empty() {\n\n return Ok(String::new());\n\n }\n\n if let Some(\"\") = protocol {\n\n protocol = None;\n\n }\n\n let port = value\n\n .parse::<u16>()\n\n .map_err(|_| Error::Url(url::ParseError::InvalidPort))?;\n\n // Note: this unwrap is safe, because the protocol was previously parsed to be\n\n // valid.\n\n let mut url =\n\n url::Url::parse(&format!(\"{}://dummy.test\", protocol.unwrap_or(\"dummy\")))\n\n .unwrap();\n\n url.set_port(Some(port)).unwrap(); // TODO: dont unwrap, instead ParseError\n\n Ok(url::quirks::port(&url).to_string())\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 16, "score": 76631.99591119701 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-username-for-init\n\npub fn process_username_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n if kind == &ProcessType::Pattern {\n\n Ok(value.to_string())\n\n } else {\n\n canonicalize_username(value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 17, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-password-for-init\n\npub fn process_password_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n if kind == &ProcessType::Pattern {\n\n Ok(value.to_string())\n\n } else {\n\n canonicalize_password(value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 18, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-port-for-init\n\npub fn process_port_init(\n\n port_value: &str,\n\n protocol_value: Option<&str>,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n if kind == &ProcessType::Pattern {\n\n Ok(port_value.to_string())\n\n } else {\n\n canonicalize_port(port_value, protocol_value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 19, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-pathname-for-init\n\npub fn process_pathname_init(\n\n pathname_value: &str,\n\n protocol_value: Option<&str>,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n if kind == &ProcessType::Pattern {\n\n Ok(pathname_value.to_string())\n\n } else {\n\n match protocol_value {\n\n Some(protocol) if protocol.is_empty() || is_special_scheme(protocol) => {\n\n canonicalize_pathname(pathname_value)\n\n }\n\n _ => 
canonicalize_an_opaque_pathname(pathname_value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 20, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-hostname-for-init\n\npub fn process_hostname_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n if kind == &ProcessType::Pattern {\n\n Ok(value.to_string())\n\n } else {\n\n canonicalize_hostname(value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 21, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-search-for-init\n\npub fn process_search_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n let stripped_value = if value.starts_with('?') {\n\n value.get(1..).unwrap()\n\n } else {\n\n value\n\n };\n\n if kind == &ProcessType::Pattern {\n\n Ok(stripped_value.to_string())\n\n } else {\n\n canonicalize_search(stripped_value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 22, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-protocol-for-init\n\npub fn process_protocol_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n let stripped_value = value.strip_suffix(':').unwrap_or(value);\n\n if kind == &ProcessType::Pattern {\n\n Ok(stripped_value.to_string())\n\n } else {\n\n canonicalize_protocol(stripped_value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 23, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#process-hash-for-init\n\npub fn process_hash_init(\n\n value: &str,\n\n kind: &ProcessType,\n\n) -> Result<String, Error> {\n\n let stripped_value = if value.starts_with('#') {\n\n value.get(1..).unwrap()\n\n } else {\n\n value\n\n };\n\n if kind == &ProcessType::Pattern {\n\n Ok(stripped_value.to_string())\n\n } else {\n\n canonicalize_hash(stripped_value)\n\n }\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 24, "score": 74468.9787612281 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#hostname-pattern-is-an-ipv6-address\n\nfn hostname_pattern_is_ipv6_address(input: &str) -> bool {\n\n // TODO: code point length\n\n if input.len() < 2 {\n\n return false;\n\n }\n\n\n\n input.starts_with('[') || input.starts_with(\"{[\") || input.starts_with(\"\\\\[\")\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#dictdef-urlpatternresult\n\n/// A result of a URL pattern match.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct UrlPatternResult {\n\n pub protocol: UrlPatternComponentResult,\n\n pub username: UrlPatternComponentResult,\n\n pub password: UrlPatternComponentResult,\n\n pub hostname: UrlPatternComponentResult,\n\n pub port: UrlPatternComponentResult,\n\n pub pathname: UrlPatternComponentResult,\n\n pub search: UrlPatternComponentResult,\n", "file_path": "src/lib.rs", "rank": 25, "score": 67128.14426585987 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum ConstructorStringParserState {\n\n Init,\n\n Protocol,\n\n Authority,\n\n Username,\n\n Password,\n\n Hostname,\n\n Port,\n\n Pathname,\n\n Search,\n\n Hash,\n\n Done,\n\n}\n\n\n", "file_path": "src/constructor_parser.rs", "rank": 26, "score": 64514.60888978842 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#constructor-string-parser\n\nstruct ConstructorStringParser<'a> {\n\n input: &'a str,\n\n token_list: Vec<Token>,\n\n result: UrlPatternInit,\n\n 
component_start: usize,\n\n token_index: usize,\n\n token_increment: usize,\n\n group_depth: usize,\n\n hostname_ipv6_bracket_depth: usize,\n\n protocol_matches_special_scheme: bool,\n\n state: ConstructorStringParserState,\n\n}\n\n\n\nimpl<'a> ConstructorStringParser<'a> {\n\n // Ref: https://wicg.github.io/urlpattern/#rewind\n\n #[inline]\n\n fn rewind(&mut self) {\n\n self.token_index = self.component_start;\n\n self.token_increment = 0;\n\n }\n", "file_path": "src/constructor_parser.rs", "rank": 27, "score": 63824.89471928851 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#generate-a-pattern-string\n\nfn generate_pattern_string(part_list: Vec<Part>, options: &Options) -> String {\n\n let mut result = String::new();\n\n let mut prev_part: Option<&Part> = None;\n\n for (i, part) in part_list.iter().enumerate() {\n\n let next_part: Option<&Part> = part_list.get(i + 1);\n\n if part.kind == PartType::FixedText {\n\n if part.modifier == PartModifier::None {\n\n result.push_str(&escape_pattern_string(&part.value));\n\n continue;\n\n }\n\n result.push_str(&format!(\n\n \"{{{}}}{}\",\n\n escape_pattern_string(&part.value),\n\n part.modifier\n\n ));\n\n continue;\n\n }\n\n let custom_name = !part.name.chars().next().unwrap().is_ascii_digit();\n\n let mut needs_grouping = !part.suffix.is_empty()\n\n || (!part.prefix.is_empty() && part.prefix != options.prefix_code_point);\n", "file_path": "src/component.rs", "rank": 28, "score": 63447.01453166711 }, { "content": "pub fn is_special_scheme(scheme: &str) -> bool {\n\n matches!(scheme, \"http\" | \"https\" | \"ws\" | \"wss\" | \"ftp\" | \"file\")\n\n}\n\n\n", "file_path": "src/canonicalize_and_process.rs", "rank": 29, "score": 61914.879654146134 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#generate-a-regular-expression-and-name-list\n\nfn generate_regular_expression_and_name_list(\n\n part_list: &[Part],\n\n options: &Options,\n\n) -> (String, Vec<String>) {\n\n let mut result = String::from(\"^\");\n\n let mut name_list = vec![];\n\n for part in part_list {\n\n if part.kind == PartType::FixedText {\n\n if part.modifier == PartModifier::None {\n\n result.push_str(&options.escape_regexp_string(&part.value));\n\n } else {\n\n result.push_str(&format!(\n\n \"(?:{}){}\",\n\n options.escape_regexp_string(&part.value),\n\n part.modifier\n\n ));\n\n }\n\n continue;\n\n }\n\n\n", "file_path": "src/component.rs", "rank": 30, "score": 59804.02275297833 }, { "content": "pub fn special_scheme_default_port(scheme: &str) -> Option<&'static str> {\n\n match scheme {\n\n \"http\" => Some(\"80\"),\n\n \"https\" => Some(\"443\"),\n\n \"ws\" => Some(\"80\"),\n\n \"wss\" => Some(\"443\"),\n\n \"ftp\" => Some(\"21\"),\n\n \"file\" => None,\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/canonicalize_and_process.rs", "rank": 31, "score": 52482.67748193207 }, { "content": "/// Parse a pattern into its components.\n\npub fn parse_pattern(init: crate::UrlPatternInit) -> Result<UrlPattern, Error> {\n\n let pattern = crate::UrlPattern::parse_internal(init, false)?;\n\n let urlpattern = UrlPattern {\n\n protocol: UrlPatternComponent {\n\n pattern_string: pattern.protocol.pattern_string,\n\n regexp_string: pattern.protocol.ecma_regexp_string,\n\n group_name_list: pattern.protocol.group_name_list,\n\n },\n\n username: UrlPatternComponent {\n\n pattern_string: pattern.username.pattern_string,\n\n regexp_string: pattern.username.ecma_regexp_string,\n\n group_name_list: pattern.username.group_name_list,\n\n },\n\n password: UrlPatternComponent {\n\n 
pattern_string: pattern.password.pattern_string,\n\n regexp_string: pattern.password.ecma_regexp_string,\n\n group_name_list: pattern.password.group_name_list,\n\n },\n\n hostname: UrlPatternComponent {\n\n pattern_string: pattern.hostname.pattern_string,\n", "file_path": "src/quirks.rs", "rank": 32, "score": 49840.91695578172 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#pattern-parser\n\nstruct PatternParser<F>\n\nwhere\n\n F: Fn(&str) -> Result<String, Error>,\n\n{\n\n token_list: Vec<Token>,\n\n encoding_callback: F,\n\n segment_wildcard_regexp: String,\n\n part_list: Vec<Part>,\n\n pending_fixed_value: String,\n\n index: usize,\n\n next_numeric_name: usize,\n\n}\n\n\n\nimpl<F> PatternParser<F>\n\nwhere\n\n F: Fn(&str) -> Result<String, Error>,\n\n{\n\n // Ref: https://wicg.github.io/urlpattern/#try-to-consume-a-token\n\n fn try_consume_token(&mut self, kind: TokenType) -> Option<Token> {\n\n assert!(self.index < self.token_list.len());\n", "file_path": "src/parser.rs", "rank": 33, "score": 43465.56462750607 }, { "content": "// Ref: https://wicg.github.io/urlpattern/#is-an-absolute-pathname\n\nfn is_absolute_pathname(\n\n input: &str,\n\n kind: &canonicalize_and_process::ProcessType,\n\n) -> bool {\n\n if input.is_empty() {\n\n return false;\n\n }\n\n if input.starts_with('/') {\n\n return true;\n\n }\n\n if kind == &canonicalize_and_process::ProcessType::Url {\n\n return false;\n\n }\n\n // TODO: input code point length\n\n if input.len() < 2 {\n\n return false;\n\n }\n\n\n\n input.starts_with(\"\\\\/\") || input.starts_with(\"{/\")\n\n}\n", "file_path": "src/lib.rs", "rank": 34, "score": 42544.920268959046 }, { "content": " // Ref: https://wicg.github.io/urlpattern/#is-an-ipv6-close\n\n #[inline]\n\n fn is_ipv6_close(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \"]\")\n\n }\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#parse-a-constructor-string\n\npub(crate) fn parse_constructor_string(\n\n input: &str,\n\n) -> Result<UrlPatternInit, Error> {\n\n let token_list = crate::tokenizer::tokenize(\n\n input,\n\n crate::tokenizer::TokenizePolicy::Lenient,\n\n )?;\n\n\n\n let mut parser = ConstructorStringParser {\n\n input,\n\n token_list,\n\n result: UrlPatternInit {\n", "file_path": "src/constructor_parser.rs", "rank": 49, "score": 23.59836293270649 }, { "content": " let next_token = self.token_list[self.index].clone();\n\n if next_token.kind != kind {\n\n None\n\n } else {\n\n self.index += 1;\n\n Some(next_token)\n\n }\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#try-to-consume-a-regexp-or-wildcard-token\n\n #[inline]\n\n fn try_consume_regexp_or_wildcard_token(\n\n &mut self,\n\n name_token_is_none: bool,\n\n ) -> Option<Token> {\n\n let token = self.try_consume_token(TokenType::Regexp);\n\n if name_token_is_none && token.is_none() {\n\n self.try_consume_token(TokenType::Asterisk)\n\n } else {\n\n token\n", "file_path": "src/parser.rs", "rank": 50, "score": 22.499757970111556 }, { "content": " next_numeric_name: 0,\n\n };\n\n\n\n while parser.index < parser.token_list.len() {\n\n let char_token = parser.try_consume_token(TokenType::Char);\n\n let mut name_token = parser.try_consume_token(TokenType::Name);\n\n let mut regexp_or_wildcard_token =\n\n parser.try_consume_regexp_or_wildcard_token(name_token.is_none());\n\n if name_token.is_some() || regexp_or_wildcard_token.is_some() {\n\n let mut prefix = String::new();\n\n if let Some(char_token) = char_token {\n\n prefix = char_token.value.to_owned();\n\n }\n\n if 
!prefix.is_empty() && prefix != options.prefix_code_point {\n\n parser.pending_fixed_value.push_str(&prefix);\n\n prefix = String::new();\n\n }\n\n parser.maybe_add_part_from_pending_fixed_value()?;\n\n let modifier_token = parser.try_consume_modifier_token();\n\n parser.add_part(\n", "file_path": "src/parser.rs", "rank": 51, "score": 21.384207961239806 }, { "content": " }\n\n if self.token_index == 0 {\n\n return true;\n\n }\n\n let previous_token = self.get_safe_token(self.token_index - 1);\n\n !matches!(\n\n previous_token.kind,\n\n TokenType::Name\n\n | TokenType::Regexp\n\n | TokenType::Close\n\n | TokenType::Asterisk\n\n )\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-password-prefix\n\n #[inline]\n\n fn is_password_prefix(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \":\")\n\n }\n\n\n", "file_path": "src/constructor_parser.rs", "rank": 52, "score": 21.352967774455852 }, { "content": " loop {\n\n let mut token = self.try_consume_token(TokenType::Char);\n\n if token.is_none() {\n\n token = self.try_consume_token(TokenType::EscapedChar);\n\n }\n\n if token.is_none() {\n\n break;\n\n }\n\n result.push_str(&token.unwrap().value);\n\n }\n\n result\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#consume-a-required-token\n\n #[inline]\n\n fn consume_required_token(\n\n &mut self,\n\n kind: TokenType,\n\n ) -> Result<Token, Error> {\n\n self.try_consume_token(kind.clone()).ok_or_else(|| {\n\n Error::Parser(ParserError::ExpectedToken(\n\n kind,\n\n self.token_list[self.index].kind.clone(),\n\n self.token_list[self.index].value.clone(),\n\n ))\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 53, "score": 21.170727170261152 }, { "content": " }\n\n }\n\n\n\n pub fn with_syntax(mut self, syntax: RegexSyntax) -> Self {\n\n self.regex_syntax = syntax;\n\n self\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#escape-a-regexp-string\n\n pub fn escape_regexp_string(&self, input: &str) -> String {\n\n assert!(input.is_ascii());\n\n let mut result = String::new();\n\n for char in input.chars() {\n\n if matches!(\n\n char,\n\n '.'\n\n | '+'\n\n | '*'\n\n | '?'\n\n | '^'\n", "file_path": "src/parser.rs", "rank": 54, "score": 20.976552948971115 }, { "content": " if regexp_value == self.segment_wildcard_regexp {\n\n kind = PartType::SegmentWildcard;\n\n regexp_value = String::new();\n\n } else if regexp_value == FULL_WILDCARD_REGEXP_VALUE {\n\n kind = PartType::FullWildcard;\n\n regexp_value = String::new();\n\n }\n\n\n\n let mut name = String::new();\n\n if let Some(name_token) = name_token {\n\n name = name_token.value;\n\n } else if regexp_or_wildcard_token.is_some() {\n\n name = self.next_numeric_name.to_string();\n\n self.next_numeric_name += 1;\n\n }\n\n if self.is_duplicate_name(&name) {\n\n return Err(Error::Parser(ParserError::DuplicateName(name)));\n\n }\n\n let encoded_prefix = (self.encoding_callback)(prefix)?;\n\n let encoded_suffix = (self.encoding_callback)(suffix)?;\n", "file_path": "src/parser.rs", "rank": 55, "score": 20.910348923659647 }, { "content": " self.part_list.push(Part::new(\n\n PartType::FixedText,\n\n encoded_value,\n\n modifier,\n\n ));\n\n return Ok(());\n\n }\n\n\n\n let mut regexp_value = match &regexp_or_wildcard_token {\n\n None => self.segment_wildcard_regexp.to_owned(),\n\n Some(regexp_or_wildcard_token) => {\n\n if regexp_or_wildcard_token.kind == TokenType::Asterisk {\n\n FULL_WILDCARD_REGEXP_VALUE.to_string()\n\n } else {\n\n regexp_or_wildcard_token.value.to_owned()\n\n }\n\n }\n\n };\n\n\n\n 
let mut kind = PartType::Regexp;\n", "file_path": "src/parser.rs", "rank": 56, "score": 19.967176649753362 }, { "content": "// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.\n\n\n\nuse crate::parser::Options;\n\nuse crate::parser::Part;\n\nuse crate::parser::PartModifier;\n\nuse crate::parser::PartType;\n\nuse crate::parser::FULL_WILDCARD_REGEXP_VALUE;\n\nuse crate::tokenizer::is_valid_name_codepoint;\n\nuse crate::Error;\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#component\n\n#[derive(Debug)]\n\npub(crate) struct Component {\n\n pub pattern_string: String,\n\n pub rust_regexp: Result<regex::Regex, Error>,\n\n pub ecma_regexp_string: String,\n\n pub group_name_list: Vec<String>,\n\n}\n\n\n\nimpl Component {\n", "file_path": "src/component.rs", "rank": 57, "score": 19.337200020013544 }, { "content": " let token = self.get_safe_token(index);\n\n if token.value != value {\n\n false\n\n } else {\n\n matches!(\n\n token.kind,\n\n TokenType::Char | TokenType::EscapedChar | TokenType::InvalidChar\n\n )\n\n }\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#get-a-safe-token\n\n fn get_safe_token(&self, index: usize) -> &Token {\n\n if index < self.token_list.len() {\n\n &self.token_list[index]\n\n } else {\n\n assert!(self.token_list.len() <= 1);\n\n let token = self.token_list.last().unwrap();\n\n assert!(token.kind == TokenType::End);\n\n token\n", "file_path": "src/constructor_parser.rs", "rank": 58, "score": 18.961855097314945 }, { "content": "// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.\n\n\n\nuse crate::error::ParserError;\n\nuse crate::tokenizer::Token;\n\nuse crate::tokenizer::TokenType;\n\nuse crate::Error;\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#full-wildcard-regexp-value\n\npub const FULL_WILDCARD_REGEXP_VALUE: &str = \".*\";\n\n\n\n/// The regexp syntax that should be used.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum RegexSyntax {\n\n /// Compile regexes to rust-regex syntax. This is the default.\n\n Rust,\n\n /// Compile regexes to ECMAScript syntax. 
This should be used with the\n\n /// [crate::quirks::component_regex].\n\n ///\n\n /// NOTE: enabling this syntax kind, means the regex syntax will NOT be\n\n /// validated during parsing.\n", "file_path": "src/parser.rs", "rank": 59, "score": 18.651759773350722 }, { "content": " pub hash: UrlPatternComponentResult,\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#dictdef-urlpatterncomponentresult\n\n/// A result of a URL pattern match on a single component.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct UrlPatternComponentResult {\n\n /// The matched input for this component.\n\n pub input: String,\n\n /// The values for all named groups in the pattern.\n\n pub groups: std::collections::HashMap<String, String>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n\n\n use serde::Deserialize;\n\n use url::Url;\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 18.622593598293886 }, { "content": " &prefix,\n\n name_token,\n\n regexp_or_wildcard_token,\n\n \"\",\n\n modifier_token,\n\n )?;\n\n continue;\n\n }\n\n let mut fixed_token = char_token;\n\n if fixed_token.is_none() {\n\n fixed_token = parser.try_consume_token(TokenType::EscapedChar);\n\n }\n\n if let Some(fixed_token) = fixed_token {\n\n parser.pending_fixed_value.push_str(&fixed_token.value);\n\n continue;\n\n }\n\n let open_token = parser.try_consume_token(TokenType::Open);\n\n if open_token.is_some() {\n\n let prefix = parser.consume_text();\n\n name_token = parser.try_consume_token(TokenType::Name);\n", "file_path": "src/parser.rs", "rank": 61, "score": 18.499325852932678 }, { "content": " PartModifier::ZeroOrMore => \"*\",\n\n PartModifier::OneOrMore => \"+\",\n\n })\n\n }\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#part\n\n#[derive(Debug)]\n\npub struct Part {\n\n pub kind: PartType,\n\n pub value: String,\n\n pub modifier: PartModifier,\n\n pub name: String,\n\n pub prefix: String,\n\n pub suffix: String,\n\n}\n\n\n\nimpl Part {\n\n fn new(kind: PartType, value: String, modifier: PartModifier) -> Self {\n\n Part {\n", "file_path": "src/parser.rs", "rank": 62, "score": 18.309119911372722 }, { "content": " fn is_group_open(&self) -> bool {\n\n self.token_list[self.token_index].kind == TokenType::Open\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-group-close\n\n #[inline]\n\n fn is_group_close(&self) -> bool {\n\n self.token_list[self.token_index].kind == TokenType::Close\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#compute-protocol-matches-a-special-scheme-flag\n\n fn compute_protocol_matches_special_scheme(&mut self) -> Result<(), Error> {\n\n let protocol_string = self.make_component_string();\n\n let protocol_component = crate::component::Component::compile(\n\n Some(&protocol_string),\n\n crate::canonicalize_and_process::canonicalize_protocol,\n\n Default::default(),\n\n )?;\n\n if protocol_component.protocol_component_matches_special_scheme() {\n\n self.protocol_matches_special_scheme = true;\n", "file_path": "src/constructor_parser.rs", "rank": 63, "score": 17.98409230110373 }, { "content": " fn make_component_string(&self) -> String {\n\n assert!(self.token_index < self.token_list.len());\n\n let token = &self.token_list[self.token_index];\n\n let component_start_index = self.get_safe_token(self.component_start).index;\n\n self\n\n .input\n\n .get(component_start_index..token.index) // TODO: check & codepoint\n\n .unwrap()\n\n .to_string()\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#rewind-and-set-state\n\n #[inline]\n\n fn 
rewind_and_set_state(&mut self, state: ConstructorStringParserState) {\n\n self.rewind();\n\n self.state = state;\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-group-open\n\n #[inline]\n", "file_path": "src/constructor_parser.rs", "rank": 64, "score": 17.437345715862573 }, { "content": "use derive_more::Display;\n\n\n\nuse crate::tokenizer::TokenType;\n\n\n\n/// A error occuring during URL pattern construction, or matching.\n\n#[derive(Display)]\n\npub enum Error {\n\n #[display(fmt = \"a relative input without a base URL is not valid\")]\n\n BaseUrlRequired,\n\n\n\n #[display(\n\n fmt = \"specifying both an init object, and a seperate base URL is not valid\"\n\n )]\n\n BaseUrlWithInit,\n\n\n\n #[display(fmt = \"tokenizer error: {} (at char {})\", _0, _1)]\n\n Tokenizer(TokenizerError, usize),\n\n\n\n #[display(fmt = \"parser error: {}\", _0)]\n\n Parser(ParserError),\n", "file_path": "src/error.rs", "rank": 65, "score": 16.99482810515757 }, { "content": "use crate::component::Component;\n\n\n\n/// The structured input used to create a URL pattern.\n\n#[derive(Debug, Default, Clone, Eq, PartialEq)]\n\npub struct UrlPatternInit {\n\n pub protocol: Option<String>,\n\n pub username: Option<String>,\n\n pub password: Option<String>,\n\n pub hostname: Option<String>,\n\n pub port: Option<String>,\n\n pub pathname: Option<String>,\n\n pub search: Option<String>,\n\n pub hash: Option<String>,\n\n pub base_url: Option<Url>,\n\n}\n\n\n\nimpl UrlPatternInit {\n\n pub fn parse_constructor_string(\n\n pattern: &str,\n\n base_url: Option<Url>,\n", "file_path": "src/lib.rs", "rank": 66, "score": 16.850974463675442 }, { "content": " pub port: UrlPatternComponent,\n\n pub pathname: UrlPatternComponent,\n\n pub search: UrlPatternComponent,\n\n pub hash: UrlPatternComponent,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UrlPatternComponent {\n\n pub pattern_string: String,\n\n pub regexp_string: String,\n\n pub group_name_list: Vec<String>,\n\n}\n\n\n", "file_path": "src/quirks.rs", "rank": 67, "score": 15.595747006129582 }, { "content": " base_url,\n\n };\n\n crate::UrlPatternMatchInput::Init(init)\n\n }\n\n };\n\n\n\n Ok(Some((init, inputs)))\n\n}\n\n\n\n#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct MatchInput {\n\n pub protocol: String,\n\n pub username: String,\n\n pub password: String,\n\n pub hostname: String,\n\n pub port: String,\n\n pub pathname: String,\n\n pub search: String,\n\n pub hash: String,\n\n}\n\n\n", "file_path": "src/quirks.rs", "rank": 68, "score": 15.539136369395614 }, { "content": " InvalidRegex(&'static str),\n\n}\n\n\n\n#[derive(Debug, Display)]\n\npub enum ParserError {\n\n #[display(fmt = \"expected token {}, found '{}' of type {}\", _0, _2, _1)]\n\n ExpectedToken(TokenType, TokenType, String),\n\n\n\n #[display(fmt = \"pattern contains duplicate name {}\", _0)]\n\n DuplicateName(String),\n\n}\n", "file_path": "src/error.rs", "rank": 69, "score": 15.493747365124484 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#next-is-authority-slashes\n\n #[inline]\n\n fn next_is_authority_slashes(&self) -> bool {\n\n if !self.is_non_special_pattern_char(self.token_index + 1, \"/\") {\n\n false\n\n } else {\n\n self.is_non_special_pattern_char(self.token_index + 2, \"/\")\n\n }\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-an-ipv6-open\n\n #[inline]\n\n fn is_ipv6_open(&self) -> bool {\n\n 
self.is_non_special_pattern_char(self.token_index, \"[\")\n\n }\n\n\n", "file_path": "src/constructor_parser.rs", "rank": 70, "score": 15.254758863202152 }, { "content": " self.part_list.push(Part {\n\n kind,\n\n value: regexp_value,\n\n modifier,\n\n name,\n\n prefix: encoded_prefix,\n\n suffix: encoded_suffix,\n\n });\n\n\n\n Ok(())\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-duplicate-name\n\n fn is_duplicate_name(&self, name: &str) -> bool {\n\n self.part_list.iter().any(|p| p.name == name)\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#consume-text\n\n fn consume_text(&mut self) -> String {\n\n let mut result = String::new();\n", "file_path": "src/parser.rs", "rank": 71, "score": 15.218968739341175 }, { "content": " // Ref: https://wicg.github.io/urlpattern/#is-a-port-prefix\n\n #[inline]\n\n fn is_port_prefix(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \":\")\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-pathname-start\n\n #[inline]\n\n fn is_pathname_start(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \"/\")\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-an-identity-terminator\n\n #[inline]\n\n fn is_identity_terminator(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \"@\")\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-non-special-pattern-char\n\n fn is_non_special_pattern_char(&self, index: usize, value: &str) -> bool {\n", "file_path": "src/constructor_parser.rs", "rank": 72, "score": 15.131650840735023 }, { "content": " }\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#try-to-consume-a-modifier-token\n\n #[inline]\n\n fn try_consume_modifier_token(&mut self) -> Option<Token> {\n\n self\n\n .try_consume_token(TokenType::OtherModifier)\n\n .or_else(|| self.try_consume_token(TokenType::Asterisk))\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#maybe-add-a-part-from-the-pending-fixed-value\n\n #[inline]\n\n fn maybe_add_part_from_pending_fixed_value(&mut self) -> Result<(), Error> {\n\n if self.pending_fixed_value.is_empty() {\n\n return Ok(());\n\n }\n\n let encoded_value = (self.encoding_callback)(&self.pending_fixed_value)?;\n\n self.pending_fixed_value = String::new();\n\n self.part_list.push(Part::new(\n", "file_path": "src/parser.rs", "rank": 73, "score": 14.843271976363663 }, { "content": " // Ref: https://wicg.github.io/urlpattern/#compile-a-component\n\n pub(crate) fn compile<F>(\n\n input: Option<&str>,\n\n encoding_callback: F,\n\n options: Options,\n\n ) -> Result<Self, Error>\n\n where\n\n F: Fn(&str) -> Result<String, Error>,\n\n {\n\n let part_list = crate::parser::parse_pattern_string(\n\n input.unwrap_or(\"*\"),\n\n &options,\n\n encoding_callback,\n\n )?;\n\n let (rust_regexp_string, _) =\n\n generate_regular_expression_and_name_list(&part_list, &options);\n\n let rust_regexp =\n\n regex::Regex::new(&rust_regexp_string).map_err(Error::RegEx);\n\n let options = options.with_syntax(crate::parser::RegexSyntax::EcmaScript);\n\n let (ecma_regexp_string, name_list) =\n", "file_path": "src/component.rs", "rank": 74, "score": 14.829208244135131 }, { "content": " assert!(!part.name.is_empty());\n\n name_list.push(part.name.clone());\n\n let regexp_value = if part.kind == PartType::SegmentWildcard {\n\n options.generate_segment_wildcard_regexp()\n\n } else if part.kind == PartType::FullWildcard {\n\n FULL_WILDCARD_REGEXP_VALUE.to_string()\n\n } else {\n\n part.value.clone()\n\n };\n\n\n\n if 
part.prefix.is_empty() && part.suffix.is_empty() {\n\n if matches!(part.modifier, PartModifier::None | PartModifier::Optional) {\n\n result.push_str(&format!(\"({}){}\", regexp_value, part.modifier));\n\n } else {\n\n result.push_str(&format!(\"((?:{}){})\", regexp_value, part.modifier));\n\n }\n\n continue;\n\n }\n\n if matches!(part.modifier, PartModifier::None | PartModifier::Optional) {\n\n result.push_str(&format!(\n", "file_path": "src/component.rs", "rank": 75, "score": 14.820466473092837 }, { "content": " regexp_or_wildcard_token =\n\n parser.try_consume_regexp_or_wildcard_token(name_token.is_none());\n\n let suffix = parser.consume_text();\n\n parser.consume_required_token(TokenType::Close)?;\n\n let modifier_token = parser.try_consume_modifier_token();\n\n parser.add_part(\n\n &prefix,\n\n name_token,\n\n regexp_or_wildcard_token,\n\n &suffix,\n\n modifier_token,\n\n )?;\n\n continue;\n\n }\n\n parser.maybe_add_part_from_pending_fixed_value()?;\n\n parser.consume_required_token(TokenType::End)?;\n\n }\n\n\n\n Ok(parser.part_list)\n\n}\n", "file_path": "src/parser.rs", "rank": 76, "score": 14.758628134077778 }, { "content": "\n\n Url(url::ParseError),\n\n RegEx(regex::Error),\n\n}\n\n\n\nimpl std::error::Error for Error {}\n\n\n\nimpl std::fmt::Debug for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n std::fmt::Display::fmt(self, f)\n\n }\n\n}\n\n\n\n#[derive(Debug, Display)]\n\npub enum TokenizerError {\n\n #[display(fmt = \"incomplete escape code\")]\n\n IncompleteEscapeCode,\n\n #[display(fmt = \"invalid name; must be at least length 1\")]\n\n InvalidName,\n\n #[display(fmt = \"invalid regex: {}\", _0)]\n", "file_path": "src/error.rs", "rank": 77, "score": 14.010893878097695 }, { "content": "\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-hash-prefix\n\n #[inline]\n\n fn is_hash_prefix(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \"#\")\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-protocol-suffix\n\n #[inline]\n\n fn is_protocol_suffix(&self) -> bool {\n\n self.is_non_special_pattern_char(self.token_index, \":\")\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#is-a-search-prefix\n\n fn is_search_prefix(&self) -> bool {\n\n if self.is_non_special_pattern_char(self.token_index, \"?\") {\n\n return true;\n\n }\n\n if self.token_list[self.token_index].value != \"?\" {\n\n return false;\n", "file_path": "src/constructor_parser.rs", "rank": 78, "score": 13.958342036370214 }, { "content": "//! This module contains functions required to integrate this library into\n\n//! browsers. 
If you are not building a browser, you can ignore this module.\n\n\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\nuse url::Url;\n\n\n\npub use crate::Error;\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct UrlPatternInit {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub protocol: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub username: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub password: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub hostname: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/quirks.rs", "rank": 79, "score": 13.127461204129926 }, { "content": " Regexp,\n\n SegmentWildcard,\n\n FullWildcard,\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#part-modifier\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum PartModifier {\n\n None,\n\n Optional,\n\n ZeroOrMore,\n\n OneOrMore,\n\n}\n\n\n\nimpl std::fmt::Display for PartModifier {\n\n // Ref: https://wicg.github.io/urlpattern/#convert-a-modifier-to-a-string\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(match self {\n\n PartModifier::None => \"\",\n\n PartModifier::Optional => \"?\",\n", "file_path": "src/parser.rs", "rank": 80, "score": 12.968548291710217 }, { "content": " regexp_string: pattern.hostname.ecma_regexp_string,\n\n group_name_list: pattern.hostname.group_name_list,\n\n },\n\n port: UrlPatternComponent {\n\n pattern_string: pattern.port.pattern_string,\n\n regexp_string: pattern.port.ecma_regexp_string,\n\n group_name_list: pattern.port.group_name_list,\n\n },\n\n pathname: UrlPatternComponent {\n\n pattern_string: pattern.pathname.pattern_string,\n\n regexp_string: pattern.pathname.ecma_regexp_string,\n\n group_name_list: pattern.pathname.group_name_list,\n\n },\n\n search: UrlPatternComponent {\n\n pattern_string: pattern.search.pattern_string,\n\n regexp_string: pattern.search.ecma_regexp_string,\n\n group_name_list: pattern.search.group_name_list,\n\n },\n\n hash: UrlPatternComponent {\n\n pattern_string: pattern.hash.pattern_string,\n\n regexp_string: pattern.hash.ecma_regexp_string,\n\n group_name_list: pattern.hash.group_name_list,\n\n },\n\n };\n\n Ok(urlpattern)\n\n}\n\n\n\npub type Inputs = (StringOrInit, Option<String>);\n\n\n", "file_path": "src/quirks.rs", "rank": 81, "score": 12.958517090404543 }, { "content": " }\n\n false\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#create-a-component-match-result\n\n pub(crate) fn create_match_result(\n\n &self,\n\n input: String,\n\n exec_result: regex::Captures,\n\n ) -> crate::UrlPatternComponentResult {\n\n let mut iter = exec_result.iter();\n\n iter.next(); // first match is entire string\n\n crate::UrlPatternComponentResult {\n\n input,\n\n groups: self\n\n .group_name_list\n\n .clone()\n\n .into_iter()\n\n .zip(iter.map(|e| e.map(|e| e.as_str().to_string())))\n\n .map(|(name, key)| (name, key.unwrap_or_default()))\n", "file_path": "src/component.rs", "rank": 82, "score": 12.94595063226179 }, { "content": "}\n\n\n\nimpl Options {\n\n // Ref: https://wicg.github.io/urlpattern/#hostname-options\n\n #[inline]\n\n pub fn hostname() -> Self {\n\n Options {\n\n delimiter_code_point: String::from(\".\"),\n\n prefix_code_point: String::new(),\n\n regex_syntax: RegexSyntax::Rust,\n\n }\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#pathname-options\n\n #[inline]\n\n pub fn pathname() -> Self 
{\n\n Options {\n\n delimiter_code_point: String::from(\"/\"),\n\n prefix_code_point: String::from(\"/\"),\n\n regex_syntax: RegexSyntax::Rust,\n", "file_path": "src/parser.rs", "rank": 83, "score": 12.7910088273719 }, { "content": " PartType::FixedText,\n\n encoded_value,\n\n PartModifier::None,\n\n ));\n\n\n\n Ok(())\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#add-a-part\n\n fn add_part(\n\n &mut self,\n\n prefix: &str,\n\n name_token: Option<Token>,\n\n regexp_or_wildcard_token: Option<Token>,\n\n suffix: &str,\n\n modifier_token: Option<Token>,\n\n ) -> Result<(), Error> {\n\n let mut modifier = PartModifier::None;\n\n if let Some(modifier_token) = modifier_token {\n\n modifier = match modifier_token.value.as_ref() {\n", "file_path": "src/parser.rs", "rank": 84, "score": 12.612654755611725 }, { "content": " #[inline]\n\n pub fn generate_segment_wildcard_regexp(&self) -> String {\n\n // NOTE: this is a deliberate deviation from the spec. In rust-regex, you\n\n // can not have a negative character class without specifying any\n\n // characters.\n\n if self.delimiter_code_point.is_empty() {\n\n \".+?\".to_owned()\n\n } else {\n\n format!(\n\n \"[^{}]+?\",\n\n self.escape_regexp_string(&self.delimiter_code_point)\n\n )\n\n }\n\n }\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#part-type\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum PartType {\n\n FixedText,\n", "file_path": "src/parser.rs", "rank": 85, "score": 12.574709579258734 }, { "content": " continue;\n\n }\n\n parser.change_state(ConstructorStringParserState::Done, 0);\n\n break;\n\n }\n\n if parser.is_group_open() {\n\n parser.group_depth += 1;\n\n parser.token_index += parser.token_increment;\n\n continue;\n\n }\n\n if parser.group_depth > 0 {\n\n if parser.is_group_close() {\n\n parser.group_depth -= 1;\n\n } else {\n\n parser.token_index += parser.token_increment;\n\n continue;\n\n }\n\n }\n\n match parser.state {\n\n ConstructorStringParserState::Init => {\n", "file_path": "src/constructor_parser.rs", "rank": 86, "score": 12.568800182674178 }, { "content": " .collect(),\n\n }\n\n }\n\n\n\n pub(crate) fn optionally_transpose_regex_error(\n\n mut self,\n\n do_transpose: bool,\n\n ) -> Result<Self, Error> {\n\n if do_transpose {\n\n self.rust_regexp = Ok(self.rust_regexp?);\n\n }\n\n Ok(self)\n\n }\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#generate-a-regular-expression-and-name-list\n", "file_path": "src/component.rs", "rank": 87, "score": 12.290444349283067 }, { "content": " pub port: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub pathname: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub search: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub hash: Option<String>,\n\n #[serde(rename = \"baseURL\", skip_serializing_if = \"Option::is_none\")]\n\n pub base_url: Option<String>,\n\n}\n\n\n\n#[allow(clippy::large_enum_variant)]\n\n#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\npub enum StringOrInit {\n\n String(String),\n\n Init(UrlPatternInit),\n\n}\n\n\n\n/// This function constructs a UrlPattern given a string or UrlPatternInit and\n\n/// optionally a base url.\n", "file_path": "src/quirks.rs", "rank": 88, "score": 12.075074273167044 }, { "content": "// Copyright 2018-2021 the Deno authors. All rights reserved. 
MIT license.\n\n\n\nuse crate::error::Error;\n\nuse crate::tokenizer::Token;\n\nuse crate::tokenizer::TokenType;\n\nuse crate::UrlPatternInit;\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#constructor-string-parser-state\n\n#[derive(Debug, Eq, PartialEq)]\n", "file_path": "src/constructor_parser.rs", "rank": 89, "score": 11.98283540991934 }, { "content": " EcmaScript,\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#options-header\n\npub struct Options {\n\n delimiter_code_point: String, // TODO: It must contain one ASCII code point or the empty string. maybe Option<char>?\n\n pub prefix_code_point: String, // TODO: It must contain one ASCII code point or the empty string. maybe Option<char>?\n\n regex_syntax: RegexSyntax,\n\n}\n\n\n\nimpl std::default::Default for Options {\n\n // Ref: https://wicg.github.io/urlpattern/#default-options\n\n #[inline]\n\n fn default() -> Self {\n\n Options {\n\n delimiter_code_point: String::new(),\n\n prefix_code_point: String::new(),\n\n regex_syntax: RegexSyntax::Rust,\n\n }\n\n }\n", "file_path": "src/parser.rs", "rank": 90, "score": 11.94014895943922 }, { "content": " parser.token_increment = 1;\n\n if parser.token_list[parser.token_index].kind == TokenType::End {\n\n if parser.state == ConstructorStringParserState::Init {\n\n parser.rewind();\n\n if parser.is_hash_prefix() {\n\n parser.change_state(ConstructorStringParserState::Hash, 1);\n\n } else if parser.is_search_prefix() {\n\n parser.change_state(ConstructorStringParserState::Search, 1);\n\n parser.result.hash = Some(String::new());\n\n } else {\n\n parser.change_state(ConstructorStringParserState::Pathname, 0);\n\n parser.result.search = Some(String::new());\n\n parser.result.hash = Some(String::new());\n\n }\n\n parser.token_index += parser.token_increment;\n\n continue;\n\n }\n\n if parser.state == ConstructorStringParserState::Authority {\n\n parser.rewind_and_set_state(ConstructorStringParserState::Hostname);\n\n parser.token_index += parser.token_increment;\n", "file_path": "src/constructor_parser.rs", "rank": 91, "score": 11.920864203869627 }, { "content": " }\n\n result.push_str(&escape_pattern_string(&part.prefix));\n\n if custom_name {\n\n result.push(':');\n\n result.push_str(&part.name);\n\n }\n\n match part.kind {\n\n PartType::FixedText => unreachable!(),\n\n PartType::Regexp => result.push_str(&format!(\"({})\", part.value)),\n\n PartType::SegmentWildcard if !custom_name => result\n\n .push_str(&format!(\"({})\", options.generate_segment_wildcard_regexp())),\n\n PartType::SegmentWildcard => {}\n\n PartType::FullWildcard => {\n\n if custom_name\n\n || matches!(prev_part, Some(Part {kind, modifier: PartModifier::None, .. 
}) if kind != &PartType::FixedText)\n\n {\n\n result.push_str(&format!(\"({})\", FULL_WILDCARD_REGEXP_VALUE));\n\n } else {\n\n result.push('*');\n\n }\n", "file_path": "src/component.rs", "rank": 92, "score": 11.744581154449211 }, { "content": " \"?\" => PartModifier::Optional,\n\n \"*\" => PartModifier::ZeroOrMore,\n\n \"+\" => PartModifier::OneOrMore,\n\n _ => unreachable!(),\n\n };\n\n }\n\n if name_token.is_none()\n\n && regexp_or_wildcard_token.is_none()\n\n && modifier == PartModifier::None\n\n {\n\n self.pending_fixed_value.push_str(prefix);\n\n return Ok(());\n\n }\n\n self.maybe_add_part_from_pending_fixed_value()?;\n\n if name_token.is_none() && regexp_or_wildcard_token.is_none() {\n\n assert!(suffix.is_empty());\n\n if prefix.is_empty() {\n\n return Ok(());\n\n }\n\n let encoded_value = (self.encoding_callback)(prefix)?;\n", "file_path": "src/parser.rs", "rank": 93, "score": 11.677590244442577 }, { "content": " kind,\n\n value,\n\n modifier,\n\n name: String::new(),\n\n prefix: String::new(),\n\n suffix: String::new(),\n\n }\n\n }\n\n}\n\n\n\n// Ref: https://wicg.github.io/urlpattern/#pattern-parser\n", "file_path": "src/parser.rs", "rank": 94, "score": 11.419631061308053 }, { "content": " \"(?:{}({}){}){}\",\n\n options.escape_regexp_string(&part.prefix),\n\n regexp_value,\n\n options.escape_regexp_string(&part.suffix),\n\n part.modifier\n\n ));\n\n continue;\n\n }\n\n assert!(!part.prefix.is_empty() || !part.suffix.is_empty());\n\n result.push_str(&format!(\n\n \"(?:{}((?:{})(?:{}{}(?:{}))*){}){}\",\n\n options.escape_regexp_string(&part.prefix),\n\n regexp_value,\n\n options.escape_regexp_string(&part.suffix),\n\n options.escape_regexp_string(&part.prefix),\n\n regexp_value,\n\n options.escape_regexp_string(&part.suffix),\n\n if part.modifier == PartModifier::ZeroOrMore {\n\n \"?\" // TODO: https://github.com/WICG/urlpattern/issues/91\n\n } else {\n\n \"\"\n\n }\n\n ));\n\n }\n\n result.push('$');\n\n (result, name_list)\n\n}\n\n\n", "file_path": "src/component.rs", "rank": 95, "score": 11.360148498781843 }, { "content": " &self.port.pattern_string\n\n }\n\n\n\n /// The pattern used to match against the pathname of the URL.\n\n pub fn pathname(&self) -> &str {\n\n &self.pathname.pattern_string\n\n }\n\n\n\n /// The pattern used to match against the search string of the URL.\n\n pub fn search(&self) -> &str {\n\n &self.search.pattern_string\n\n }\n\n\n\n /// The pattern used to match against the hash fragment of the URL.\n\n pub fn hash(&self) -> &str {\n\n &self.hash.pattern_string\n\n }\n\n\n\n // Ref: https://wicg.github.io/urlpattern/#dom-urlpattern-test\n\n /// Test if a given [UrlPatternInput] (with optional base url), matches the\n", "file_path": "src/lib.rs", "rank": 96, "score": 11.1560032121863 }, { "content": " TwoArguments(quirks::StringOrInit, String),\n\n }\n\n\n\n let res = Option::<MatchResultInputs>::deserialize(deserializer)?;\n\n Ok(match res {\n\n Some(MatchResultInputs::OneArgument((a,))) => Some((a, None)),\n\n Some(MatchResultInputs::TwoArguments(a, b)) => Some((a, Some(b))),\n\n None => None,\n\n })\n\n }\n\n\n\n fn test_case(case: TestCase) {\n\n let input = case.pattern.get(0).cloned();\n\n let mut base_url = case.pattern.get(1).map(|input| match input {\n\n StringOrInit::String(str) => str.clone(),\n\n StringOrInit::Init(_) => unreachable!(),\n\n });\n\n\n\n println!(\"\\n=====\");\n\n println!(\n", "file_path": "src/lib.rs", "rank": 97, "score": 10.97054071489152 }, { "content": " use crate::quirks;\n\n use crate::quirks::StringOrInit;\n\n use 
crate::UrlPatternComponentResult;\n\n use crate::UrlPatternResult;\n\n\n\n use super::UrlPattern;\n\n use super::UrlPatternInit;\n\n\n\n #[derive(Deserialize)]\n\n #[serde(untagged)]\n\n #[allow(clippy::large_enum_variant)]\n\n enum ExpectedMatch {\n\n String(String),\n\n MatchResult(MatchResult),\n\n }\n\n\n\n #[derive(Debug, Deserialize)]\n\n struct ComponentResult {\n\n input: String,\n\n groups: HashMap<String, String>,\n", "file_path": "src/lib.rs", "rank": 98, "score": 10.693547249117685 }, { "content": " assert_field!(search);\n\n assert_field!(hash);\n\n\n\n let input = case.inputs.get(0).cloned();\n\n let base_url = case.inputs.get(1).map(|input| match input {\n\n StringOrInit::String(str) => str.clone(),\n\n StringOrInit::Init(_) => unreachable!(),\n\n });\n\n\n\n println!(\n\n \"Input: {}, {}\",\n\n serde_json::to_string(&input).unwrap(),\n\n serde_json::to_string(&base_url).unwrap(),\n\n );\n\n\n\n let input = input.unwrap_or_else(|| StringOrInit::Init(Default::default()));\n\n\n\n let expected_input = (input.clone(), base_url.clone());\n\n\n\n let match_input = quirks::process_match_input(input, base_url.as_deref());\n", "file_path": "src/lib.rs", "rank": 99, "score": 10.555525482945052 } ]
Rust
src/main.rs
Cxarli/minecrab
ddff0ab35c234c4413b2638c2615a2767c37c9ff
mod aabb;
mod camera;
mod geometry;
mod geometry_buffers;
mod hud;
mod player;
mod render_context;
mod state;
mod text_renderer;
mod texture;
mod time;
mod utils;
mod vertex;
mod view;
mod world;

use std::time::{Duration, Instant};

use winit::{
    dpi::{PhysicalSize, Size},
    event::{ElementState, Event, KeyboardInput, MouseButton, VirtualKeyCode, WindowEvent},
    event_loop::{ControlFlow, EventLoop},
    window::{Window, WindowBuilder},
};

use crate::state::State;

fn handle_window_event(
    event: &WindowEvent,
    state: &mut State,
    window: &Window,
) -> Option<ControlFlow> {
    match event {
        WindowEvent::CloseRequested => Some(ControlFlow::Exit),
        WindowEvent::KeyboardInput {
            input:
                KeyboardInput {
                    state: ElementState::Pressed,
                    virtual_keycode: Some(VirtualKeyCode::Escape),
                    ..
                },
            ..
        } => {
            let _ = window.set_cursor_grab(false);
            window.set_cursor_visible(true);
            state.mouse_grabbed = false;
            None
        }
        WindowEvent::Resized(physical_size) => {
            state.resize(*physical_size);
            None
        }
        WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {
            state.resize(**new_inner_size);
            None
        }
        WindowEvent::MouseInput {
            state: mouse_state,
            button,
            ..
        } => {
            if !state.mouse_grabbed
                && *button == MouseButton::Left
                && *mouse_state == ElementState::Pressed
            {
                let _ = window.set_cursor_grab(true);
                window.set_cursor_visible(false);
                state.mouse_grabbed = true;
            } else {
                state.window_event(event);
            }
            None
        }
        WindowEvent::Focused(false) => {
            let _ = window.set_cursor_grab(false);
            window.set_cursor_visible(true);
            state.mouse_grabbed = false;
            None
        }
        event => {
            state.window_event(event);
            None
        }
    }
}

fn main() {
    env_logger::init();
    let event_loop = EventLoop::new();
    let window = WindowBuilder::new()
        .with_title("minecrab")
        .with_inner_size(Size::Physical(PhysicalSize {
            width: 1280,
            height: 720,
        }))
        .build(&event_loop)
        .unwrap();

    let mut state = futures::executor::block_on(State::new(&window));

    let mut frames = 0;
    let mut frame_instant = Instant::now();
    let mut elapsed = Duration::from_secs(0);
    let mut frametime_min = Duration::from_secs(1000);
    let mut frametime_max = Duration::from_secs(0);
    let mut last_render_time = Instant::now();
    let mut triangle_count = 0;

    event_loop.run(move |event, _, control_flow| {
        match event {
            Event::DeviceEvent { ref event, .. } => state.device_event(event),
            Event::WindowEvent {
                ref event,
                window_id,
            } if window_id == window.id() => {
                if let Some(cf) = handle_window_event(event, &mut state, &window) {
                    *control_flow = cf
                }
            }
            Event::RedrawRequested(_) => {
                let frame_elapsed = frame_instant.elapsed();
                frame_instant = Instant::now();
                frametime_min = frametime_min.min(frame_elapsed);
                frametime_max = frametime_max.max(frame_elapsed);
                elapsed += frame_elapsed;
                frames += 1;
                if elapsed.as_secs() >= 1 {
                    let frametime = elapsed / frames;
                    let fps = 1_000_000 / frametime.as_micros();
                    let fps_max = 1_000_000 / frametime_min.as_micros();
                    let fps_min = 1_000_000 / frametime_max.as_micros();

                    print!("{:>4} frames | ", frames);
                    print!(
                        "frametime avg={:>5.2}ms min={:>5.2}ms max={:>5.2}ms | ",
                        frametime.as_secs_f32() * 1000.0,
                        frametime_min.as_secs_f32() * 1000.0,
                        frametime_max.as_secs_f32() * 1000.0,
                    );
                    print!(
                        "fps avg={:>5} min={:>5} max={:>5} | ",
                        fps, fps_min, fps_max
                    );
                    println!(
                        "{:>8} tris | {:>5} chunks",
                        triangle_count,
                        state.world.chunks.len()
                    );

                    elapsed = Duration::from_secs(0);
                    frames = 0;
                    frametime_min = Duration::from_secs(1000);
                    frametime_max = Duration::from_secs(0);
                }

                let dt = last_render_time.elapsed();
                let now = Instant::now();
                last_render_time = now;

                let render_time = match state.render() {
                    Err(root_cause) => {
                        match root_cause.downcast_ref::<wgpu::SurfaceError>() {
                            Some(wgpu::SurfaceError::Lost) => {
                                state.resize(state.window_size);
                            }
                            Some(wgpu::SurfaceError::OutOfMemory) => {
                                *control_flow = ControlFlow::Exit;
                            }
                            Some(wgpu::SurfaceError::Timeout) => {
                                eprintln!("TIMEOUT");
                            }
                            Some(wgpu::SurfaceError::Outdated) => {
                                eprintln!("OUTDATED");
                            }
                            None => {}
                        }
                        return;
                    }
                    Ok((triangle_count_, render_time)) => {
                        triangle_count = triangle_count_;
                        render_time
                    }
                };

                state.update(dt, render_time);
            }
            Event::MainEventsCleared => {
                window.request_redraw();
            }
            _ => {}
        }
    });
}
mod aabb; mod camera; mod geometry; mod geometry_buffers; mod hud; mod player; mod render_context; mod state; mod text_renderer; mod texture; mod time; mod utils; mod vertex; mod view; mod world; use std::time::{Duration, Instant}; use winit::{ dpi::{PhysicalSize, Size}, event::{ElementState, Event, KeyboardInput, MouseButton, VirtualKeyCode, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::{Window, WindowBuilder}, }; use crate::state::State; fn handle_window_event( event: &WindowEvent, state: &mut State, window: &Window, ) -> Option<ControlFlow> { match event { WindowEvent::CloseRequested => Some(ControlFlow::Exit), WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } => { let _ = window.set_cursor_grab(false); window.set_cursor_visible(true); state.mouse_grabbed = false; None } WindowEvent::Resized(physical_size) => { state.resize(*physical_size); None } WindowEvent::ScaleFactorChanged { new_inner_size, .. } => { state.resize(**new_inner_size); None } WindowEvent::MouseInput { state: mouse_state, button, .. } => { if !state.mouse_grabbed && *button == MouseButton::Left && *mouse_state == ElementState::Pressed { let _ = window.set_cursor_grab(true); window.set_cursor_visible(false); state.mouse_grabbed = true; } else { state.window_event(event); } None } WindowEvent::Focused(false) => { let _ = window.set_cursor_grab(false); window.set_cursor_visible(true); state.mouse_grabbed = false; None } event => { state.window_event(event); None } } } fn main() { env_logger::init(); let event_loop = EventLoop::new(); let window = WindowBuilder::new() .with_title("minecrab") .with_inner_size(Size::Physical(PhysicalSize { width: 1280, height: 720, })) .build(&event_loop) .unwrap(); let mut state = futures::executor::block_on(State::new(&window)); let mut frames = 0; let mut frame_instant = Instant::now(); let mut elapsed = Duration::from_secs(0); let mut frametime_min = Duration::from_secs(1000); let mut frametime_max = Duration::from_secs(0); let mut last_render_time = Instant::now(); let mut triangle_count = 0; event_loop.run(move |event, _, control_flow| { match event { Event::DeviceEvent { ref event, .. } => state.device_event(event), Event::WindowEvent { ref event, window_id, } if window_id == window.id() => { if let Some(cf) = handle_window_event(event, &mut state, &window) { *control_flow = cf } } Event::RedrawRequested(_) => { let frame_elapsed = frame_instant.elapsed(); frame_instant = Instant::now(); frametime_min = frametime_min.min(frame_elapsed); frametime_max = frametime_max.max(frame_elapsed); elapsed += frame_elapsed; frames += 1; if elapsed.as_secs() >= 1 { let frametime = elapsed / frames; let fps = 1_000_000 / frametime.as_micros(); let fps_max = 1_000_000 / frametime_min.as_micros(); let fps_min = 1_000_000 / frametime_max.as_micros(); print!("{:>4} frames | ", frames); print!( "frametime avg={:>5.2}ms min={:>5.2}ms max={:>5.2}ms | ", frametime.as_secs_f32() * 1000.0, frametime_min.as_secs_f32() * 1000.0, frametime_max.as_secs_f32() * 1000.0, );
print!( "fps avg={:>5} min={:>5} max={:>5} | ", fps, fps_min, fps_max ); println!( "{:>8} tris | {:>5} chunks", triangle_count, state.world.chunks.len() ); elapsed = Duration::from_secs(0); frames = 0; frametime_min = Duration::from_secs(1000); frametime_max = Duration::from_secs(0); } let dt = last_render_time.elapsed(); let now = Instant::now(); last_render_time = now; let render_time = match state.render() { Err(root_cause) => { match root_cause.downcast_ref::<wgpu::SurfaceError>() { Some(wgpu::SurfaceError::Lost) => { state.resize(state.window_size); } Some(wgpu::SurfaceError::OutOfMemory) => { *control_flow = ControlFlow::Exit; } Some(wgpu::SurfaceError::Timeout) => { eprintln!("TIMEOUT"); } Some(wgpu::SurfaceError::Outdated) => { eprintln!("OUTDATED"); } None => {} } return; } Ok((triangle_count_, render_time)) => { triangle_count = triangle_count_; render_time } }; state.update(dt, render_time); } Event::MainEventsCleared => { window.request_redraw(); } _ => {} } }); }
function_block-function_prefix_line
[ { "content": "use wgpu::{CommandEncoder, RenderPipeline};\n\n\n\nuse crate::{\n\n render_context::RenderContext,\n\n vertex::{HudVertex, Vertex},\n\n world::block::BlockType,\n\n};\n\n\n\nuse self::{debug_hud::DebugHud, hotbar_hud::HotbarHud, widgets_hud::WidgetsHud};\n\n\n\nuse std::borrow::Cow;\n\n\n\npub mod debug_hud;\n\npub mod hotbar_hud;\n\npub mod widgets_hud;\n\n\n\n// TODO update aspect ratio when resizing\n\npub const UI_SCALE_X: f32 = 0.0045;\n\npub const UI_SCALE_Y: f32 = 0.008;\n\n\n", "file_path": "src/hud/mod.rs", "rank": 2, "score": 50965.45283293996 }, { "content": " render_context: &RenderContext,\n\n encoder: &mut CommandEncoder,\n\n texture_view: &wgpu::TextureView,\n\n ) -> usize {\n\n let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n label: Some(\"HUD render pass\"),\n\n color_attachments: &[wgpu::RenderPassColorAttachment {\n\n view: texture_view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Load,\n\n store: true,\n\n },\n\n }],\n\n depth_stencil_attachment: None,\n\n });\n\n render_pass.set_pipeline(&self.pipeline);\n\n\n\n self.widgets_hud.render(&mut render_pass)\n\n + self.debug_hud.render(&mut render_pass)\n\n + self.hotbar_hud.render(render_context, &mut render_pass)\n\n }\n\n\n\n pub fn selected_block(&self) -> Option<BlockType> {\n\n // TODO The hotbar widget should be rendered by HotbarHud\n\n self.hotbar_hud.blocks[self.widgets_hud.hotbar_cursor_position]\n\n }\n\n}\n", "file_path": "src/hud/mod.rs", "rank": 3, "score": 50965.244308712114 }, { "content": " write_mask: wgpu::ColorWrites::ALL,\n\n }],\n\n }),\n\n primitive: wgpu::PrimitiveState::default(),\n\n depth_stencil: None,\n\n multisample: wgpu::MultisampleState::default(),\n\n })\n\n }\n\n\n\n pub fn update(\n\n &mut self,\n\n render_context: &crate::render_context::RenderContext,\n\n camera: &crate::camera::Camera,\n\n ) {\n\n self.debug_hud.update(render_context, &camera.position);\n\n self.hotbar_hud.update(render_context);\n\n }\n\n\n\n pub fn render<'a>(\n\n &'a self,\n", "file_path": "src/hud/mod.rs", "rank": 4, "score": 50963.45195260811 }, { "content": " bind_group_layouts: &[&bind_group_layout],\n\n push_constant_ranges: &[],\n\n });\n\n\n\n render_context\n\n .device\n\n .create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(\"UI render pipeline\"),\n\n layout: Some(&pipeline_layout),\n\n vertex: wgpu::VertexState {\n\n module,\n\n entry_point: \"main\",\n\n buffers: &[HudVertex::descriptor()],\n\n },\n\n fragment: Some(wgpu::FragmentState {\n\n module,\n\n entry_point: \"main\",\n\n targets: &[wgpu::ColorTargetState {\n\n format: render_context.format,\n\n blend: Some(wgpu::BlendState::ALPHA_BLENDING),\n", "file_path": "src/hud/mod.rs", "rank": 5, "score": 50961.076046354334 }, { "content": " let bind_group_layout =\n\n render_context\n\n .device\n\n .create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"GUI texture bind group layout\"),\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler {\n\n comparison: false,\n\n filtering: true,\n\n },\n\n count: None,\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Texture {\n\n sample_type: wgpu::TextureSampleType::Float { filterable: true },\n", "file_path": "src/hud/mod.rs", "rank": 6, "score": 50956.86205105584 }, { "content": " view_dimension: 
wgpu::TextureViewDimension::D2Array,\n\n multisampled: false,\n\n },\n\n count: None,\n\n },\n\n ],\n\n });\n\n\n\n let module = &render_context\n\n .device\n\n .create_shader_module(&wgpu::ShaderModuleDescriptor {\n\n label: Some(\"UI shader\"),\n\n source: wgpu::ShaderSource::Wgsl(Cow::Borrowed(include_str!(\"../shaders/ui.wgsl\"))),\n\n });\n\n\n\n let pipeline_layout =\n\n render_context\n\n .device\n\n .create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: Some(\"UI render pipeline layout\"),\n", "file_path": "src/hud/mod.rs", "rank": 7, "score": 50956.23854843734 }, { "content": "pub struct Hud {\n\n pub widgets_hud: WidgetsHud,\n\n pub debug_hud: DebugHud,\n\n pub hotbar_hud: HotbarHud,\n\n\n\n pub pipeline: RenderPipeline,\n\n}\n\n\n\nimpl Hud {\n\n pub fn new(render_context: &RenderContext) -> Self {\n\n Self {\n\n widgets_hud: WidgetsHud::new(render_context),\n\n debug_hud: DebugHud::new(render_context),\n\n hotbar_hud: HotbarHud::new(render_context),\n\n\n\n pipeline: Self::create_render_pipeline(render_context),\n\n }\n\n }\n\n\n\n fn create_render_pipeline(render_context: &RenderContext) -> wgpu::RenderPipeline {\n", "file_path": "src/hud/mod.rs", "rank": 8, "score": 50954.20178384114 }, { "content": "pub mod block;\n\npub mod chunk;\n\npub mod face_flags;\n\npub mod npc;\n\npub mod quad;\n\n\n\nuse std::{\n\n borrow::Cow,\n\n collections::VecDeque,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse crate::{\n\n camera::Camera,\n\n render_context::RenderContext,\n\n texture::Texture,\n\n time::Time,\n\n vertex::{BlockVertex, Vertex},\n\n view::View,\n\n world::{\n", "file_path": "src/world/mod.rs", "rank": 9, "score": 49532.04215283189 }, { "content": " depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {\n\n view: &self.depth_texture.view,\n\n depth_ops: Some(wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(1.0),\n\n store: true,\n\n }),\n\n stencil_ops: None,\n\n }),\n\n });\n\n render_pass.set_pipeline(&self.render_pipeline);\n\n\n\n let texture_manager = render_context.texture_manager.as_ref().unwrap();\n\n render_pass.set_bind_group(0, texture_manager.bind_group.as_ref().unwrap(), &[]);\n\n render_pass.set_bind_group(1, &view.bind_group, &[]);\n\n render_pass.set_bind_group(2, &self.time_bind_group, &[]);\n\n\n\n let visible = self.chunks_visible.as_ref().unwrap();\n\n let mut triangle_count = 0;\n\n for position in visible {\n\n let chunk = self.chunks.get(position).unwrap();\n", "file_path": "src/world/mod.rs", "rank": 10, "score": 49523.09540326342 }, { "content": " triangle_count += chunk.render(&mut render_pass, position, view);\n\n }\n\n triangle_count += self.npc.render(&mut render_pass);\n\n triangle_count\n\n }\n\n\n\n pub fn new(render_context: &RenderContext, view: &View) -> Self {\n\n let chunks = FxHashMap::default();\n\n let mut npc = Npc::new();\n\n npc.load_geometry(render_context);\n\n\n\n let chunk_database = sled::Config::new()\n\n .path(\"chunks\")\n\n .mode(sled::Mode::HighThroughput)\n\n .use_compression(true)\n\n .open()\n\n .unwrap();\n\n\n\n let time = Time::new();\n\n\n", "file_path": "src/world/mod.rs", "rank": 11, "score": 49521.254233253385 }, { "content": " block::{Block, BlockType},\n\n chunk::{Chunk, CHUNK_ISIZE, CHUNK_SIZE},\n\n npc::Npc,\n\n },\n\n};\n\nuse cgmath::{EuclideanSpace, InnerSpace, Point3, Vector3};\n\nuse fxhash::FxHashMap;\n\nuse wgpu::{\n\n util::{BufferInitDescriptor, DeviceExt},\n\n BindGroup, Buffer, CommandEncoder, RenderPipeline,\n\n};\n\n\n\npub struct World {\n\n pub render_pipeline: 
RenderPipeline,\n\n pub depth_texture: Texture,\n\n\n\n pub time: Time,\n\n pub time_buffer: Buffer,\n\n pub time_bind_group: BindGroup,\n\n\n", "file_path": "src/world/mod.rs", "rank": 12, "score": 49519.610143703896 }, { "content": " &mut self,\n\n render_context: &RenderContext,\n\n camera: &Camera,\n\n block_type: BlockType,\n\n ) {\n\n if let Some((pos, face_normal)) = self.raycast(camera.position, camera.direction()) {\n\n let new_pos = (pos.cast().unwrap() + face_normal).cast().unwrap();\n\n self.set_block(new_pos.x, new_pos.y, new_pos.z, Some(Block { block_type }));\n\n self.update_chunk_geometry(render_context, pos / CHUNK_ISIZE);\n\n }\n\n }\n\n\n\n pub fn get_block(&self, point: Point3<isize>) -> Option<&Block> {\n\n let chunk = match self.chunks.get(&point.map(|x| x.div_euclid(CHUNK_ISIZE))) {\n\n Some(chunk) => chunk,\n\n None => return None,\n\n };\n\n\n\n let b = point.map(|x| x.rem_euclid(CHUNK_ISIZE) as usize);\n\n chunk.blocks[b.y][b.z][b.x].as_ref()\n", "file_path": "src/world/mod.rs", "rank": 13, "score": 49519.45292692447 }, { "content": "impl World {\n\n #[allow(clippy::collapsible_else_if)]\n\n pub fn update(\n\n &mut self,\n\n render_context: &RenderContext,\n\n dt: Duration,\n\n render_time: Duration,\n\n camera: &Camera,\n\n ) {\n\n self.time.time += dt.as_secs_f32();\n\n render_context\n\n .queue\n\n .write_buffer(&self.time_buffer, 0, bytemuck::cast_slice(&[self.time]));\n\n\n\n self.update_highlight(render_context, camera);\n\n\n\n // Queue up new chunks for loading, if necessary\n\n let camera_pos: Point3<isize> = camera.position.cast().unwrap();\n\n let camera_chunk: Point3<isize> = camera_pos.map(|n| n.div_euclid(CHUNK_ISIZE));\n\n let mut load_queue = Vec::new();\n", "file_path": "src/world/mod.rs", "rank": 14, "score": 49518.57661373179 }, { "content": " count: None,\n\n }],\n\n label: Some(\"time_bind_group_layout\"),\n\n });\n\n\n\n let time_bind_group = render_context\n\n .device\n\n .create_bind_group(&wgpu::BindGroupDescriptor {\n\n layout: &time_bind_group_layout,\n\n entries: &[wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: time_buffer.as_entire_binding(),\n\n }],\n\n label: Some(\"time_bind_group\"),\n\n });\n\n\n\n let texture_manager = render_context.texture_manager.as_ref().unwrap();\n\n let render_pipeline_layout =\n\n render_context\n\n .device\n", "file_path": "src/world/mod.rs", "rank": 15, "score": 49517.51781033568 }, { "content": " } else {\n\n eprintln!(\"Tried to save unloaded chunk {:?}\", position);\n\n }\n\n } else {\n\n break;\n\n }\n\n\n\n chunk_updates += 1;\n\n }\n\n\n\n if chunk_updates > 0 {\n\n self.chunk_occlusion_position = None;\n\n }\n\n }\n\n\n\n pub fn render<'a>(\n\n &'a mut self,\n\n render_context: &RenderContext,\n\n render_encoder: &mut CommandEncoder,\n\n texture_view: &wgpu::TextureView,\n", "file_path": "src/world/mod.rs", "rank": 16, "score": 49517.045254138306 }, { "content": " unload_timer: Duration::ZERO,\n\n }\n\n }\n\n\n\n pub fn update_occlusion(&mut self, view: &View) {\n\n let initial_position = view\n\n .camera\n\n .position\n\n .map(|x| (x.floor() as isize).div_euclid(CHUNK_ISIZE));\n\n\n\n if self.chunk_occlusion_position == Some(initial_position) {\n\n return;\n\n }\n\n\n\n self.chunk_occlusion_position = Some(initial_position);\n\n let mut queue = VecDeque::from(vec![initial_position]);\n\n\n\n assert_eq!(CHUNK_SIZE, 32);\n\n let mut visited = [0u32; CHUNK_SIZE * CHUNK_SIZE];\n\n let mut render_queue = Vec::new();\n", "file_path": "src/world/mod.rs", "rank": 17, "score": 49516.99961772067 }, { 
"content": " view: &View,\n\n ) -> usize {\n\n // TODO Move this to update\n\n self.update_occlusion(view);\n\n\n\n let mut render_pass = render_encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n label: Some(\"render_pass\"),\n\n color_attachments: &[wgpu::RenderPassColorAttachment {\n\n view: texture_view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(wgpu::Color {\n\n r: 0.502,\n\n g: 0.663,\n\n b: 0.965,\n\n a: 1.0,\n\n }),\n\n store: true,\n\n },\n\n }],\n", "file_path": "src/world/mod.rs", "rank": 18, "score": 49516.92414826657 }, { "content": " let start = Instant::now() - render_time;\n\n let mut chunk_updates = 0;\n\n while chunk_updates == 0 || start.elapsed() < Duration::from_millis(15) {\n\n if let Some(position) = self.chunk_load_queue.pop_front() {\n\n let chunk = self.chunks.entry(position).or_default();\n\n match chunk.load(position, &self.chunk_database) {\n\n Err(error) => {\n\n eprintln!(\"Failed to load/generate chunk {:?}: {:?}\", position, error)\n\n }\n\n Ok(true) => {\n\n self.update_chunk_geometry(render_context, position);\n\n self.enqueue_chunk_save(position, false);\n\n if DEBUG_IO {\n\n println!(\"Generated chunk {:?}\", position);\n\n }\n\n }\n\n Ok(false) => {\n\n self.update_chunk_geometry(render_context, position);\n\n if DEBUG_IO {\n\n println!(\"Loaded chunk {:?}\", position);\n", "file_path": "src/world/mod.rs", "rank": 19, "score": 49515.90335355057 }, { "content": " pub fn update_chunk_geometry(\n\n &mut self,\n\n render_context: &RenderContext,\n\n chunk_position: Point3<isize>,\n\n ) {\n\n let chunk = self.chunks.get_mut(&chunk_position).unwrap();\n\n chunk.update_geometry(render_context, chunk_position, self.highlighted);\n\n }\n\n\n\n fn update_highlight(&mut self, render_context: &RenderContext, camera: &Camera) {\n\n let old = self.highlighted;\n\n let new = self.raycast(camera.position, camera.direction());\n\n\n\n let old_chunk = old.map(|(pos, _)| pos.map(|n| n.div_euclid(CHUNK_ISIZE)));\n\n let new_chunk = new.map(|(pos, _)| pos.map(|n| n.div_euclid(CHUNK_ISIZE)));\n\n\n\n if old != new {\n\n self.highlighted = new;\n\n\n\n if let Some(old_chunk_) = old_chunk {\n", "file_path": "src/world/mod.rs", "rank": 20, "score": 49515.397628380924 }, { "content": " self.update_chunk_geometry(render_context, old_chunk_);\n\n }\n\n\n\n if let Some(new_chunk_) = new_chunk {\n\n // Don't update the same chunk twice\n\n if old_chunk != new_chunk {\n\n self.update_chunk_geometry(render_context, new_chunk_);\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn break_at_crosshair(&mut self, render_context: &RenderContext, camera: &Camera) {\n\n if let Some((pos, _)) = self.raycast(camera.position, camera.direction()) {\n\n self.set_block(pos.x as isize, pos.y as isize, pos.z as isize, None);\n\n self.update_chunk_geometry(render_context, pos / CHUNK_ISIZE);\n\n }\n\n }\n\n\n\n pub fn place_at_crosshair(\n", "file_path": "src/world/mod.rs", "rank": 21, "score": 49515.3418619158 }, { "content": " render_context\n\n .device\n\n .create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(\"Render Pipeline\"),\n\n layout: Some(&render_pipeline_layout),\n\n vertex: wgpu::VertexState {\n\n module: &shader,\n\n entry_point: \"main\",\n\n buffers: &[BlockVertex::descriptor()],\n\n },\n\n fragment: Some(wgpu::FragmentState {\n\n module: &shader,\n\n entry_point: \"main\",\n\n targets: &[wgpu::ColorTargetState {\n\n format: render_context.format,\n\n blend: Some(wgpu::BlendState {\n\n alpha: wgpu::BlendComponent::REPLACE,\n\n 
color: wgpu::BlendComponent::REPLACE,\n\n }),\n\n write_mask: wgpu::ColorWrites::ALL,\n", "file_path": "src/world/mod.rs", "rank": 22, "score": 49514.46939526927 }, { "content": " render_pipeline,\n\n\n\n time,\n\n time_buffer,\n\n time_bind_group,\n\n\n\n depth_texture,\n\n\n\n npc,\n\n\n\n chunks,\n\n chunk_database,\n\n chunk_load_queue: VecDeque::new(),\n\n chunk_save_queue: VecDeque::new(),\n\n chunk_generate_queue: VecDeque::new(),\n\n chunk_occlusion_position: None,\n\n chunks_visible: None,\n\n\n\n highlighted: None,\n\n\n", "file_path": "src/world/mod.rs", "rank": 23, "score": 49514.35829243764 }, { "content": " }],\n\n }),\n\n primitive: wgpu::PrimitiveState {\n\n cull_mode: Some(wgpu::Face::Back),\n\n polygon_mode: wgpu::PolygonMode::Fill,\n\n ..wgpu::PrimitiveState::default()\n\n },\n\n depth_stencil: Some(wgpu::DepthStencilState {\n\n format: Texture::DEPTH_FORMAT,\n\n depth_write_enabled: true,\n\n depth_compare: wgpu::CompareFunction::Less,\n\n stencil: wgpu::StencilState::default(),\n\n bias: wgpu::DepthBiasState::default(),\n\n }),\n\n multisample: wgpu::MultisampleState::default(),\n\n });\n\n\n\n let depth_texture = Texture::create_depth_texture(render_context, \"depth_texture\");\n\n\n\n Self {\n", "file_path": "src/world/mod.rs", "rank": 24, "score": 49514.34480306875 }, { "content": " if self.unload_timer.as_secs() >= 10 {\n\n self.unload_timer = Duration::ZERO;\n\n\n\n let camera_pos = camera.position.to_vec();\n\n let unload_distance = (RENDER_DISTANCE * CHUNK_ISIZE) as f32 * 1.5;\n\n\n\n let mut unload_chunks = Vec::new();\n\n for point in self.chunks.keys() {\n\n let pos: Point3<f32> = (point * CHUNK_ISIZE).cast().unwrap();\n\n if (pos.x - camera_pos.x).abs() > unload_distance\n\n || (pos.z - camera_pos.z).abs() > unload_distance\n\n {\n\n unload_chunks.push(*point);\n\n }\n\n }\n\n for point in unload_chunks {\n\n self.enqueue_chunk_save(point, true);\n\n }\n\n }\n\n\n", "file_path": "src/world/mod.rs", "rank": 25, "score": 49514.257932968394 }, { "content": " let time_buffer = render_context\n\n .device\n\n .create_buffer_init(&BufferInitDescriptor {\n\n label: Some(\"time_buffer\"),\n\n contents: bytemuck::cast_slice(&[time]),\n\n usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,\n\n });\n\n\n\n let time_bind_group_layout =\n\n render_context\n\n .device\n\n .create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n", "file_path": "src/world/mod.rs", "rank": 26, "score": 49513.163373180585 }, { "content": " .create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: Some(\"render_pipeline_layout\"),\n\n push_constant_ranges: &[],\n\n bind_group_layouts: &[\n\n &texture_manager.bind_group_layout,\n\n &view.bind_group_layout,\n\n &time_bind_group_layout,\n\n ],\n\n });\n\n\n\n let shader = render_context.device.create_shader_module(\n\n &(wgpu::ShaderModuleDescriptor {\n\n label: Some(\"shader\"),\n\n source: wgpu::ShaderSource::Wgsl(Cow::Borrowed(include_str!(\n\n \"../shaders/world.wgsl\"\n\n ))),\n\n }),\n\n );\n\n\n\n let render_pipeline =\n", "file_path": "src/world/mod.rs", "rank": 27, "score": 49512.654840009054 }, { "content": " for (x, y, z) in itertools::iproduct!(\n\n -RENDER_DISTANCE..RENDER_DISTANCE,\n\n 0..WORLD_HEIGHT,\n\n 
-RENDER_DISTANCE..RENDER_DISTANCE\n\n ) {\n\n let point: Point3<isize> = Point3::new(x + camera_chunk.x, y, z + camera_chunk.z);\n\n if !self.chunks.contains_key(&point) && !self.chunk_load_queue.contains(&point) {\n\n load_queue.push(point);\n\n }\n\n }\n\n\n\n // TODO Sort based on where camera is looking\n\n load_queue.sort_unstable_by_key(|f| {\n\n (f.x * CHUNK_ISIZE - camera_pos.x).abs() + (f.y * CHUNK_ISIZE - camera_pos.y).abs()\n\n });\n\n\n\n self.chunk_load_queue.extend(load_queue);\n\n\n\n // Unload chunks that are far away\n\n self.unload_timer += dt;\n", "file_path": "src/world/mod.rs", "rank": 28, "score": 49512.429718338746 }, { "content": "\n\n if self.get_block(position.cast().unwrap()).is_some() {\n\n // Intersection occurred\n\n return Some((position.cast().unwrap(), face));\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n", "file_path": "src/world/mod.rs", "rank": 29, "score": 49512.28644908641 }, { "content": "\n\n while !queue.is_empty() {\n\n let position = queue.pop_front().unwrap();\n\n\n\n let b = position.map(|x| x.rem_euclid(CHUNK_ISIZE) as usize);\n\n if (visited[b.x * CHUNK_SIZE + b.y] >> b.z) & 1 == 1 {\n\n continue;\n\n }\n\n visited[b.x * CHUNK_SIZE + b.y] |= 1 << b.z;\n\n\n\n if let Some(chunk) = self.chunks.get(&position) {\n\n render_queue.push(position);\n\n if !chunk.full {\n\n queue.extend([\n\n position + Vector3::unit_x(),\n\n position - Vector3::unit_x(),\n\n position + Vector3::unit_y(),\n\n position - Vector3::unit_y(),\n\n position + Vector3::unit_z(),\n\n position - Vector3::unit_z(),\n", "file_path": "src/world/mod.rs", "rank": 30, "score": 49510.08061517101 }, { "content": " };\n\n\n\n let mut face;\n\n\n\n while lengths.magnitude2() < 100.0_f32.powi(2) {\n\n if lengths.x < lengths.y && lengths.x < lengths.z {\n\n lengths.x += scale.x;\n\n position.x += step.x;\n\n face = Vector3::unit_x() * -step.x;\n\n } else if lengths.y < lengths.x && lengths.y < lengths.z {\n\n lengths.y += scale.y;\n\n position.y += step.y;\n\n face = Vector3::unit_y() * -step.y;\n\n } else if lengths.z < lengths.x && lengths.z < lengths.y {\n\n lengths.z += scale.z;\n\n position.z += step.z;\n\n face = Vector3::unit_z() * -step.z;\n\n } else {\n\n return None;\n\n }\n", "file_path": "src/world/mod.rs", "rank": 31, "score": 49509.44426226408 }, { "content": " pub npc: Npc,\n\n\n\n pub chunks: FxHashMap<Point3<isize>, Chunk>,\n\n pub chunk_database: sled::Db,\n\n pub chunk_save_queue: VecDeque<(Point3<isize>, bool)>,\n\n pub chunk_load_queue: VecDeque<Point3<isize>>,\n\n pub chunk_generate_queue: VecDeque<Point3<isize>>,\n\n pub chunk_occlusion_position: Option<Point3<isize>>,\n\n pub chunks_visible: Option<Vec<Point3<isize>>>,\n\n\n\n pub highlighted: Option<(Point3<isize>, Vector3<i32>)>,\n\n\n\n pub unload_timer: Duration,\n\n}\n\n\n\npub const RENDER_DISTANCE: isize = 8;\n\npub const WORLD_HEIGHT: isize = 16 * 16 / CHUNK_ISIZE;\n\n\n\nconst DEBUG_IO: bool = false;\n\n\n", "file_path": "src/world/mod.rs", "rank": 32, "score": 49508.43856234304 }, { "content": " ]);\n\n }\n\n }\n\n }\n\n\n\n self.chunks_visible = Some(render_queue);\n\n }\n\n\n\n pub fn enqueue_chunk_save(&mut self, position: Point3<isize>, unload: bool) {\n\n if let Some((_, unload_)) = self\n\n .chunk_save_queue\n\n .iter_mut()\n\n .find(|(pos, _)| pos == &position)\n\n {\n\n *unload_ = *unload_ || unload;\n\n } else {\n\n self.chunk_save_queue.push_back((position, unload));\n\n }\n\n }\n\n\n", "file_path": "src/world/mod.rs", "rank": 33, "score": 49508.35864549752 }, { "content": " let mut position: Point3<i32> 
= origin.map(|x| x.floor() as i32);\n\n let step = direction.map(|x| x.signum() as i32);\n\n\n\n // Truncate the origin\n\n let mut lengths = Vector3 {\n\n x: if direction.x < 0.0 {\n\n (origin.x - position.x as f32) * scale.x\n\n } else {\n\n (position.x as f32 + 1.0 - origin.x) * scale.x\n\n },\n\n y: if direction.y < 0.0 {\n\n (origin.y - position.y as f32) * scale.y\n\n } else {\n\n (position.y as f32 + 1.0 - origin.y) * scale.y\n\n },\n\n z: if direction.z < 0.0 {\n\n (origin.z - position.z as f32) * scale.z\n\n } else {\n\n (position.z as f32 + 1.0 - origin.z) * scale.z\n\n },\n", "file_path": "src/world/mod.rs", "rank": 34, "score": 49508.11806046528 }, { "content": " }\n\n\n\n pub fn set_block(&mut self, x: isize, y: isize, z: isize, block: Option<Block>) {\n\n let chunk_position = Point3::new(\n\n x.div_euclid(CHUNK_ISIZE),\n\n y.div_euclid(CHUNK_ISIZE),\n\n z.div_euclid(CHUNK_ISIZE),\n\n );\n\n\n\n if let Some(chunk) = self.chunks.get_mut(&chunk_position) {\n\n let bx = x.rem_euclid(CHUNK_ISIZE) as usize;\n\n let by = y.rem_euclid(CHUNK_ISIZE) as usize;\n\n let bz = z.rem_euclid(CHUNK_ISIZE) as usize;\n\n chunk.blocks[by][bz][bx] = block;\n\n }\n\n\n\n self.enqueue_chunk_save(chunk_position, false);\n\n }\n\n\n\n fn calc_scale(vector: Vector3<f32>, scalar: f32) -> f32 {\n", "file_path": "src/world/mod.rs", "rank": 35, "score": 49507.720816950234 }, { "content": " if scalar == 0.0 {\n\n f32::INFINITY\n\n } else {\n\n (vector / scalar).magnitude()\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn raycast(\n\n &self,\n\n origin: Point3<f32>,\n\n direction: Vector3<f32>,\n\n ) -> Option<(Point3<isize>, Vector3<i32>)> {\n\n let direction = direction.normalize();\n\n let scale = Vector3::new(\n\n Self::calc_scale(direction, direction.x),\n\n Self::calc_scale(direction, direction.y),\n\n Self::calc_scale(direction, direction.z),\n\n );\n\n\n", "file_path": "src/world/mod.rs", "rank": 36, "score": 49505.32593750773 }, { "content": " }\n\n }\n\n }\n\n } else if let Some((position, unload)) = self.chunk_save_queue.pop_front() {\n\n if let Some(chunk) = self.chunks.get(&position) {\n\n if let Err(err) = chunk.save(position, &self.chunk_database) {\n\n eprintln!(\"Failed to save chunk {:?}: {:?}\", position, err);\n\n } else {\n\n if unload {\n\n self.chunks.remove(&position);\n\n\n\n if DEBUG_IO {\n\n println!(\"Saved and unloaded chunk {:?}\", position);\n\n }\n\n } else {\n\n if DEBUG_IO {\n\n println!(\"Saved chunk {:?}\", position);\n\n }\n\n }\n\n }\n", "file_path": "src/world/mod.rs", "rank": 37, "score": 49505.32593750773 }, { "content": "/// Returns `x` incremented with the lowest possible value that a\n\n/// single-precision floating point with `x`'s value can represent.\n\npub fn f32_successor(x: f32) -> f32 {\n\n let x = x.to_bits();\n\n let x = if (x >> 31) == 0 { x + 1 } else { x - 1 };\n\n f32::from_bits(x)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 38, "score": 45356.36104831536 }, { "content": "/// Returns `x` decremented with the lowest possible value that a\n\n/// single-precision floating point with `x`'s value can represent.\n\npub fn f32_predecessor(x: f32) -> f32 {\n\n let x = x.to_bits();\n\n let x = if (x >> 31) == 0 { x - 1 } else { x + 1 };\n\n f32::from_bits(x)\n\n}\n", "file_path": "src/utils.rs", "rank": 39, "score": 45356.36104831536 }, { "content": "pub trait Vertex {\n\n fn descriptor() -> wgpu::VertexBufferLayout<'static>;\n\n}\n\n\n\n/// Represents a vertex in HUD geometry.\n\n///\n\n/// Used to bind vertex information to shaders with a 2D position, 
texture\n\n/// coordinates and index (for texture arrays) and a value (for dimming e.g.\n\n/// the sides on blocks in inventories)\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug, Default, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct HudVertex {\n\n pub position: [f32; 2],\n\n pub texture_coordinates: [f32; 2],\n\n pub texture_index: i32,\n\n pub color: [f32; 4],\n\n}\n\n\n\nconst HUD_VERTEX_ATTRIBUTES: &[VertexAttribute] = &wgpu::vertex_attr_array![\n\n 0 => Float32x2,\n", "file_path": "src/vertex.rs", "rank": 40, "score": 40547.855572252694 }, { "content": " let texture_manager = render_context.texture_manager.as_ref().unwrap();\n\n\n\n render_pass.set_bind_group(0, texture_manager.bind_group.as_ref().unwrap(), &[]);\n\n self.geometry_buffers.apply_buffers(render_pass);\n\n self.geometry_buffers.draw_indexed(render_pass)\n\n }\n\n\n\n fn block_vertices(&self) -> Geometry<HudVertex, u16> {\n\n let mut vertices = Vec::new();\n\n let mut indices = Vec::new();\n\n\n\n let mut index_offset = 0;\n\n for slot in 0..9 {\n\n if let Some(block) = self.blocks[slot as usize] {\n\n let x = (-92 + 20 * slot as i32) as f32;\n\n let texture_indices = block.texture_indices();\n\n let color = block.color();\n\n\n\n let color_left = color\n\n .mul_element_wise(Vector4::new(0.5, 0.5, 0.5, 1.0))\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 41, "score": 32506.662240542508 }, { "content": "use std::time::{Duration, Instant};\n\n\n\nuse cgmath::Point3;\n\nuse wgpu::RenderPass;\n\n\n\nuse crate::{\n\n geometry_buffers::GeometryBuffers,\n\n render_context::RenderContext,\n\n text_renderer::{self, TextRenderer},\n\n};\n\n\n\npub struct DebugHud {\n\n text_renderer: TextRenderer,\n\n\n\n fps_instant: Instant,\n\n fps_elapsed: Duration,\n\n fps_frames: u32,\n\n fps_geometry_buffers: GeometryBuffers<u16>,\n\n\n\n coordinates_last: Point3<f32>,\n", "file_path": "src/hud/debug_hud.rs", "rank": 42, "score": 32505.798190638656 }, { "content": "// TODO Might want to move the hotbar outside\n\nuse wgpu::{BindGroup, BufferUsages, RenderPass};\n\n\n\nuse crate::{\n\n geometry::Geometry,\n\n geometry_buffers::GeometryBuffers,\n\n hud::{UI_SCALE_X, UI_SCALE_Y},\n\n render_context::RenderContext,\n\n texture::Texture,\n\n vertex::{HudVertex, Vertex},\n\n};\n\n\n\npub struct WidgetsHud {\n\n texture_bind_group: BindGroup,\n\n geometry_buffers: GeometryBuffers<u16>,\n\n pub hotbar_cursor_position: usize,\n\n}\n\n\n\nimpl WidgetsHud {\n\n pub fn new(render_context: &RenderContext) -> Self {\n", "file_path": "src/hud/widgets_hud.rs", "rank": 43, "score": 32504.860610595715 }, { "content": "use cgmath::{ElementWise, Vector4};\n\nuse wgpu::{BufferUsages, RenderPass};\n\n\n\nuse crate::{\n\n geometry::Geometry,\n\n geometry_buffers::GeometryBuffers,\n\n hud::{UI_SCALE_X, UI_SCALE_Y},\n\n render_context::RenderContext,\n\n vertex::HudVertex,\n\n world::block::BlockType,\n\n};\n\n\n\npub struct HotbarHud {\n\n pub blocks: [Option<BlockType>; 9],\n\n pub last_blocks: [Option<BlockType>; 9],\n\n\n\n pub geometry_buffers: GeometryBuffers<u16>,\n\n}\n\n\n\nimpl HotbarHud {\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 44, "score": 32504.233017848237 }, { "content": " HudVertex { position: [UI_SCALE_X * (x + 24.0), -1.0 + UI_SCALE_Y * 23.0], texture_coordinates: [ 24.0 / 256.0, 22.0 / 256.0], texture_index, color },\n\n HudVertex { position: [UI_SCALE_X * (x + 24.0), -1.0 + UI_SCALE_Y * -1.0], texture_coordinates: [ 24.0 / 256.0, 46.0 / 256.0], texture_index, color },\n\n HudVertex { position: [UI_SCALE_X * (x ), -1.0 + UI_SCALE_Y 
* -1.0], texture_coordinates: [ 0.0 / 256.0, 46.0 / 256.0], texture_index, color },\n\n ];\n\n\n\n render_context.queue.write_buffer(\n\n &self.geometry_buffers.vertices,\n\n HudVertex::descriptor().array_stride * 8,\n\n bytemuck::cast_slice(&vertices),\n\n );\n\n }\n\n\n\n pub fn render<'a>(&'a self, render_pass: &mut RenderPass<'a>) -> usize {\n\n // Render the HUD elements\n\n self.geometry_buffers.apply_buffers(render_pass);\n\n render_pass.set_bind_group(0, &self.texture_bind_group, &[]);\n\n self.geometry_buffers.draw_indexed(render_pass);\n\n render_pass.draw_indexed(0..self.geometry_buffers.index_count as u32, 0, 0..1);\n\n\n\n INDICES.len() / 3\n", "file_path": "src/hud/widgets_hud.rs", "rank": 45, "score": 32503.02218497396 }, { "content": " }\n\n }\n\n\n\n pub fn update(&mut self, render_context: &RenderContext, position: &Point3<f32>) {\n\n let elapsed = self.fps_instant.elapsed();\n\n self.fps_instant = Instant::now();\n\n self.fps_elapsed += elapsed;\n\n self.fps_frames += 1;\n\n\n\n if self.fps_elapsed.as_millis() >= 500 {\n\n let frametime = self.fps_elapsed / self.fps_frames;\n\n let fps = 1.0 / frametime.as_secs_f32();\n\n\n\n let string = format!(\"{:<5.0} fps\", fps);\n\n self.fps_geometry_buffers =\n\n self.text_renderer\n\n .string_to_buffers(render_context, -0.98, 0.97, &string);\n\n\n\n self.fps_elapsed = Duration::from_secs(0);\n\n self.fps_frames = 0;\n", "file_path": "src/hud/debug_hud.rs", "rank": 46, "score": 32502.479212509363 }, { "content": " coordinates_geometry_buffers: GeometryBuffers<u16>,\n\n}\n\n\n\nimpl DebugHud {\n\n pub fn new(render_context: &RenderContext) -> Self {\n\n let text_renderer = TextRenderer::new(render_context).unwrap();\n\n let fps_geometry_buffers = text_renderer.string_to_buffers(render_context, -0.98, 0.97, \"\");\n\n let coordinates_geometry_buffers =\n\n text_renderer.string_to_buffers(render_context, -0.98, 0.97 - text_renderer::DY, \"\");\n\n\n\n Self {\n\n text_renderer,\n\n\n\n fps_instant: Instant::now(),\n\n fps_elapsed: Duration::default(),\n\n fps_frames: 0,\n\n fps_geometry_buffers,\n\n\n\n coordinates_last: Point3::new(0.0, 0.0, 0.0),\n\n coordinates_geometry_buffers,\n", "file_path": "src/hud/debug_hud.rs", "rank": 47, "score": 32500.770026603364 }, { "content": " binding: 0,\n\n visibility: wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler {\n\n comparison: false,\n\n filtering: true,\n\n },\n\n count: None,\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Texture {\n\n sample_type: wgpu::TextureSampleType::Float { filterable: true },\n\n view_dimension: wgpu::TextureViewDimension::D2Array,\n\n multisampled: false,\n\n },\n\n count: None,\n\n },\n\n ],\n\n });\n", "file_path": "src/hud/widgets_hud.rs", "rank": 48, "score": 32500.662426782357 }, { "content": " }\n\n}\n\n\n\n#[rustfmt::skip]\n\npub const VERTICES: [HudVertex; 12] = [\n\n // Crosshair\n\n HudVertex { position: [UI_SCALE_X * -8.0, UI_SCALE_Y * 8.0], texture_coordinates: [240.0 / 256.0, 0.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * 8.0, UI_SCALE_Y * 8.0], texture_coordinates: [ 1.0, 0.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * 8.0, UI_SCALE_Y * -8.0], texture_coordinates: [ 1.0, 16.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * -8.0, UI_SCALE_Y * -8.0], texture_coordinates: [240.0 / 256.0, 16.0 / 256.0], texture_index: 0, color: 
[1.0; 4] },\n\n\n\n // Hotbar\n\n HudVertex { position: [UI_SCALE_X * -91.0, -1.0 + UI_SCALE_Y * 22.0], texture_coordinates: [ 0.0 / 256.0, 0.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * 91.0, -1.0 + UI_SCALE_Y * 22.0], texture_coordinates: [182.0 / 256.0, 0.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * 91.0, -1.0 ], texture_coordinates: [182.0 / 256.0, 22.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * -91.0, -1.0 ], texture_coordinates: [ 0.0 / 256.0, 22.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n\n\n // Hotbar cursor\n\n HudVertex { position: [UI_SCALE_X * -92.0, -1.0 + UI_SCALE_Y * 23.0], texture_coordinates: [ 0.0 / 256.0, 22.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * -68.0, -1.0 + UI_SCALE_Y * 23.0], texture_coordinates: [ 24.0 / 256.0, 22.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n", "file_path": "src/hud/widgets_hud.rs", "rank": 49, "score": 32500.654632510516 }, { "content": " HudVertex {\n\n position: [UI_SCALE_X * (x + 5.0), -1.0 + UI_SCALE_Y * 15.5],\n\n texture_coordinates: [0.0, 0.0],\n\n texture_index: texture_indices.0 as i32,\n\n color: color_left,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 12.5],\n\n texture_coordinates: [1.0, 0.0],\n\n texture_index: texture_indices.0 as i32,\n\n color: color_left,\n\n },\n\n // Front face\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 19.0), -1.0 + UI_SCALE_Y * 15.5],\n\n texture_coordinates: [1.0, 0.0],\n\n texture_index: texture_indices.3 as i32,\n\n color: color_front,\n\n },\n\n HudVertex {\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 50, "score": 32500.11689647836 }, { "content": " texture_coordinates: [1.0, 0.0],\n\n texture_index: texture_indices.5 as i32,\n\n color: color_top,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 18.5],\n\n texture_coordinates: [0.0, 0.0],\n\n texture_index: texture_indices.5 as i32,\n\n color: color_top,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 5.0), -1.0 + UI_SCALE_Y * 15.5],\n\n texture_coordinates: [0.0, 1.0],\n\n texture_index: texture_indices.5 as i32,\n\n color: color_top,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 12.5],\n\n texture_coordinates: [1.0, 1.0],\n\n texture_index: texture_indices.5 as i32,\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 51, "score": 32499.530636170726 }, { "content": " pub fn new(render_context: &RenderContext) -> Self {\n\n let hotbar_blocks = [\n\n Some(BlockType::Dirt),\n\n Some(BlockType::Stone),\n\n Some(BlockType::Sand),\n\n None,\n\n Some(BlockType::Grass),\n\n Some(BlockType::Cobblestone),\n\n Some(BlockType::OakPlanks),\n\n Some(BlockType::OakLog),\n\n Some(BlockType::OakLeaves),\n\n ];\n\n\n\n Self {\n\n blocks: hotbar_blocks,\n\n last_blocks: [None; 9],\n\n\n\n geometry_buffers: GeometryBuffers::from_geometry(\n\n render_context,\n\n &Geometry::<HudVertex, _>::default(),\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 52, "score": 32499.3106732283 }, { "content": " position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 12.5],\n\n texture_coordinates: [0.0, 0.0],\n\n texture_index: texture_indices.3 as i32,\n\n color: color_front,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 3.5],\n\n texture_coordinates: [0.0, 1.0],\n\n texture_index: texture_indices.3 as i32,\n\n color: color_front,\n\n 
},\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 19.0), -1.0 + UI_SCALE_Y * 6.5],\n\n texture_coordinates: [1.0, 1.0],\n\n texture_index: texture_indices.3 as i32,\n\n color: color_front,\n\n },\n\n // Top face\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 19.0), -1.0 + UI_SCALE_Y * 15.5],\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 53, "score": 32499.276386489746 }, { "content": " HudVertex { position: [UI_SCALE_X * -68.0, -1.0 + UI_SCALE_Y * -1.0], texture_coordinates: [ 24.0 / 256.0, 46.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n HudVertex { position: [UI_SCALE_X * -92.0, -1.0 + UI_SCALE_Y * -1.0], texture_coordinates: [ 0.0 / 256.0, 46.0 / 256.0], texture_index: 0, color: [1.0; 4] },\n\n];\n\n\n\n#[rustfmt::skip]\n\npub const INDICES: [u16; 18] = [\n\n // Crosshair\n\n 1, 0, 3,\n\n 1, 3, 2,\n\n\n\n // Hotbar\n\n 5, 4, 7,\n\n 5, 7, 6,\n\n\n\n // Hotbar cursor\n\n 9, 8, 11,\n\n 9, 11, 10,\n\n];\n", "file_path": "src/hud/widgets_hud.rs", "rank": 54, "score": 32499.14697850453 }, { "content": " .into();\n\n let color_front = color\n\n .mul_element_wise(Vector4::new(0.15, 0.15, 0.15, 1.0))\n\n .into();\n\n let color_top = color.into();\n\n\n\n vertices.extend([\n\n // Left face\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 12.0), -1.0 + UI_SCALE_Y * 3.5],\n\n texture_coordinates: [1.0, 1.0],\n\n texture_index: texture_indices.0 as i32,\n\n color: color_left,\n\n },\n\n HudVertex {\n\n position: [UI_SCALE_X * (x + 5.0), -1.0 + UI_SCALE_Y * 6.5],\n\n texture_coordinates: [0.0, 1.0],\n\n texture_index: texture_indices.0 as i32,\n\n color: color_left,\n\n },\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 55, "score": 32498.57285630408 }, { "content": "\n\n pub fn set_hotbar_cursor(&mut self, render_context: &RenderContext, i: usize) {\n\n self.hotbar_cursor_position = i;\n\n self.redraw_hotbar_cursor(render_context);\n\n }\n\n\n\n pub fn move_hotbar_cursor(&mut self, render_context: &RenderContext, delta: i32) {\n\n self.hotbar_cursor_position =\n\n (self.hotbar_cursor_position as i32 + delta).rem_euclid(9) as usize;\n\n self.redraw_hotbar_cursor(render_context);\n\n }\n\n\n\n pub fn redraw_hotbar_cursor(&self, render_context: &RenderContext) {\n\n let x = (-92 + 20 * self.hotbar_cursor_position as i32) as f32;\n\n let texture_index = 0;\n\n let color = [1.0; 4];\n\n\n\n #[rustfmt::skip]\n\n let vertices = [\n\n HudVertex { position: [UI_SCALE_X * (x ), -1.0 + UI_SCALE_Y * 23.0], texture_coordinates: [ 0.0 / 256.0, 22.0 / 256.0], texture_index, color },\n", "file_path": "src/hud/widgets_hud.rs", "rank": 56, "score": 32497.393165454254 }, { "content": " let (_, texture_bind_group) = Self::create_textures(render_context);\n\n\n\n let geometry = Geometry {\n\n vertices: VERTICES.to_vec(),\n\n indices: INDICES.to_vec(),\n\n };\n\n let geometry_buffers =\n\n GeometryBuffers::from_geometry(render_context, &geometry, BufferUsages::COPY_DST);\n\n\n\n Self {\n\n texture_bind_group,\n\n geometry_buffers,\n\n hotbar_cursor_position: 0,\n\n }\n\n }\n\n\n\n fn create_textures(render_context: &RenderContext) -> (wgpu::BindGroupLayout, wgpu::BindGroup) {\n\n let texture = Texture::from_bytes(\n\n render_context,\n\n include_bytes!(\"../../assets/gui/widgets.png\"),\n", "file_path": "src/hud/widgets_hud.rs", "rank": 57, "score": 32496.921883503965 }, { "content": " BufferUsages::empty(),\n\n ),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, render_context: &RenderContext) {\n\n if self.blocks != self.last_blocks {\n\n self.geometry_buffers = GeometryBuffers::from_geometry(\n\n 
render_context,\n\n &self.block_vertices(),\n\n wgpu::BufferUsages::empty(),\n\n );\n\n }\n\n }\n\n\n\n pub fn render<'a>(\n\n &'a self,\n\n render_context: &'a RenderContext,\n\n render_pass: &mut RenderPass<'a>,\n\n ) -> usize {\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 58, "score": 32495.79971638772 }, { "content": "\n\n let bind_group = render_context\n\n .device\n\n .create_bind_group(&wgpu::BindGroupDescriptor {\n\n label: Some(\"GUI texture bind group\"),\n\n layout: &bind_group_layout,\n\n entries: &[\n\n wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: wgpu::BindingResource::Sampler(&sampler),\n\n },\n\n wgpu::BindGroupEntry {\n\n binding: 1,\n\n resource: wgpu::BindingResource::TextureView(&texture.view),\n\n },\n\n ],\n\n });\n\n\n\n (bind_group_layout, bind_group)\n\n }\n", "file_path": "src/hud/widgets_hud.rs", "rank": 59, "score": 32495.23273148313 }, { "content": " }\n\n\n\n if position != &self.coordinates_last {\n\n let string = format!(\"({:.1},{:.1},{:.1})\", position.x, position.y, position.z,);\n\n self.coordinates_geometry_buffers = self.text_renderer.string_to_buffers(\n\n render_context,\n\n -0.98,\n\n 0.97 - text_renderer::DY * 1.3,\n\n &string,\n\n );\n\n }\n\n }\n\n\n\n pub fn render<'a>(&'a self, render_pass: &mut RenderPass<'a>) -> usize {\n\n let mut triangle_count = 0;\n\n\n\n // Render the FPS text\n\n self.fps_geometry_buffers.apply_buffers(render_pass);\n\n render_pass.set_bind_group(0, &self.text_renderer.bind_group, &[]);\n\n triangle_count += self.fps_geometry_buffers.draw_indexed(render_pass);\n", "file_path": "src/hud/debug_hud.rs", "rank": 60, "score": 32494.98007897781 }, { "content": " \"Texture GUI widgets\",\n\n )\n\n .unwrap();\n\n\n\n let sampler = render_context\n\n .device\n\n .create_sampler(&wgpu::SamplerDescriptor {\n\n label: Some(\"widgets sampler\"),\n\n mag_filter: wgpu::FilterMode::Nearest,\n\n min_filter: wgpu::FilterMode::Linear,\n\n ..wgpu::SamplerDescriptor::default()\n\n });\n\n\n\n let bind_group_layout =\n\n render_context\n\n .device\n\n .create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"GUI texture bind group layout\"),\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n", "file_path": "src/hud/widgets_hud.rs", "rank": 61, "score": 32493.674780742745 }, { "content": "\n\n // Render the coordinates text\n\n self.coordinates_geometry_buffers.apply_buffers(render_pass);\n\n render_pass.set_bind_group(0, &self.text_renderer.bind_group, &[]);\n\n triangle_count += self.coordinates_geometry_buffers.draw_indexed(render_pass);\n\n\n\n triangle_count\n\n }\n\n}\n", "file_path": "src/hud/debug_hud.rs", "rank": 62, "score": 32492.610243013638 }, { "content": " color: color_top,\n\n },\n\n ]);\n\n\n\n #[rustfmt::skip]\n\n indices.extend([\n\n // Left face\n\n 2 + index_offset, index_offset, 1 + index_offset,\n\n 3 + index_offset, index_offset, 2 + index_offset,\n\n\n\n // Right face\n\n 6 + index_offset, 4 + index_offset, 5 + index_offset,\n\n 7 + index_offset, 4 + index_offset, 6 + index_offset,\n\n\n\n // Top face\n\n 10 + index_offset, 8 + index_offset, 9 + index_offset,\n\n 11 + index_offset, 8 + index_offset, 10 + index_offset,\n\n ]);\n\n\n\n index_offset += 12;\n\n }\n\n }\n\n\n\n Geometry::new(vertices, indices)\n\n }\n\n}\n", "file_path": "src/hud/hotbar_hud.rs", "rank": 63, "score": 32491.212015104156 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct Time {\n\n pub time: f32,\n\n}\n\n\n\nimpl Time {\n\n pub fn new() -> Self {\n\n 
Self { time: 0.0 }\n\n }\n\n}\n", "file_path": "src/time.rs", "rank": 64, "score": 28387.76979425419 }, { "content": "use cgmath::Point3;\n\n\n\n#[derive(Debug)]\n\npub struct Aabb {\n\n pub min: Point3<f32>,\n\n pub max: Point3<f32>,\n\n}\n\n\n\nimpl Aabb {\n\n pub fn intersects(&self, other: &Self) -> bool {\n\n (self.min.x <= other.max.x && self.max.x >= other.min.x)\n\n && (self.min.y <= other.max.y && self.max.y >= other.min.y)\n\n && (self.min.z <= other.max.z && self.max.z >= other.min.z)\n\n }\n\n\n\n /// Gets the corners of the AABB that should be checked when checking\n\n /// collision with the world.\n\n ///\n\n /// Returns a `Vec` of all `Point3`s that cover the faces of `self` with\n\n /// no more than 1 unit of distance between them.\n", "file_path": "src/aabb.rs", "rank": 65, "score": 28326.91806551471 }, { "content": " pub fn get_corners(&self) -> Vec<Point3<f32>> {\n\n let mut corners = Vec::new();\n\n\n\n let mut x = self.min.x;\n\n while x < self.max.x.ceil() {\n\n let mut y = self.min.y;\n\n while y < self.max.y.ceil() {\n\n let mut z = self.min.z;\n\n while z < self.max.z.ceil() {\n\n corners.push(Point3::new(\n\n x.min(self.max.x),\n\n y.min(self.max.y),\n\n z.min(self.max.z),\n\n ));\n\n z += 1.0;\n\n }\n\n y += 1.0;\n\n }\n\n x += 1.0;\n\n }\n", "file_path": "src/aabb.rs", "rank": 66, "score": 28323.102014062188 }, { "content": "\n\n corners\n\n }\n\n}\n\n\n\nimpl Default for Aabb {\n\n fn default() -> Self {\n\n Self {\n\n min: Point3::new(0.0, 0.0, 0.0),\n\n max: Point3::new(0.0, 0.0, 0.0),\n\n }\n\n }\n\n}\n", "file_path": "src/aabb.rs", "rank": 67, "score": 28322.52412867899 }, { "content": "/// Returns `x` incremented with the lowest possible value that a\n\n/// single-precision floating point with `x`'s value can represent.\n", "file_path": "src/utils.rs", "rank": 68, "score": 28319.363255141754 }, { "content": "use crate::vertex::Vertex;\n\n\n\n/// Represents a set of triangles by its vertices and indices.\n\n#[derive(Default)]\n\npub struct Geometry<V: Vertex, I> {\n\n pub vertices: Vec<V>,\n\n pub indices: Vec<I>,\n\n}\n\n\n\nimpl<T: Vertex, I> Geometry<T, I> {\n\n pub fn new(vertices: Vec<T>, indices: Vec<I>) -> Self {\n\n Self { vertices, indices }\n\n }\n\n\n\n /// Moves all the vertices and indices of `other` into `Self`, leaving `other` empty.\n\n pub fn append(&mut self, other: &mut Self) {\n\n self.vertices.append(&mut other.vertices);\n\n self.indices.append(&mut other.indices);\n\n }\n\n\n\n /// Returns the number of indices in the vertex.\n\n pub fn index_count(&self) -> usize {\n\n self.indices.len()\n\n }\n\n}\n", "file_path": "src/geometry.rs", "rank": 69, "score": 28299.83303761165 }, { "content": "\n\n pub fn resize(&mut self, width: u32, height: u32) {\n\n self.aspect_ratio = width as f32 / height as f32;\n\n }\n\n\n\n pub fn calculate_matrix(&self) -> Matrix4<f32> {\n\n OPENGL_TO_WGPU_MATRIX\n\n * cgmath::perspective(self.fov_y, self.aspect_ratio, self.z_near, self.z_far)\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 70, "score": 28298.10400928429 }, { "content": " pub fov_y: Rad<f32>,\n\n pub z_near: f32,\n\n pub z_far: f32,\n\n}\n\n\n\nimpl Projection {\n\n pub fn new<Fov: Into<Rad<f32>>>(\n\n width: u32,\n\n height: u32,\n\n fov_y: Fov,\n\n z_near: f32,\n\n z_far: f32,\n\n ) -> Self {\n\n Self {\n\n aspect_ratio: width as f32 / height as f32,\n\n fov_y: fov_y.into(),\n\n z_near,\n\n z_far,\n\n }\n\n }\n", "file_path": "src/camera.rs", "rank": 71, "score": 28295.143796859793 }, { "content": "use cgmath::{Matrix4, Point3, Rad, 
Vector3};\n\n\n\n#[rustfmt::skip]\n\npub const OPENGL_TO_WGPU_MATRIX: Matrix4<f32> = Matrix4::new(\n\n 1.0, 0.0, 0.0, 0.0,\n\n 0.0, 1.0, 0.0, 0.0,\n\n 0.0, 0.0, 0.5, 0.0,\n\n 0.0, 0.0, 0.5, 1.0,\n\n);\n\n\n\npub struct Camera {\n\n pub position: Point3<f32>,\n\n pub yaw: Rad<f32>,\n\n pub pitch: Rad<f32>,\n\n}\n\n\n\nimpl Camera {\n\n pub fn new(position: Point3<f32>, yaw: Rad<f32>, pitch: Rad<f32>) -> Self {\n\n Self {\n\n position,\n", "file_path": "src/camera.rs", "rank": 72, "score": 28293.891287226896 }, { "content": " yaw,\n\n pitch,\n\n }\n\n }\n\n\n\n pub fn direction(&self) -> Vector3<f32> {\n\n Vector3::new(\n\n self.yaw.0.cos() * self.pitch.0.cos(),\n\n self.pitch.0.sin(),\n\n self.yaw.0.sin() * self.pitch.0.cos(),\n\n )\n\n }\n\n\n\n pub fn calculate_matrix(&self) -> Matrix4<f32> {\n\n Matrix4::look_to_rh(self.position, self.direction(), Vector3::unit_y())\n\n }\n\n}\n\n\n\npub struct Projection {\n\n pub aspect_ratio: f32,\n", "file_path": "src/camera.rs", "rank": 73, "score": 28288.855704413243 }, { "content": " 1 => Float32x2,\n\n 2 => Sint32,\n\n 3 => Float32x4,\n\n];\n\n\n\nimpl Vertex for HudVertex {\n\n fn descriptor() -> wgpu::VertexBufferLayout<'static> {\n\n wgpu::VertexBufferLayout {\n\n array_stride: size_of::<Self>() as wgpu::BufferAddress,\n\n step_mode: wgpu::VertexStepMode::Vertex,\n\n attributes: HUD_VERTEX_ATTRIBUTES,\n\n }\n\n }\n\n}\n\n\n\n/// Represents a vertex in world geometry.\n\n///\n\n/// Aside from the usual vertex position, texture coordinates and normal, this \"vertex\" also\n\n/// contains whether the block is highlighted (i.e. the player is pointing at the block), its\n\n/// texture index (to address the texture arrays) and a color multiplier.\n", "file_path": "src/vertex.rs", "rank": 74, "score": 28275.352751617174 }, { "content": "use std::mem::size_of;\n\n\n\nuse wgpu::VertexAttribute;\n\n\n", "file_path": "src/vertex.rs", "rank": 75, "score": 28269.169717973036 }, { "content": "impl Vertex for BlockVertex {\n\n fn descriptor() -> wgpu::VertexBufferLayout<'static> {\n\n wgpu::VertexBufferLayout {\n\n array_stride: size_of::<Self>() as wgpu::BufferAddress,\n\n step_mode: wgpu::VertexStepMode::Vertex,\n\n attributes: BLOCK_VERTEX_ATTRIBUTES,\n\n }\n\n }\n\n}\n", "file_path": "src/vertex.rs", "rank": 76, "score": 28265.165046747014 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone, Debug, Default, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct BlockVertex {\n\n pub position: [f32; 3],\n\n pub texture_coordinates: [f32; 2],\n\n pub normal: [f32; 3],\n\n pub highlighted: i32,\n\n pub texture_id: i32,\n\n pub color: [f32; 4],\n\n}\n\n\n\nconst BLOCK_VERTEX_ATTRIBUTES: &[VertexAttribute] = &wgpu::vertex_attr_array![\n\n 0 => Float32x3,\n\n 1 => Float32x2,\n\n 2 => Float32x3,\n\n 3 => Sint32,\n\n 4 => Sint32,\n\n 5 => Float32x4,\n\n];\n\n\n", "file_path": "src/vertex.rs", "rank": 77, "score": 28264.79519147754 }, { "content": "use std::time::Duration;\n\n\n\nuse cgmath::{InnerSpace, Point3, Rad, Vector3};\n\n\n\nuse crate::{aabb::Aabb, render_context::RenderContext, utils, view::View, world::World};\n\n\n\npub struct Player {\n\n pub sprinting: bool,\n\n pub grounded: bool,\n\n pub creative: bool,\n\n\n\n pub forward_pressed: bool,\n\n pub backward_pressed: bool,\n\n pub left_pressed: bool,\n\n pub right_pressed: bool,\n\n pub up_speed: f32,\n\n\n\n pub view: View,\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 78, "score": 28215.69189623397 }, { "content": " new_position += velocity;\n\n }\n\n self.view.camera.position = new_position;\n\n 
}\n\n\n\n fn check_collision(&self, position: Point3<f32>, world: &World) -> Option<Aabb> {\n\n let aabb = Aabb {\n\n min: position + Vector3::new(-0.3, -1.62, -0.3),\n\n max: position + Vector3::new(0.3, 0.18, 0.3),\n\n };\n\n\n\n for corner in &aabb.get_corners() {\n\n let block = world.get_block(corner.map(|x| x.floor() as isize));\n\n if block.is_some() {\n\n return Some(aabb);\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n", "file_path": "src/player.rs", "rank": 79, "score": 28211.77580343074 }, { "content": "use std::mem::size_of;\n\n\n\nuse cgmath::{EuclideanSpace, Matrix4, Point3, SquareMatrix, Vector4, Zero};\n\nuse wgpu::{BindGroup, BindGroupLayout, Buffer, BufferDescriptor, BufferUsages};\n\n\n\nuse crate::{\n\n aabb::Aabb,\n\n camera::{Camera, Projection, OPENGL_TO_WGPU_MATRIX},\n\n render_context::RenderContext,\n\n};\n\n\n\npub struct View {\n\n position_vector: Vector4<f32>,\n\n projection_matrix: Matrix4<f32>,\n\n pub frustrum_aabb: Aabb,\n\n\n\n pub camera: Camera,\n\n pub projection: Projection,\n\n\n\n pub buffer: Buffer,\n", "file_path": "src/view.rs", "rank": 80, "score": 28211.727840471154 }, { "content": " pub fn update_camera(&mut self, dx: f64, dy: f64) {\n\n let camera = &mut self.view.camera;\n\n camera.yaw += Rad(dx as f32 * 0.003);\n\n camera.pitch -= Rad(dy as f32 * 0.003);\n\n\n\n if camera.pitch < Rad::from(cgmath::Deg(-80.0)) {\n\n camera.pitch = Rad::from(cgmath::Deg(-80.0));\n\n } else if camera.pitch > Rad::from(cgmath::Deg(89.9)) {\n\n camera.pitch = Rad::from(cgmath::Deg(89.9));\n\n }\n\n }\n\n\n\n /// Updates the player's position by their velocity, checks for and\n\n /// resolves any subsequent collisions, and then adds the jumping speed to\n\n /// the velocity.\n\n pub fn update_position(&mut self, dt: Duration, world: &World) {\n\n let (yaw_sin, yaw_cos) = self.view.camera.yaw.0.sin_cos();\n\n\n\n let speed = 10.0 * (self.sprinting as i32 * 2 + 1) as f32 * dt.as_secs_f32();\n\n\n", "file_path": "src/player.rs", "rank": 81, "score": 28211.03964712078 }, { "content": " });\n\n\n\n Self {\n\n position_vector: Vector4::zero(),\n\n projection_matrix: Matrix4::identity(),\n\n frustrum_aabb: Aabb::default(),\n\n camera,\n\n projection,\n\n\n\n buffer,\n\n bind_group_layout,\n\n bind_group,\n\n }\n\n }\n\n\n\n pub fn update_view_projection(&mut self, render_context: &RenderContext) {\n\n self.position_vector = self.camera.position.to_homogeneous();\n\n self.projection_matrix =\n\n self.projection.calculate_matrix() * self.camera.calculate_matrix();\n\n self.frustrum_aabb = self.frustrum_aabb();\n", "file_path": "src/view.rs", "rank": 82, "score": 28208.9481753034 }, { "content": " new_position.y = aabb.min.y.ceil() + 1.62;\n\n new_position.y = utils::f32_successor(new_position.y);\n\n } else if self.up_speed > 0.0 {\n\n new_position.y = aabb.max.y.floor() - 0.18;\n\n new_position.y = utils::f32_predecessor(new_position.y);\n\n }\n\n\n\n self.up_speed = 0.0;\n\n self.grounded = true;\n\n } else if self.up_speed.abs() > 0.05 {\n\n self.grounded = false;\n\n }\n\n\n\n // x component\n\n new_position.x += velocity.x;\n\n if let Some(aabb) = self.check_collision(new_position, world) {\n\n if velocity.x < 0.0 {\n\n new_position.x = aabb.min.x.ceil() + 0.3;\n\n new_position.x = utils::f32_successor(new_position.x);\n\n } else if velocity.x > 0.0 {\n", "file_path": "src/player.rs", "rank": 83, "score": 28208.52562824087 }, { "content": " render_context.size.width,\n\n render_context.size.height,\n\n cgmath::Deg(45.0),\n\n 0.1,\n\n 300.0,\n\n );\n\n\n\n let buffer = 
render_context.device.create_buffer(&BufferDescriptor {\n\n label: Some(\"view buffer\"),\n\n size: size_of::<ViewRaw>() as u64,\n\n usage: BufferUsages::UNIFORM | BufferUsages::COPY_DST,\n\n mapped_at_creation: false,\n\n });\n\n\n\n let bind_group_layout =\n\n render_context\n\n .device\n\n .create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n", "file_path": "src/view.rs", "rank": 84, "score": 28207.642155409485 }, { "content": " let forward_speed = self.forward_pressed as i32 - self.backward_pressed as i32;\n\n let forward = Vector3::new(yaw_cos, 0.0, yaw_sin) * forward_speed as f32;\n\n\n\n let right_speed = self.right_pressed as i32 - self.left_pressed as i32;\n\n let right = Vector3::new(-yaw_sin, 0.0, yaw_cos) * right_speed as f32;\n\n\n\n let mut velocity = forward + right;\n\n if velocity.magnitude2() > 1.0 {\n\n velocity = velocity.normalize();\n\n }\n\n velocity *= speed;\n\n velocity.y = self.up_speed * 10.0 * dt.as_secs_f32();\n\n\n\n let mut new_position = self.view.camera.position;\n\n\n\n if !self.creative {\n\n // y component (jumping)\n\n new_position.y += velocity.y;\n\n if let Some(aabb) = self.check_collision(new_position, world) {\n\n if self.up_speed < 0.0 {\n", "file_path": "src/player.rs", "rank": 85, "score": 28206.804248426037 }, { "content": " new_position.x = aabb.max.x.floor() - 0.3;\n\n new_position.x = utils::f32_predecessor(new_position.x);\n\n }\n\n }\n\n\n\n // z component\n\n new_position.z += velocity.z;\n\n if let Some(aabb) = self.check_collision(new_position, world) {\n\n if velocity.z < 0.0 {\n\n new_position.z = aabb.min.z.ceil() + 0.3;\n\n new_position.z = utils::f32_successor(new_position.z);\n\n } else if velocity.z > 0.0 {\n\n new_position.z = aabb.max.z.floor() - 0.3;\n\n new_position.z = utils::f32_predecessor(new_position.z);\n\n }\n\n }\n\n\n\n self.up_speed -= 1.6 * dt.as_secs_f32();\n\n self.up_speed *= 0.98_f32.powf(dt.as_secs_f32() / 20.0);\n\n } else {\n", "file_path": "src/player.rs", "rank": 86, "score": 28206.524471062945 }, { "content": " visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n\n count: None,\n\n }],\n\n label: Some(\"view_bind_group_layout\"),\n\n });\n\n\n\n let bind_group = render_context\n\n .device\n\n .create_bind_group(&wgpu::BindGroupDescriptor {\n\n layout: &bind_group_layout,\n\n entries: &[wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: buffer.as_entire_binding(),\n\n }],\n\n label: Some(\"view_bind_group\"),\n", "file_path": "src/view.rs", "rank": 87, "score": 28206.379788330396 }, { "content": "impl Player {\n\n pub fn new(render_context: &RenderContext) -> Self {\n\n let view = View::new(render_context);\n\n\n\n Self {\n\n sprinting: false,\n\n grounded: false,\n\n creative: false,\n\n\n\n forward_pressed: false,\n\n backward_pressed: false,\n\n left_pressed: false,\n\n right_pressed: false,\n\n up_speed: 0.0,\n\n\n\n view,\n\n }\n\n }\n\n\n\n /// Update the camera based on mouse dx and dy.\n", "file_path": "src/player.rs", "rank": 88, "score": 28205.68617225314 }, { "content": "\n\n let mut min = Vector4::new(f32::INFINITY, f32::INFINITY, f32::INFINITY, 1.0);\n\n let mut max = Vector4::new(0.0, 0.0, 0.0, 1.0);\n\n for corner in corners {\n\n let corner = inverse_matrix * corner;\n\n let corner = corner / corner.w;\n\n\n\n min = min.zip(corner, f32::min);\n\n max = 
max.zip(corner, f32::max);\n\n }\n\n\n\n Aabb {\n\n min: Point3::from_vec(min.truncate()),\n\n max: Point3::from_vec(max.truncate()),\n\n }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct ViewRaw {\n\n view_position: [f32; 4],\n\n view_projection: [[f32; 4]; 4],\n\n}\n", "file_path": "src/view.rs", "rank": 89, "score": 28205.062363000226 }, { "content": " pub bind_group_layout: BindGroupLayout,\n\n pub bind_group: BindGroup,\n\n}\n\n\n\nimpl View {\n\n pub fn to_raw(&self) -> ViewRaw {\n\n ViewRaw {\n\n view_position: self.position_vector.into(),\n\n view_projection: self.projection_matrix.into(),\n\n }\n\n }\n\n\n\n pub fn new(render_context: &RenderContext) -> Self {\n\n let camera = Camera::new(\n\n (10.0, 140.0, 10.0).into(),\n\n cgmath::Deg(45.0).into(),\n\n cgmath::Deg(-20.0).into(),\n\n );\n\n\n\n let projection = Projection::new(\n", "file_path": "src/view.rs", "rank": 90, "score": 28204.66700186964 }, { "content": "\n\n render_context\n\n .queue\n\n .write_buffer(&self.buffer, 0, bytemuck::cast_slice(&[self.to_raw()]));\n\n }\n\n\n\n fn frustrum_aabb(&self) -> Aabb {\n\n let projection = OPENGL_TO_WGPU_MATRIX.invert().unwrap() * self.projection_matrix;\n\n let inverse_matrix = projection.invert().unwrap();\n\n\n\n let corners = [\n\n Vector4::new(-1.0, -1.0, 1.0, 1.0),\n\n Vector4::new(-1.0, -1.0, -1.0, 1.0),\n\n Vector4::new(-1.0, 1.0, 1.0, 1.0),\n\n Vector4::new(-1.0, 1.0, -1.0, 1.0),\n\n Vector4::new(1.0, -1.0, 1.0, 1.0),\n\n Vector4::new(1.0, -1.0, -1.0, 1.0),\n\n Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n Vector4::new(1.0, 1.0, -1.0, 1.0),\n\n ];\n", "file_path": "src/view.rs", "rank": 91, "score": 28203.85515346016 } ]
Rust
src/serial.rs
IamfromSpace/stm32f3xx-hal
c68c36c03e0e33699b3b0c9acc3f8d80f5a25cd4
use crate::{ gpio::{gpioa, gpiob, gpioc, AF7}, hal::{blocking, serial}, pac::{USART1, USART2, USART3}, rcc::{Clocks, APB1, APB2}, time::Bps, }; use cfg_if::cfg_if; use core::{convert::Infallible, marker::PhantomData, ptr}; cfg_if! { if #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] { use crate::dma; use cortex_m::interrupt; } } pub enum Event { Rxne, Txe, } #[derive(Debug)] #[non_exhaustive] pub enum Error { Framing, Noise, Overrun, Parity, } pub unsafe trait TxPin<USART> {} pub unsafe trait RxPin<USART> {} unsafe impl TxPin<USART1> for gpioa::PA9<AF7> {} unsafe impl TxPin<USART1> for gpiob::PB6<AF7> {} unsafe impl TxPin<USART1> for gpioc::PC4<AF7> {} unsafe impl RxPin<USART1> for gpioa::PA10<AF7> {} unsafe impl RxPin<USART1> for gpiob::PB7<AF7> {} unsafe impl RxPin<USART1> for gpioc::PC5<AF7> {} unsafe impl TxPin<USART2> for gpioa::PA2<AF7> {} unsafe impl TxPin<USART2> for gpiob::PB3<AF7> {} unsafe impl RxPin<USART2> for gpioa::PA3<AF7> {} unsafe impl RxPin<USART2> for gpiob::PB4<AF7> {} unsafe impl TxPin<USART3> for gpiob::PB10<AF7> {} unsafe impl TxPin<USART3> for gpioc::PC10<AF7> {} unsafe impl RxPin<USART3> for gpioc::PC11<AF7> {} cfg_if! { if #[cfg(any(feature = "gpio-f303", feature = "gpio-f303e", feature = "gpio-f373"))] { use crate::gpio::{gpiod, gpioe}; unsafe impl TxPin<USART1> for gpioe::PE0<AF7> {} unsafe impl RxPin<USART1> for gpioe::PE1<AF7> {} unsafe impl TxPin<USART2> for gpiod::PD5<AF7> {} unsafe impl RxPin<USART2> for gpiod::PD6<AF7> {} unsafe impl TxPin<USART3> for gpiod::PD8<AF7> {} unsafe impl RxPin<USART3> for gpiod::PD9<AF7> {} unsafe impl RxPin<USART3> for gpioe::PE15<AF7> {} } } cfg_if! { if #[cfg(not(feature = "gpio-f373"))] { unsafe impl TxPin<USART2> for gpioa::PA14<AF7> {} unsafe impl RxPin<USART2> for gpioa::PA15<AF7> {} unsafe impl RxPin<USART3> for gpiob::PB11<AF7> {} } } pub struct Serial<USART, PINS> { usart: USART, pins: PINS, } pub struct Rx<USART> { _usart: PhantomData<USART>, } pub struct Tx<USART> { _usart: PhantomData<USART>, } macro_rules! 
hal { ($( $USARTX:ident: ($usartX:ident, $APB:ident, $usartXen:ident, $usartXrst:ident, $pclkX:ident), )+) => { $( impl<TX, RX> Serial<$USARTX, (TX, RX)> { pub fn $usartX( usart: $USARTX, pins: (TX, RX), baud_rate: Bps, clocks: Clocks, apb: &mut $APB, ) -> Self where TX: TxPin<$USARTX>, RX: RxPin<$USARTX>, { apb.enr().modify(|_, w| w.$usartXen().set_bit()); apb.rstr().modify(|_, w| w.$usartXrst().set_bit()); apb.rstr().modify(|_, w| w.$usartXrst().clear_bit()); let brr = clocks.$pclkX().0 / baud_rate.0; crate::assert!(brr >= 16, "impossible baud rate"); usart.brr.write(|w| unsafe { w.bits(brr) }); usart.cr1.modify(|_, w| { w.ue().enabled(); w.re().enabled(); w.te().enabled() }); Serial { usart, pins } } pub fn listen(&mut self, event: Event) { match event { Event::Rxne => { self.usart.cr1.modify(|_, w| w.rxneie().set_bit()) }, Event::Txe => { self.usart.cr1.modify(|_, w| w.txeie().set_bit()) }, } } pub fn unlisten(&mut self, event: Event) { match event { Event::Rxne => { self.usart.cr1.modify(|_, w| w.rxneie().clear_bit()) }, Event::Txe => { self.usart.cr1.modify(|_, w| w.txeie().clear_bit()) }, } } pub fn split(self) -> (Tx<$USARTX>, Rx<$USARTX>) { ( Tx { _usart: PhantomData, }, Rx { _usart: PhantomData, }, ) } pub fn free(self) -> ($USARTX, (TX, RX)) { (self.usart, self.pins) } } impl serial::Read<u8> for Rx<$USARTX> { type Error = Error; fn read(&mut self) -> nb::Result<u8, Error> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; let icr = unsafe { &(*$USARTX::ptr()).icr }; Err(if isr.pe().bit_is_set() { icr.write(|w| w.pecf().clear()); nb::Error::Other(Error::Parity) } else if isr.fe().bit_is_set() { icr.write(|w| w.fecf().clear()); nb::Error::Other(Error::Framing) } else if isr.nf().bit_is_set() { icr.write(|w| w.ncf().clear()); nb::Error::Other(Error::Noise) } else if isr.ore().bit_is_set() { icr.write(|w| w.orecf().clear()); nb::Error::Other(Error::Overrun) } else if isr.rxne().bit_is_set() { return Ok(unsafe { ptr::read_volatile(&(*$USARTX::ptr()).rdr as *const _ as *const _) }); } else { nb::Error::WouldBlock }) } } impl serial::Write<u8> for Tx<$USARTX> { type Error = Infallible; fn flush(&mut self) -> nb::Result<(), Infallible> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; if isr.tc().bit_is_set() { Ok(()) } else { Err(nb::Error::WouldBlock) } } fn write(&mut self, byte: u8) -> nb::Result<(), Infallible> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; if isr.txe().bit_is_set() { unsafe { ptr::write_volatile(&(*$USARTX::ptr()).tdr as *const _ as *mut _, byte) } Ok(()) } else { Err(nb::Error::WouldBlock) } } } impl blocking::serial::write::Default<u8> for Tx<$USARTX> {} #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl Rx<$USARTX> { pub fn read_exact<B, C>( self, buffer: B, mut channel: C ) -> dma::Transfer<B, C, Self> where Self: dma::OnChannel<C>, B: dma::WriteBuffer<Word = u8> + 'static, C: dma::Channel, { let pa = unsafe { &(*$USARTX::ptr()).rdr } as *const _ as u32; unsafe { channel.set_peripheral_address(pa, dma::Increment::Disable) }; dma::Transfer::start_write(buffer, channel, self) } } #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl Tx<$USARTX> { pub fn write_all<B, C>( self, buffer: B, mut channel: C ) -> dma::Transfer<B, C, Self> where Self: dma::OnChannel<C>, B: dma::ReadBuffer<Word = u8> + 'static, C: dma::Channel, { let pa = unsafe { &(*$USARTX::ptr()).tdr } as *const _ as u32; unsafe { channel.set_peripheral_address(pa, dma::Increment::Disable) }; dma::Transfer::start_read(buffer, channel, self) } } #[cfg(any(feature = 
"stm32f302", feature = "stm32f303"))] impl dma::Target for Rx<$USARTX> { fn enable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmar().enabled()); }); } fn disable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmar().disabled()); }); } } #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl dma::Target for Tx<$USARTX> { fn enable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmat().enabled()); }); } fn disable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmat().disabled()); }); } } )+ } } hal! { USART1: (usart1, APB2, usart1en, usart1rst, pclk2), USART2: (usart2, APB1, usart2en, usart2rst, pclk1), USART3: (usart3, APB1, usart3en, usart3rst, pclk1), }
use crate::{ gpio::{gpioa, gpiob, gpioc, AF7}, hal::{blocking, serial}, pac::{USART1, USART2, USART3}, rcc::{Clocks, APB1, APB2}, time::Bps, }; use cfg_if::cfg_if; use core::{convert::Infallible, marker::PhantomData, ptr}; cfg_if! { if #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] { use crate::dma; use cortex_m::interrupt; } } pub enum Event { Rxne, Txe, } #[derive(Debug)] #[non_exhaustive] pub enum Error { Framing, Noise, Overrun, Parity, } pub unsafe trait TxPin<USART> {} pub unsafe trait RxPin<USART> {} unsafe impl TxPin<USART1> for gpioa::PA9<AF7> {} unsafe impl TxPin<USART1> for gpiob::PB6<AF7> {} unsafe impl TxPin<USART1> for gpioc::PC4<AF7> {} unsafe impl RxPin<USART1> for gpioa::PA10<AF7> {} unsafe impl RxPin<USART1> for gpiob::PB7<AF7> {} unsafe impl RxPin<USART1> for gpioc::PC5<AF7> {} unsafe impl TxPin<USART2> for gpioa::PA2<AF7> {} unsafe impl TxPin<USART2> for gpiob::PB3<AF7> {} unsafe impl RxPin<USART2> for gpioa::PA3<AF7> {} unsafe impl RxPin<USART2> for gpiob::PB4<AF7> {} unsafe impl TxPin<USART3> for gpiob::PB10<AF7> {} unsafe impl TxPin<USART3> for gpioc::PC10<AF7> {} unsafe impl RxPin<USART3> for gpioc::PC11<AF7> {} cfg_if! { if #[cfg(any(feature = "gpio-f303", feature = "gpio-f303e", feature = "gpio-f373"))] { use crate::gpio::{gpiod, gpioe}; unsafe impl TxPin<USART1> for gpioe::PE0<AF7> {} unsafe impl RxPin<USART1> for gpioe::PE1<AF7> {} unsafe impl TxPin<USART2> for gpiod::PD5<AF7> {} unsafe impl RxPin<USART2> for gpiod::PD6<AF7> {} unsafe impl TxPin<USART3> for gpiod::PD8<AF7> {} unsafe impl RxPin<USART3> for gpiod::PD9<AF7> {} unsafe impl RxPin<USART3> for gpioe::PE15<AF7> {} } } cfg_if! { if #[cfg(not(feature = "gpio-f373"))] { unsafe impl TxPin<USART2> for gpioa::PA14<AF7> {} unsafe impl RxPin<USART2> for gpioa::PA15<AF7> {} unsafe impl RxPin<USART3> for gpiob::PB11<AF7> {} } } pub struct Serial<USART, PINS> { usart: USART, pins: PINS, } pub struct Rx<USART> { _usart: PhantomData<USART>, } pub struct Tx<USART> { _usart: PhantomData<USART>, } macro_rules! hal { ($( $USARTX:ident: ($usartX:ident, $APB:ident, $usartXen:ident, $usartXrst:ident, $pclkX:ident), )+) => { $( impl<TX, RX> Serial<$USARTX, (TX, RX)> { pub fn $usartX( usart: $USARTX, pins: (TX, RX), baud_rate: Bps, clocks: Clocks, apb: &mut $APB, ) -> Self where TX: TxPin<$USARTX>, RX: RxPin<$USARTX>, { apb.enr().modify(|_, w| w.$usartXen().set_bit()); apb.rstr().modify(|_, w| w.$usartXrst().set_bit()); apb.rstr().modify(|_, w| w.$usartXrst().clear_bit()); let brr = clocks.$pclkX().0 / baud_rate.0; crate::assert!(brr >= 16, "impossible baud rate"); usart.brr.write(|w| unsafe { w.bits(brr) }); usart.cr1.modify(|_, w| { w.ue().enabled(); w.re().enabled(); w.te().enabled() }); Serial { usart, pins }
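The prefix above ends right at the `Serial { usart, pins }` constructor, so it is worth seeing how that constructor is driven from application code. The following is a minimal, hedged usage sketch only — it mirrors the `examples/serial_dma.rs` snippet that appears among the context items further down, and assumes an STM32F303 target with USART1 on PA9/PA10 plus the `cortex-m-rt` and `panic-semihosting` crates those examples already use:

```rust
#![no_std]
#![no_main]

use panic_semihosting as _;

use cortex_m_rt::entry;
use stm32f3xx_hal::{pac, prelude::*, serial::Serial};

#[entry]
fn main() -> ! {
    let dp = pac::Peripherals::take().unwrap();

    let mut flash = dp.FLASH.constrain();
    let mut rcc = dp.RCC.constrain();
    let clocks = rcc.cfgr.freeze(&mut flash.acr);

    let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);
    let pins = (
        gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh),  // TX
        gpioa.pa10.into_af7(&mut gpioa.moder, &mut gpioa.afrh), // RX
    );

    // `usart1` enables/resets the peripheral on APB2 and derives BRR from pclk2,
    // exactly as the constructor in the file code above shows.
    let serial = Serial::usart1(dp.USART1, pins, 9600.bps(), clocks, &mut rcc.apb2);
    let (mut tx, mut rx) = serial.split();

    loop {
        // Poll the non-blocking Read/Write impls defined in the file code above:
        // echo every received byte back out.
        if let Ok(byte) = rx.read() {
            while tx.write(byte).is_err() {}
        }
    }
}
```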
self.usart.cr1.modify(|_, w| w.rxneie().set_bit()) }, Event::Txe => { self.usart.cr1.modify(|_, w| w.txeie().set_bit()) }, } } pub fn unlisten(&mut self, event: Event) { match event { Event::Rxne => { self.usart.cr1.modify(|_, w| w.rxneie().clear_bit()) }, Event::Txe => { self.usart.cr1.modify(|_, w| w.txeie().clear_bit()) }, } } pub fn split(self) -> (Tx<$USARTX>, Rx<$USARTX>) { ( Tx { _usart: PhantomData, }, Rx { _usart: PhantomData, }, ) } pub fn free(self) -> ($USARTX, (TX, RX)) { (self.usart, self.pins) } } impl serial::Read<u8> for Rx<$USARTX> { type Error = Error; fn read(&mut self) -> nb::Result<u8, Error> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; let icr = unsafe { &(*$USARTX::ptr()).icr }; Err(if isr.pe().bit_is_set() { icr.write(|w| w.pecf().clear()); nb::Error::Other(Error::Parity) } else if isr.fe().bit_is_set() { icr.write(|w| w.fecf().clear()); nb::Error::Other(Error::Framing) } else if isr.nf().bit_is_set() { icr.write(|w| w.ncf().clear()); nb::Error::Other(Error::Noise) } else if isr.ore().bit_is_set() { icr.write(|w| w.orecf().clear()); nb::Error::Other(Error::Overrun) } else if isr.rxne().bit_is_set() { return Ok(unsafe { ptr::read_volatile(&(*$USARTX::ptr()).rdr as *const _ as *const _) }); } else { nb::Error::WouldBlock }) } } impl serial::Write<u8> for Tx<$USARTX> { type Error = Infallible; fn flush(&mut self) -> nb::Result<(), Infallible> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; if isr.tc().bit_is_set() { Ok(()) } else { Err(nb::Error::WouldBlock) } } fn write(&mut self, byte: u8) -> nb::Result<(), Infallible> { let isr = unsafe { (*$USARTX::ptr()).isr.read() }; if isr.txe().bit_is_set() { unsafe { ptr::write_volatile(&(*$USARTX::ptr()).tdr as *const _ as *mut _, byte) } Ok(()) } else { Err(nb::Error::WouldBlock) } } } impl blocking::serial::write::Default<u8> for Tx<$USARTX> {} #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl Rx<$USARTX> { pub fn read_exact<B, C>( self, buffer: B, mut channel: C ) -> dma::Transfer<B, C, Self> where Self: dma::OnChannel<C>, B: dma::WriteBuffer<Word = u8> + 'static, C: dma::Channel, { let pa = unsafe { &(*$USARTX::ptr()).rdr } as *const _ as u32; unsafe { channel.set_peripheral_address(pa, dma::Increment::Disable) }; dma::Transfer::start_write(buffer, channel, self) } } #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl Tx<$USARTX> { pub fn write_all<B, C>( self, buffer: B, mut channel: C ) -> dma::Transfer<B, C, Self> where Self: dma::OnChannel<C>, B: dma::ReadBuffer<Word = u8> + 'static, C: dma::Channel, { let pa = unsafe { &(*$USARTX::ptr()).tdr } as *const _ as u32; unsafe { channel.set_peripheral_address(pa, dma::Increment::Disable) }; dma::Transfer::start_read(buffer, channel, self) } } #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl dma::Target for Rx<$USARTX> { fn enable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmar().enabled()); }); } fn disable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmar().disabled()); }); } } #[cfg(any(feature = "stm32f302", feature = "stm32f303"))] impl dma::Target for Tx<$USARTX> { fn enable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmat().enabled()); }); } fn disable_dma(&mut self) { interrupt::free(|_| unsafe { let cr3 = &(*$USARTX::ptr()).cr3; cr3.modify(|_, w| w.dmat().disabled()); }); } } )+ } } hal! 
{ USART1: (usart1, APB2, usart1en, usart1rst, pclk2), USART2: (usart2, APB1, usart2en, usart2rst, pclk1), USART3: (usart3, APB1, usart3en, usart3rst, pclk1), }
} pub fn listen(&mut self, event: Event) { match event { Event::Rxne => {
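The `middle` span above is the code being infilled: the `Event::Rxne` arm of `listen`, which sets RXNEIE in CR1 so the USART raises an interrupt once the receive data register is non-empty. As an illustrative helper only (the function name and the concrete PA9/PA10 pin types are assumptions based on the pin impls in the file code, not part of the crate's API), a caller might wrap `listen`/`unlisten` like this:

```rust
use stm32f3xx_hal::{
    gpio::{gpioa, AF7},
    pac::USART1,
    serial::{Event, Serial},
};

/// Illustrative only: toggle the RXNE interrupt on a USART1 `Serial`
/// built from PA9/PA10 as in the sketch above.
fn set_rx_interrupt(
    serial: &mut Serial<USART1, (gpioa::PA9<AF7>, gpioa::PA10<AF7>)>,
    enabled: bool,
) {
    if enabled {
        // Sets RXNEIE in CR1 — the arm completed by the `middle` span.
        serial.listen(Event::Rxne);
    } else {
        // Clears RXNEIE again once interrupt-driven reception is done.
        serial.unlisten(Event::Rxne);
    }
}
```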
random
[ { "content": "fn unlock(apb1: &mut APB1, pwr: &mut PWR) {\n\n apb1.enr().modify(|_, w| {\n\n w\n\n // Enable the backup interface by setting PWREN\n\n .pwren()\n\n .set_bit()\n\n });\n\n pwr.cr.modify(|_, w| {\n\n w\n\n // Enable access to the backup registers\n\n .dbp()\n\n .set_bit()\n\n });\n\n\n\n while pwr.cr.read().dbp().bit_is_clear() {}\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 0, "score": 140667.74206337074 }, { "content": "/// Configuration trait for the Word Size\n\n/// used by the SPI peripheral\n\npub trait Word {\n\n /// Returns the register configuration\n\n /// to set the word size\n\n fn register_config() -> (FRXTH_A, DS_A);\n\n}\n\n\n\nimpl Word for u8 {\n\n fn register_config() -> (FRXTH_A, DS_A) {\n\n (FRXTH_A::QUARTER, DS_A::EIGHTBIT)\n\n }\n\n}\n\n\n\nimpl Word for u16 {\n\n fn register_config() -> (FRXTH_A, DS_A) {\n\n (FRXTH_A::HALF, DS_A::SIXTEENBIT)\n\n }\n\n}\n\n\n\n/// SPI peripheral operating in full duplex master mode\n\npub struct Spi<SPI, PINS, WORD = u8> {\n", "file_path": "src/spi.rs", "rank": 1, "score": 101701.07035705725 }, { "content": "/// Trait implemented by DMA targets.\n\npub trait Target {\n\n /// Enable DMA on the target\n\n fn enable_dma(&mut self) {}\n\n /// Disable DMA on the target\n\n fn disable_dma(&mut self) {}\n\n}\n\n\n\n/// An in-progress one-shot DMA transfer\n\npub struct Transfer<B, C: Channel, T: Target> {\n\n // This is always a `Some` outside of `drop`.\n\n inner: Option<TransferInner<B, C, T>>,\n\n}\n\n\n\nimpl<B, C: Channel, T: Target> Transfer<B, C, T> {\n\n /// Start a DMA write transfer.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the buffer is longer than 65535 words.\n\n pub fn start_write(mut buffer: B, mut channel: C, target: T) -> Self\n", "file_path": "src/dma.rs", "rank": 2, "score": 101697.22975010119 }, { "content": " /// Channel methods private to this module\n\n pub trait Channel {\n\n /// Return the register block for this channel\n\n fn ch(&self) -> &pac::dma1::CH;\n\n }\n\n}\n\n\n\nmacro_rules! dma {\n\n (\n\n $DMAx:ident, $dmax:ident, $dmaxen:ident,\n\n channels: {\n\n $( $Ci:ident: (\n\n $chi:ident,\n\n $htifi:ident, $tcifi:ident, $teifi:ident, $gifi:ident,\n\n $chtifi:ident, $ctcifi:ident, $cteifi:ident, $cgifi:ident\n\n ), )+\n\n },\n\n ) => {\n\n paste::paste! {\n\n #[doc = \"All associated types, traits and methods of the `\" $DMAx \"` peripheral.\"]\n\n pub mod $dmax {\n", "file_path": "src/dma.rs", "rank": 3, "score": 101692.79144788219 }, { "content": "/// Extension trait to split a GPIO peripheral in independent pins and registers\n\npub trait GpioExt {\n\n /// The Parts to split the GPIO peripheral into\n\n type Parts;\n\n\n\n /// Splits the GPIO block into independent pins and registers\n\n fn split(self, ahb: &mut AHB) -> Self::Parts;\n\n}\n\n\n\n/// Input mode (type state)\n\npub struct Input<MODE> {\n\n _mode: PhantomData<MODE>,\n\n}\n\n\n\n/// Floating input (type state)\n\npub struct Floating;\n\n/// Pulled down input (type state)\n\npub struct PullDown;\n\n/// Pulled up input (type state)\n\npub struct PullUp;\n\n\n", "file_path": "src/gpio.rs", "rank": 4, "score": 98266.5934193036 }, { "content": "/// Associated clocks with timers\n\npub trait PclkSrc {\n\n /// Get the current frequency of the associated clock\n\n fn get_clk(clocks: &Clocks) -> Hertz;\n\n}\n\n\n\n/// Hardware timers\n\npub struct Timer<TIM> {\n\n clocks: Clocks,\n\n tim: TIM,\n\n}\n\n\n\n/// Interrupt events\n\npub enum Event {\n\n /// Timer timed out / count down ended\n\n Update,\n\n}\n\n\n\nmacro_rules! 
hal {\n\n ($({\n\n $TIMX:ident: ($tim:ident, $timXen:ident, $timXrst:ident),\n", "file_path": "src/timer.rs", "rank": 5, "score": 98262.89249358376 }, { "content": "/// Extension trait to constrain the FLASH peripheral\n\npub trait FlashExt {\n\n /// Constrains the FLASH peripheral to play nicely with the other abstractions\n\n fn constrain(self) -> Parts;\n\n}\n\n\n\nimpl FlashExt for FLASH {\n\n fn constrain(self) -> Parts {\n\n Parts {\n\n acr: ACR { _0: () },\n\n }\n\n }\n\n}\n\n\n\n/// Constrained FLASH peripheral\n\npub struct Parts {\n\n /// Opaque ACR register\n\n pub acr: ACR,\n\n}\n\n\n\n/// Opaque ACR register\n", "file_path": "src/flash.rs", "rank": 6, "score": 98262.86315213508 }, { "content": "/// Extension trait that constrains the `RCC` peripheral\n\npub trait RccExt {\n\n /// Constrains the `RCC` peripheral so it plays nicely with the other abstractions\n\n fn constrain(self) -> Rcc;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n fn constrain(self) -> Rcc {\n\n Rcc {\n\n ahb: AHB { _0: () },\n\n apb1: APB1 { _0: () },\n\n apb2: APB2 { _0: () },\n\n bdcr: BDCR { _0: () },\n\n cfgr: CFGR::default(),\n\n }\n\n }\n\n}\n\n\n\n/// Constrained RCC peripheral\n\n///\n\n/// An instance of this struct is acquired by calling the\n", "file_path": "src/rcc.rs", "rank": 7, "score": 98262.86315213508 }, { "content": "/// Extension trait that adds convenience methods to the `u32` type\n\npub trait U32Ext {\n\n /// Wrap in `Bps`\n\n fn bps(self) -> Bps;\n\n\n\n /// Wrap in `Hertz`\n\n fn hz(self) -> Hertz;\n\n\n\n /// Wrap in `KiloHertz`\n\n fn khz(self) -> KiloHertz;\n\n\n\n /// Wrap in `MegaHertz`\n\n fn mhz(self) -> MegaHertz;\n\n\n\n /// Wrap in `MilliSeconds`\n\n fn ms(self) -> MilliSeconds;\n\n}\n\n\n\nimpl U32Ext for u32 {\n\n fn bps(self) -> Bps {\n\n Bps(self)\n", "file_path": "src/time.rs", "rank": 8, "score": 98262.73771277259 }, { "content": "/// Extension trait to split a DMA peripheral into independent channels\n\npub trait DmaExt {\n\n /// The type to split the DMA into\n\n type Channels;\n\n\n\n /// Split the DMA into independent channels\n\n fn split(self, ahb: &mut AHB) -> Self::Channels;\n\n}\n\n\n", "file_path": "src/dma.rs", "rank": 9, "score": 98262.73771277259 }, { "content": "fn gen_pin(pin: &gpio::Pin) -> Result<()> {\n\n let nr = pin.number()?;\n\n let reset_mode = get_pin_reset_mode(pin)?;\n\n let afr = if nr < 8 { 'L' } else { 'H' };\n\n let af_numbers = get_pin_af_numbers(pin)?;\n\n\n\n println!(\n\n \" {} => {{ reset: {}, afr: {}/{}, af: {:?} }},\",\n\n nr,\n\n reset_mode,\n\n afr,\n\n afr.to_lowercase(),\n\n af_numbers,\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 10, "score": 96783.83601473691 }, { "content": "/// Determine the [greatest common divisor](https://en.wikipedia.org/wiki/Greatest_common_divisor)\n\n///\n\n/// This function is based on the [Euclidean algorithm](https://en.wikipedia.org/wiki/Euclidean_algorithm).\n\nfn gcd(mut a: u32, mut b: u32) -> u32 {\n\n while b != 0 {\n\n let r = a % b;\n\n a = b;\n\n b = r;\n\n }\n\n a\n\n}\n\n\n", "file_path": "src/rcc.rs", "rank": 11, "score": 87660.0081188659 }, { "content": "/// Trait implemented by all DMA channels\n\npub trait Channel: private::Channel {\n\n /// Is the interrupt flag for the given event set?\n\n fn event_occurred(&self, event: Event) -> bool;\n\n\n\n /// Clear the interrupt flag for the given event.\n\n ///\n\n /// Passing `Event::Any` clears all interrupt flags.\n\n ///\n\n /// Note that the the global interrupt flag is not automatically cleared\n\n 
/// even when all other flags are cleared. The only way to clear it is to\n\n /// call this method with `Event::Any`.\n\n fn clear_event(&mut self, event: Event);\n\n\n\n /// Reset the control registers of this channel.\n\n /// This stops any ongoing transfers.\n\n fn reset(&mut self) {\n\n self.ch().cr.reset();\n\n self.ch().ndtr.reset();\n\n self.ch().par.reset();\n\n self.ch().mar.reset();\n", "file_path": "src/dma.rs", "rank": 12, "score": 85659.04127967867 }, { "content": "fn enable(bdcr: &mut BDCR) {\n\n bdcr.bdcr().modify(|_, w| w.bdrst().enabled());\n\n bdcr.bdcr().modify(|_, w| {\n\n w.rtcsel().lse();\n\n w.rtcen().enabled();\n\n w.bdrst().disabled()\n\n });\n\n}\n", "file_path": "src/rtc.rs", "rank": 13, "score": 85065.81551796416 }, { "content": "fn get_pin_af_numbers(pin: &gpio::Pin) -> Result<Vec<u8>> {\n\n let mut numbers = Vec::new();\n\n for signal in &pin.pin_signals {\n\n numbers.push(signal.af()?);\n\n }\n\n\n\n numbers.sort();\n\n numbers.dedup();\n\n\n\n Ok(numbers)\n\n}\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 14, "score": 84456.38948175026 }, { "content": "fn get_pin_reset_mode(pin: &gpio::Pin) -> Result<&'static str> {\n\n // Debug pins default to their debug function (AF0), everything else\n\n // defaults to floating input.\n\n let mode = match (pin.port()?, pin.number()?) {\n\n ('A', 13) | ('A', 14) | ('A', 15) | ('B', 3) | ('B', 4) => \"AF0\",\n\n _ => \"Input<Floating>\",\n\n };\n\n Ok(mode)\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 15, "score": 84456.38948175026 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n\n\n let clocks = rcc\n\n .cfgr\n\n .use_hse(8.mhz())\n\n .sysclk(48.mhz())\n\n .pclk1(24.mhz())\n\n .pclk2(24.mhz())\n\n .freeze(&mut flash.acr);\n\n\n\n assert!(clocks.usbclk_valid());\n\n\n\n // Configure the on-board LED (LD10, south red)\n\n let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n let mut led = gpioe\n\n .pe13\n", "file_path": "examples/usb_serial.rs", "rank": 16, "score": 84127.60988151969 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n\n\n let pins = (\n\n gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh),\n\n gpioa.pa10.into_af7(&mut gpioa.moder, &mut gpioa.afrh),\n\n );\n\n let serial = Serial::usart1(dp.USART1, pins, 9600.bps(), clocks, &mut rcc.apb2);\n\n let (tx, rx) = serial.split();\n\n\n\n let dma1 = dp.DMA1.split(&mut rcc.ahb);\n\n\n\n // the data we are going to send over serial\n\n let tx_buf = singleton!(: [u8; 9] = *b\"hello DMA\").unwrap();\n", "file_path": "examples/serial_dma.rs", "rank": 17, "score": 84127.60988151969 }, { "content": "fn merge_pins_by_port(pins: &[gpio::Pin]) -> Result<Vec<Port>> {\n\n let mut pins_by_port = HashMap::new();\n\n for pin in pins.iter() {\n\n pins_by_port\n\n .entry(pin.port()?)\n\n .and_modify(|e: &mut Vec<_>| e.push(pin))\n\n .or_insert_with(|| vec![pin]);\n\n }\n\n\n\n let mut ports = Vec::new();\n\n for (id, mut pins) in pins_by_port {\n\n pins.sort_by_key(|p| p.number().unwrap_or_default());\n\n pins.dedup_by_key(|p| p.number().unwrap_or_default());\n\n ports.push(Port { id, pins });\n\n }\n\n ports.sort_by_key(|p| p.id);\n\n\n\n Ok(ports)\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 18, "score": 78929.7254002452 }, { "content": "pub fn gen_autogen_comment(package: &Package) {\n\n println!(\"// auto-generated using codegen\");\n\n println!(\n\n \"// STM32CubeMX DB release: {}\",\n\n package.pack_description.release\n\n );\n\n}\n", "file_path": "codegen/src/codegen/mod.rs", "rank": 19, "score": 74986.07055950857 }, { "content": "fn hours_to_u8(hours: Hours) -> Result<u8, Error> {\n\n if let Hours::H24(h) = hours {\n\n Ok(h)\n\n } else {\n\n Err(Error::InvalidInputData)\n\n }\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 20, "score": 71808.74887962808 }, { "content": "/// Enable the low frequency external oscillator. 
This is the only mode currently\n\n/// supported, to avoid exposing the `CR` and `CRS` registers.\n\nfn enable_lse(bdcr: &mut BDCR, bypass: bool) {\n\n bdcr.bdcr()\n\n .modify(|_, w| w.lseon().set_bit().lsebyp().bit(bypass));\n\n while bdcr.bdcr().read().lserdy().bit_is_clear() {}\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 21, "score": 71783.57056262626 }, { "content": "pub fn load(db: &Db) -> Result<Families> {\n\n db.load_mcu(\"families\")\n\n}\n\n\n", "file_path": "codegen/src/cubemx/families.rs", "rank": 22, "score": 69302.64218410755 }, { "content": "pub fn load(db: &Db) -> Result<Package> {\n\n db.load(\"package\")\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Package {\n\n pub pack_description: PackDescription,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct PackDescription {\n\n pub release: String,\n\n}\n", "file_path": "codegen/src/cubemx/package.rs", "rank": 23, "score": 69302.64218410755 }, { "content": "fn hours_to_register(hours: Hours) -> Result<(u8, u8), Error> {\n\n match hours {\n\n Hours::H24(h) => Ok(bcd2_encode(h as u32))?,\n\n Hours::AM(h) => Ok(bcd2_encode((h - 1) as u32))?,\n\n Hours::PM(h) => Ok(bcd2_encode((h + 11) as u32))?,\n\n }\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 24, "score": 67617.95832586598 }, { "content": "// Two 32-bit registers (RTC_TR and RTC_DR) contain the seconds, minutes, hours (12- or 24-hour format), day (day\n\n// of week), date (day of month), month, and year, expressed in binary coded decimal format\n\n// (BCD). The sub-seconds value is also available in binary format.\n\n//\n\n// The following helper functions encode into BCD format from integer and\n\n// decode to an integer from a BCD value respectively.\n\nfn bcd2_encode(word: u32) -> Result<(u8, u8), Error> {\n\n let l = match (word / 10).try_into() {\n\n Ok(v) => v,\n\n Err(_) => {\n\n return Err(Error::InvalidInputData);\n\n }\n\n };\n\n let r = match (word % 10).try_into() {\n\n Ok(v) => v,\n\n Err(_) => {\n\n return Err(Error::InvalidInputData);\n\n }\n\n };\n\n\n\n Ok((l, r))\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 25, "score": 67617.95832586598 }, { "content": "pub fn load_f3(db: &Db) -> Result<Family> {\n\n load(db)?\n\n .families\n\n .into_iter()\n\n .find(|f| f.name == \"STM32F3\")\n\n .context(\"STM32F3 family not found\")\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Families {\n\n #[serde(rename = \"Family\")]\n\n pub families: Vec<Family>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Family {\n\n pub name: String,\n\n #[serde(rename = \"SubFamily\")]\n", "file_path": "codegen/src/cubemx/families.rs", "rank": 26, "score": 67405.51609662539 }, { "content": "pub fn gen_mappings(gpio_ips: &[gpio::Ip]) -> Result<()> {\n\n for ip in gpio_ips.iter() {\n\n println!();\n\n gen_gpio_ip(ip)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 27, "score": 65659.79625168297 }, { "content": "pub fn load(db: &Db, name: &str) -> Result<Mcu> {\n\n db.load_mcu(name)\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Mcu {\n\n pub ref_name: String,\n\n #[serde(rename = \"IP\")]\n\n pub ips: Vec<Ip>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Ip {\n\n pub name: String,\n\n pub version: String,\n\n}\n", 
"file_path": "codegen/src/cubemx/mcu.rs", "rank": 28, "score": 61820.868711570605 }, { "content": "pub fn load(db: &Db, version: &str) -> Result<Ip> {\n\n let name = format!(\"GPIO-{}_Modes\", version);\n\n let ip_path: PathBuf = [\"IP\", &name].iter().collect();\n\n db.load_mcu(&ip_path)\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Ip {\n\n pub version: String,\n\n #[serde(rename = \"GPIO_Pin\")]\n\n pub pins: Vec<Pin>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct Pin {\n\n pub port_name: String,\n\n pub name: String,\n\n #[serde(rename = \"PinSignal\", default)]\n", "file_path": "codegen/src/cubemx/ip/gpio.rs", "rank": 29, "score": 60209.125432403496 }, { "content": "pub fn load_f3_mcus(db: &Db) -> Result<Vec<mcu::Mcu>> {\n\n families::load_f3(db)?\n\n .sub_families\n\n .into_iter()\n\n .flat_map(|subfamily| subfamily.mcus.into_iter())\n\n .map(|mcu_| mcu::load(db, &mcu_.name))\n\n .collect()\n\n}\n\n\n", "file_path": "codegen/src/cubemx/mod.rs", "rank": 30, "score": 58716.50548375995 }, { "content": "fn gen_port(port: &Port, feature: &str) -> Result<()> {\n\n let pac_module = get_port_pac_module(port, feature);\n\n\n\n println!(\" {{\");\n\n println!(\n\n \" port: ({}/{}, pac: {}),\",\n\n port.id,\n\n port.id.to_lowercase(),\n\n pac_module,\n\n );\n\n println!(\" pins: [\");\n\n\n\n for pin in &port.pins {\n\n gen_pin(pin)?;\n\n }\n\n\n\n println!(\" ],\");\n\n println!(\" }},\");\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 31, "score": 55459.58762911171 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(about = \"Code generation for the stm32f3xx-hal crate\")]\n\nenum Command {\n\n #[structopt(about = \"Generate GPIO mappings from an STM32CubeMX database\")]\n\n Gpio {\n\n #[structopt(parse(from_os_str), help = \"Path of the STM32CubeMX MCU database\")]\n\n db_path: PathBuf,\n\n },\n\n}\n\n\n", "file_path": "codegen/src/main.rs", "rank": 32, "score": 55210.30838335503 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = stm32::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n\n\n let _clocks = rcc\n\n .cfgr\n\n .use_hse(32.mhz())\n\n .sysclk(32.mhz())\n\n .pclk1(16.mhz())\n\n .pclk2(16.mhz())\n\n .freeze(&mut flash.acr);\n\n\n\n // Configure CAN RX and TX pins (AF9)\n\n let can_rx = gpioa.pa11.into_af9(&mut gpioa.moder, &mut gpioa.afrh);\n\n let can_tx = gpioa.pa12.into_af9(&mut gpioa.moder, &mut gpioa.afrh);\n\n\n", "file_path": "examples/can.rs", "rank": 33, "score": 55139.971509831295 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.constrain();\n\n let mut gpioe = dp.GPIOE.split(&mut rcc.ahb);\n\n\n\n let mut led = gpioe\n\n .pe13\n\n .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n\n\n led.set_low().unwrap();\n\n\n\n loop {\n\n led.toggle().unwrap();\n\n cortex_m::asm::delay(8_000_000);\n\n // Toggle by hand.\n\n // Uses `StatefulOutputPin` instead of `ToggleableOutputPin`.\n\n // Logically it is the same.\n\n if led.is_set_low().unwrap() {\n\n led.set_high().unwrap();\n\n } else {\n\n led.set_low().unwrap();\n\n }\n\n cortex_m::asm::delay(8_000_000);\n\n }\n\n}\n", "file_path": "examples/toggle.rs", "rank": 34, "score": 55139.971509831295 }, { "content": "/// Main Thread\n\nfn main() -> ! {\n\n // Get peripherals, clocks and freeze them\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n let mut rcc = dp.RCC.constrain();\n\n let clocks = rcc.cfgr.freeze(&mut dp.FLASH.constrain().acr);\n\n\n\n // set up adc1\n\n let mut adc1 = adc::Adc::adc1(\n\n dp.ADC1, // The ADC we are going to control\n\n // The following is only needed to make sure the clock signal for the ADC is set up\n\n // correctly.\n\n &mut dp.ADC1_2,\n\n &mut rcc.ahb,\n\n adc::CkMode::default(),\n\n clocks,\n\n );\n\n\n\n // Set up pin PA0 as analog pin.\n\n // This pin is connected to the user button on the stm32f3discovery board.\n\n let mut gpio_a = dp.GPIOA.split(&mut rcc.ahb);\n", "file_path": "examples/adc.rs", "rank": 35, "score": 55139.971509831295 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n\n\n let clocks = rcc\n\n .cfgr\n\n .use_hse(8.mhz())\n\n .sysclk(48.mhz())\n\n .pclk1(24.mhz())\n\n .freeze(&mut flash.acr);\n\n\n\n // Configure pins for SPI\n\n let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);\n\n\n\n let spi_mode = Mode {\n", "file_path": "examples/spi.rs", "rank": 36, "score": 55139.971509831295 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n // Get our peripherals\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure our clocks\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let clocks = rcc.cfgr.sysclk(16.mhz()).freeze(&mut flash.acr);\n\n\n\n // Prep the pins we need in their correct alternate function\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n let pa4 = gpioa.pa4.into_af2(&mut gpioa.moder, &mut gpioa.afrl);\n\n let pa6 = gpioa.pa6.into_af2(&mut gpioa.moder, &mut gpioa.afrl);\n\n let pa7 = gpioa.pa7.into_af2(&mut gpioa.moder, &mut gpioa.afrl);\n\n\n\n let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n let pb0 = gpiob.pb0.into_af2(&mut gpiob.moder, &mut gpiob.afrl);\n\n let pb1 = gpiob.pb1.into_af2(&mut gpiob.moder, &mut gpiob.afrl);\n\n let pb4 = gpiob.pb4.into_af2(&mut gpiob.moder, &mut gpiob.afrl);\n\n let pb5 = gpiob.pb5.into_af2(&mut gpiob.moder, &mut gpiob.afrl);\n", "file_path": "examples/pwm.rs", "rank": 37, "score": 55139.971509831295 }, { "content": "fn ip_version_to_feature(ip_version: &str) -> Result<String> {\n\n static VERSION: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r\"^STM32(?P<version>\\w+)_gpio_v1_0$\").unwrap());\n\n\n\n let captures = VERSION\n\n .captures(&ip_version)\n\n .with_context(|| format!(\"invalid GPIO IP version: {}\", ip_version))?;\n\n\n\n let version = captures.name(\"version\").unwrap().as_str();\n\n let feature = format!(\"gpio-{}\", version.to_lowercase());\n\n Ok(feature)\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 38, "score": 54953.224767735286 }, { "content": "pub fn load_f3_gpio_ips(db: &Db) -> Result<Vec<ip::gpio::Ip>> {\n\n load_f3_ips(db, \"GPIO\")?\n\n .into_iter()\n\n .map(|ip_| ip::gpio::load(db, &ip_.version))\n\n .collect()\n\n}\n", "file_path": "codegen/src/cubemx/mod.rs", "rank": 39, "score": 54552.1265097207 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut flash = dp.FLASH.constrain();\n\n let mut rcc = dp.RCC.constrain();\n\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n\n\n let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n\n\n // Configure I2C1\n\n let pins = (\n\n gpiob.pb6.into_af4(&mut gpiob.moder, &mut gpiob.afrl), // SCL\n\n gpiob.pb7.into_af4(&mut gpiob.moder, &mut gpiob.afrl), // SDA\n\n );\n\n let mut i2c = hal::i2c::I2c::new(dp.I2C1, pins, 100.khz(), clocks, &mut rcc.apb1);\n\n\n\n hprintln!(\"Start i2c scanning...\").expect(\"Error using hprintln.\");\n\n hprintln!().unwrap();\n\n\n\n for addr in 0x00_u8..0x80 {\n", "file_path": "examples/i2c_scanner.rs", "rank": 40, "score": 53226.14627286506 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.constrain();\n\n let mut gpiob = dp.GPIOB.split(&mut rcc.ahb);\n\n let mut gpioc = dp.GPIOC.split(&mut rcc.ahb);\n\n let mut gpiod = dp.GPIOD.split(&mut rcc.ahb);\n\n\n\n let mut pin_array: [gpio::PXx<Input<Floating>>; 4] = [\n\n gpiob\n\n .pb11\n\n .into_floating_input(&mut gpiob.moder, &mut gpiob.pupdr)\n\n .downgrade()\n\n .downgrade(),\n\n gpioc\n\n .pc4\n\n .into_floating_input(&mut gpioc.moder, &mut gpioc.pupdr)\n\n .downgrade()\n\n .downgrade(),\n\n gpiod\n", "file_path": "examples/gpio_erased.rs", "rank": 41, "score": 53226.14627286506 }, { "content": "fn gen_gpio_macro_call(ports: &[Port], feature: &str) -> Result<()> {\n\n println!(\"gpio!([\");\n\n for port in ports {\n\n gen_port(port, feature)?;\n\n }\n\n println!(\"]);\");\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 42, "score": 52093.48026742516 }, { "content": "pub fn load_f3_ips(db: &Db, ip_name: &str) -> Result<Vec<mcu::Ip>> {\n\n let f3_mcus = load_f3_mcus(db)?;\n\n let mut ips: Vec<_> = f3_mcus\n\n .into_iter()\n\n .flat_map(|mcu| mcu.ips.into_iter())\n\n .filter(|ip| ip.name == ip_name)\n\n .collect();\n\n\n\n ips.sort();\n\n ips.dedup();\n\n\n\n Ok(ips)\n\n}\n\n\n", "file_path": "codegen/src/cubemx/mod.rs", "rank": 43, "score": 52067.5149063905 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\nstruct SpecificParameter {\n\n name: String,\n\n possible_value: String,\n\n}\n", "file_path": "codegen/src/cubemx/ip/gpio.rs", "rank": 44, "score": 50450.25232848966 }, { "content": "struct Port<'a> {\n\n id: char,\n\n pins: Vec<&'a gpio::Pin>,\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 45, "score": 49591.737798433394 }, { "content": "fn get_port_pac_module(port: &Port, feature: &str) -> &'static str {\n\n // The registers in ports A and B have different reset values due to the\n\n // presence of debug pins, so they get dedicated PAC modules.\n\n match port.id {\n\n 'A' => \"gpioa\",\n\n 'B' => \"gpiob\",\n\n 'D' if feature == \"gpio-f373\" => \"gpiod\",\n\n _ => \"gpioc\",\n\n }\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 46, "score": 49572.82800226611 }, { "content": "fn main() -> Result<()> {\n\n match Command::from_args() {\n\n Command::Gpio { db_path } => handle_gpio(db_path),\n\n }\n\n}\n\n\n", "file_path": "codegen/src/main.rs", "rank": 47, "score": 47968.608637116464 }, { "content": "/// This only exists so we can implement `Drop` for `Transfer`.\n\nstruct TransferInner<B, C, T> {\n\n buffer: B,\n\n channel: C,\n\n target: T,\n\n}\n\n\n\nimpl<B, C: Channel, T: Target> TransferInner<B, C, T> {\n\n /// Stop this transfer\n\n fn stop(&mut self) {\n\n self.channel.disable();\n\n self.target.disable_dma();\n\n\n\n atomic::compiler_fence(Ordering::SeqCst);\n\n }\n\n}\n\n\n\n/// DMA address increment mode\n\npub enum Increment {\n\n /// Enable increment\n\n Enable,\n", "file_path": "src/dma.rs", "rank": 48, "score": 41810.7642730085 }, { "content": "fn emit_autogen_comment(db: &Db) -> Result<()> {\n\n let package = cubemx::package::load(&db)?;\n\n codegen::gen_autogen_comment(&package);\n\n\n\n Ok(())\n\n}\n", "file_path": "codegen/src/main.rs", "rank": 49, "score": 38155.30974596396 }, { "content": "/// Convert pll multiplier into equivalent register field type\n\nfn into_pll_mul(mul: u8) -> cfgr::PLLMUL_A {\n\n match mul {\n\n 2 => cfgr::PLLMUL_A::MUL2,\n\n 3 => cfgr::PLLMUL_A::MUL3,\n\n 4 => cfgr::PLLMUL_A::MUL4,\n\n 5 => 
cfgr::PLLMUL_A::MUL5,\n\n 6 => cfgr::PLLMUL_A::MUL6,\n\n 7 => cfgr::PLLMUL_A::MUL7,\n\n 8 => cfgr::PLLMUL_A::MUL8,\n\n 9 => cfgr::PLLMUL_A::MUL9,\n\n 10 => cfgr::PLLMUL_A::MUL10,\n\n 11 => cfgr::PLLMUL_A::MUL11,\n\n 12 => cfgr::PLLMUL_A::MUL12,\n\n 13 => cfgr::PLLMUL_A::MUL13,\n\n 14 => cfgr::PLLMUL_A::MUL14,\n\n 15 => cfgr::PLLMUL_A::MUL15,\n\n 16 => cfgr::PLLMUL_A::MUL16,\n\n _ => crate::unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/rcc.rs", "rank": 50, "score": 37741.30168695852 }, { "content": "/// Convert pll divisor into equivalent register field type\n\nfn into_pre_div(div: u8) -> cfgr2::PREDIV_A {\n\n match div {\n\n 1 => cfgr2::PREDIV_A::DIV1,\n\n 2 => cfgr2::PREDIV_A::DIV2,\n\n 3 => cfgr2::PREDIV_A::DIV3,\n\n 4 => cfgr2::PREDIV_A::DIV4,\n\n 5 => cfgr2::PREDIV_A::DIV5,\n\n 6 => cfgr2::PREDIV_A::DIV6,\n\n 7 => cfgr2::PREDIV_A::DIV7,\n\n 8 => cfgr2::PREDIV_A::DIV8,\n\n 9 => cfgr2::PREDIV_A::DIV9,\n\n 10 => cfgr2::PREDIV_A::DIV10,\n\n 11 => cfgr2::PREDIV_A::DIV11,\n\n 12 => cfgr2::PREDIV_A::DIV12,\n\n 13 => cfgr2::PREDIV_A::DIV13,\n\n 14 => cfgr2::PREDIV_A::DIV14,\n\n 15 => cfgr2::PREDIV_A::DIV15,\n\n 16 => cfgr2::PREDIV_A::DIV16,\n\n _ => crate::unreachable!(),\n\n }\n", "file_path": "src/rcc.rs", "rank": 51, "score": 37741.30168695852 }, { "content": "fn handle_gpio(db_path: PathBuf) -> Result<()> {\n\n let db = cubemx::Db::new(db_path);\n\n\n\n emit_autogen_comment(&db)?;\n\n\n\n let gpio_ips = cubemx::load_f3_gpio_ips(&db)?;\n\n codegen::gpio::gen_mappings(&gpio_ips)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/main.rs", "rank": 52, "score": 37124.53654812115 }, { "content": "fn parse_af(s: &str) -> Result<u8> {\n\n static AF: Lazy<Regex> = Lazy::new(|| Regex::new(r\"^GPIO_AF(?P<nr>\\d{1,2})_\\w+$\").unwrap());\n\n\n\n let captures = AF\n\n .captures(s)\n\n .with_context(|| format!(\"invalid PinSignal AF: {}\", s))?;\n\n\n\n let nr = captures.name(\"nr\").unwrap().as_str().parse()?;\n\n Ok(nr)\n\n}\n\n\n", "file_path": "codegen/src/cubemx/ip/gpio.rs", "rank": 53, "score": 35581.8664590125 }, { "content": "fn bcd2_decode(fst: u8, snd: u8) -> u32 {\n\n (fst * 10 + snd).into()\n\n}\n\n\n", "file_path": "src/rtc.rs", "rank": 54, "score": 35368.788084212734 }, { "content": "fn gen_gpio_ip(ip: &gpio::Ip) -> Result<()> {\n\n let feature = ip_version_to_feature(&ip.version)?;\n\n let ports = merge_pins_by_port(&ip.pins)?;\n\n\n\n println!(r#\"#[cfg(feature = \"{}\")]\"#, feature);\n\n gen_gpio_macro_call(&ports, &feature)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "codegen/src/codegen/gpio.rs", "rank": 72, "score": 34636.77654269798 }, { "content": " // the buffer we are going to receive the transmitted data in\n\n let rx_buf = singleton!(: [u8; 9] = [0; 9]).unwrap();\n\n\n\n // DMA channel selection depends on the peripheral:\n\n // - USART1: TX = 4, RX = 5\n\n // - USART2: TX = 6, RX = 7\n\n // - USART3: TX = 3, RX = 2\n\n let (tx_channel, rx_channel) = (dma1.ch4, dma1.ch5);\n\n\n\n // start separate DMAs for sending and receiving the data\n\n let sending = tx.write_all(tx_buf, tx_channel);\n\n let receiving = rx.read_exact(rx_buf, rx_channel);\n\n\n\n // block until all data was transmitted and received\n\n let (tx_buf, tx_channel, tx) = sending.wait();\n\n let (rx_buf, rx_channel, rx) = receiving.wait();\n\n\n\n assert_eq!(tx_buf, rx_buf);\n\n\n\n // After a transfer is finished its parts can be re-used for another one.\n", "file_path": "examples/serial_dma.rs", "rank": 73, "score": 32841.93390032634 }, { "content": "//! 
Example of transmitting data over serial interface using DMA.\n\n//! For this to work, the PA9 and PA10 pins must be connected.\n\n//! Target board: STM32F3DISCOVERY\n\n\n\n#![no_std]\n\n#![no_main]\n\n\n\nuse panic_semihosting as _;\n\n\n\nuse cortex_m::{asm, singleton};\n\nuse cortex_m_rt::entry;\n\nuse stm32f3xx_hal::{pac, prelude::*, serial::Serial};\n\n\n\n#[entry]\n", "file_path": "examples/serial_dma.rs", "rank": 74, "score": 32840.719161277186 }, { "content": "//! CDC-ACM serial port example using polling in a busy loop.\n\n//! Target board: STM32F3DISCOVERY\n\n#![no_std]\n\n#![no_main]\n\n\n\nuse panic_semihosting as _;\n\n\n\nuse stm32f3xx_hal as hal;\n\n\n\nuse cortex_m::asm::delay;\n\nuse cortex_m_rt::entry;\n\n\n\nuse hal::pac;\n\nuse hal::prelude::*;\n\nuse hal::usb::{Peripheral, UsbBus};\n\n\n\nuse usb_device::prelude::*;\n\nuse usbd_serial::{SerialPort, USB_CLASS_CDC};\n\n\n\n#[entry]\n", "file_path": "examples/usb_serial.rs", "rank": 75, "score": 32839.56790860212 }, { "content": " pin_dm: usb_dm,\n\n pin_dp: usb_dp,\n\n };\n\n let usb_bus = UsbBus::new(usb);\n\n\n\n let mut serial = SerialPort::new(&usb_bus);\n\n\n\n let mut usb_dev = UsbDeviceBuilder::new(&usb_bus, UsbVidPid(0x16c0, 0x27dd))\n\n .manufacturer(\"Fake company\")\n\n .product(\"Serial port\")\n\n .serial_number(\"TEST\")\n\n .device_class(USB_CLASS_CDC)\n\n .build();\n\n\n\n loop {\n\n if !usb_dev.poll(&mut [&mut serial]) {\n\n continue;\n\n }\n\n\n\n let mut buf = [0u8; 64];\n", "file_path": "examples/usb_serial.rs", "rank": 76, "score": 32837.92794707083 }, { "content": " .into_push_pull_output(&mut gpioe.moder, &mut gpioe.otyper);\n\n led.set_low().ok(); // Turn off\n\n\n\n let mut gpioa = dp.GPIOA.split(&mut rcc.ahb);\n\n\n\n // F3 Discovery board has a pull-up resistor on the D+ line.\n\n // Pull the D+ pin down to send a RESET condition to the USB bus.\n\n // This forced reset is needed only for development, without it host\n\n // will not reset your device when you upload new firmware.\n\n let mut usb_dp = gpioa\n\n .pa12\n\n .into_push_pull_output(&mut gpioa.moder, &mut gpioa.otyper);\n\n usb_dp.set_low().ok();\n\n delay(clocks.sysclk().0 / 100);\n\n\n\n let usb_dm = gpioa.pa11.into_af14(&mut gpioa.moder, &mut gpioa.afrh);\n\n let usb_dp = usb_dp.into_af14(&mut gpioa.moder, &mut gpioa.afrh);\n\n\n\n let usb = Peripheral {\n\n usb: dp.USB,\n", "file_path": "examples/usb_serial.rs", "rank": 77, "score": 32836.71569549669 }, { "content": " tx_buf.copy_from_slice(b\"hi again!\");\n\n\n\n let sending = tx.write_all(tx_buf, tx_channel);\n\n let receiving = rx.read_exact(rx_buf, rx_channel);\n\n\n\n let (tx_buf, ..) = sending.wait();\n\n let (rx_buf, ..) 
= receiving.wait();\n\n\n\n assert_eq!(tx_buf, rx_buf);\n\n\n\n loop {\n\n asm::wfi();\n\n }\n\n}\n", "file_path": "examples/serial_dma.rs", "rank": 78, "score": 32835.858783881464 }, { "content": "\n\n match serial.read(&mut buf) {\n\n Ok(count) if count > 0 => {\n\n led.set_high().ok(); // Turn on\n\n\n\n // Echo back in upper case\n\n for c in buf[0..count].iter_mut() {\n\n if 0x61 <= *c && *c <= 0x7a {\n\n *c &= !0x20;\n\n }\n\n }\n\n\n\n let mut write_offset = 0;\n\n while write_offset < count {\n\n match serial.write(&buf[write_offset..count]) {\n\n Ok(len) if len > 0 => {\n\n write_offset += len;\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n led.set_low().ok(); // Turn off\n\n }\n\n}\n", "file_path": "examples/usb_serial.rs", "rank": 79, "score": 32833.38733482536 }, { "content": "### Breaking Changes\n\n\n\n- Removed impl for `SckPin<SPI2>` for `PB13<AF5>` from `stm32f328` and\n\n `stm32f378` targets. ([#99])\n\n- Removed SPI1 support for `stm32f302x6` and `stm32f302x8` sub-targets\n\n and `stm32f318` target. ([#99])\n\n- This release requires 1.48, as intra-doc-links are now used internally.\n\n Until now, no MSRV was tracked. This has changed now. This however does\n\n not mean, that we guarantee any MSRV policy. It is rather for documentation\n\n purposes and if a new useful feature arises, we will increase the MSRV.\n\n ([#170])\n\n- Removed I2C2 support for `stm32f303x6`, `stm32f303x8` and `stm32f328` targets.\n\n ([#164])\n\n- `I2c::i2c1` and `I2c::i2c2` functions are renamed to `I2c::new`.\n\n ([#164])\n\n\n\n## [v0.5.0] - 2020-07-21\n\n\n\n### Added\n\n\n\n- Implement `InputPin` for `Output<OpenDrain>` pins ([#114])\n\n- Support for safe one-shot DMA transfers ([#86])\n\n- DMA support for serial reception and transmission ([#86])\n\n- ADC support for `stm32f303` devices ([#47])\n\n\n\n### Fixed\n\n\n\n- `PLL` was calculated wrong for devices, which do not divide `HSI` ([#67])\n\n\n\n### Changed\n\n\n\n- The system clock calculation is more fine grained now. ([#67])\n\n Now the system clock can be some value, like 14 MHz, which can not a\n\n be represented as a multiple of the oscillator clock:\n\n\n\n```rust\n\nlet clocks = rcc\n\n .cfgr\n\n .use_hse(8.mhz())\n\n .sysclk(14.mhz())\n\n\n\n// or\n\nlet clocks = rcc\n\n .cfgr\n\n .use_hse(32.mhz())\n\n .sysclk(72.mhz())\n\n```\n\n\n\n This is possible through utilizing the divider, which can divide the\n\n external oscillator clock on most devices. 
Some devices have even the\n\n possibility to divide the internal oscillator clock.\n\n\n", "file_path": "CHANGELOG.md", "rank": 80, "score": 19600.422580038863 }, { "content": "# Change Log\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/).\n\n\n\n## [Unreleased]\n\n\n\n## [v0.6.1] - 2020-12-10\n\n\n\n### Changed\n\n\n\n- Removed `doc-comment` dependency ([#184])\n\n\n\n## [v0.6.0] - 2020-12-10\n\n\n\n### Added\n\n\n\n- Support for 16-bit words with SPI ([#107])\n\n- SPI support for reclock after initialization ([#98])\n\n- Support for `stm32f302x6` and `stm32f302x8` devices ([#132])\n\n- Support for the onboard real-time clock (RTC) ([#136])\n\n- Enable DMA for USART on `stm32f302` devices ([#139])\n\n- Basic CAN bus support ([#100])\n\n- Impls for all SPI pins for all `stm32f302` sub-targets, `stm32f303`\n\n subtargets, `stm32f3x8` targets, `stm32f334`, and `stm32f373`\n\n ([#99])\n\n- SPI4 peripheral for supported\n\n devices. ([#99])\n\n- Support for I2C transfer of more than 255 bytes, and 0 byte write ([#154])\n\n- Support for HSE bypass and CSS ([#156])\n\n- Impls for missing I2C pin definitions ([#164])\n\n- Support I2C3 ([#164])\n\n- Support for [`defmt`][defmt] ([#172])\n\n - Now [defmt][] features are available.\n\n - Currently these are only used for panicking calls, like\n\n `assert!` `panic!` or `unwrap()`. These are enabled using the [defmt][]\n\n [filter][].\n\n - For now [defmt][] is mostly intended for internal development and testing\n\n to further reduce panicking calls in this crate.\n\n The support of this feature is subject to change as the development\n\n of [defmt][] is advancing.\n\n\n\n[defmt]: https://github.com/knurling-rs/defmt\n\n[filter]: https://defmt.ferrous-systems.com/filtering.html\n\n\n\n### Changed\n\n\n\n- Introduced auto-generated GPIO mappings based on the STM32CubeMX database\n\n ([#129])\n\n\n\n### Fixed\n\n\n\n- Fixed [#151] not being able to generate 72 MHz HCLK for stm32f303xc devices\n\n ([#152])\n\n- Wrong I2C clock source ([#164])\n\n\n\n[#151]: https://github.com/stm32-rs/stm32f3xx-hal/issues/151\n\n\n", "file_path": "CHANGELOG.md", "rank": 81, "score": 19599.52424934583 }, { "content": "### Breaking changes\n\n\n\n- The feature gate requires you to select a subvariant if possible. ([#75])\n\n- Split up `stm32f302` into sub-targets `stm32f302xb`,`stm32f302xc`,`stm32f302xd`,`stm32f302xe`\n\n- Bump `stm32f3` dependency to `0.11.0` ([#97])\n\n- The `stm32f3` reexport is now renamed from `stm32` to `pac` ([#101])\n\n- The correct `stm32f3` modules are now used for the `stm32f318` and `stm32f738`\n\n targets. As a result, some previously (wrongly) supported peripherals have\n\n been removed from these targets. 
([#116])\n\n\n\n## [v0.4.3] - 2020-04-11\n\n\n\n### Added\n\n\n\n- Independent Watchdog ([#58])\n\n\n\n### Fixed\n\n\n\n- Wrong default modes for debug GPIO pins ([#82])\n\n- Wrong calculation of HCLK prescaler, if using a prescaler value equal or\n\n higher than 64 ([#42])\n\n- UART reception error flags not cleared ([#91])\n\n\n\n## [v0.4.2] - 2020-03-21\n\n\n\n### Fixed\n\n\n\n- Wrong frequency reported by `MonoTimer` ([#56])\n\n- Use automatic mode with I2C autoend on `Read` ([#72])\n\n\n\n### Changed\n\n\n\n- Bump `stm32f3` dependency to `0.10.0` ([#70])\n\n\n\n## [v0.4.1] - 2020-03-07\n\n\n\n### Added\n\n\n\n- Use Infallible error type for UART ([#50])\n\n- Implement blocking Write for UART ([#50])\n\n- Implement blocking Read for I2C ([#52])\n\n\n\n### Fixed\n\n\n\n- Regression in v0.4.0 that set SPI to LSB-first ordering ([#60])\n\n\n\n## [v0.4.0] - 2019-12-27\n\n\n\n### Added\n\n\n\n- USB Driver for all devices except `stm32f301` and `stm32f334` as they have no\n\n USB peripheral. ([#24])\n\n- `StatefulOutputPin` and `ToggleableOutputPin` ([#25])\n\n- Support devices with 2-bit PLLSRC fields ([#31])\n\n - This allows using 72 MHz `sysclk` on the `stm32f303`\n\n- Analog gpio trait ([#33])\n\n- Add PWM Channels ([#34])\n\n- SPI embedded hal modes are now public\n\n ([#35])\n\n\n\n### Breaking changes\n\n\n\n- Alternate gpio functions are now **only** made available for devices, which\n\n have them. ([#21])\n\n- `stm32f303` is now split into `stm32f303xd` and `stm32f303xe` as they provide\n\n different alternate gpio functions. `stm32f303` is still available.\n\n- Bump `stm32f3` dependency to `0.9.0`\n\n ([#39])\n\n\n\n### Fixed\n\n\n\n- Fixed wrong initialization of the SPI ([#35])\n\n\n", "file_path": "CHANGELOG.md", "rank": 82, "score": 19598.164682660892 }, { "content": "# `stm32f3xx-hal`\n\n\n\n[![Build Status](https://github.com/stm32-rs/stm32f3xx-hal/workflows/CI/badge.svg)](https://github.com/stm32-rs/stm32f3xx-hal/actions)\n\n[![Crate](https://img.shields.io/crates/v/stm32f3xx-hal.svg)](https://crates.io/crates/stm32f3xx-hal)\n\n[![Docs](https://docs.rs/stm32f3xx-hal/badge.svg)](https://docs.rs/stm32f3xx-hal)\n\n\n\n`stm32f3xx-hal` contains a multi device hardware abstraction on top of the\n\nperipheral access API for the STMicro STM32F3 series microcontrollers. The\n\nselection of the MCU is done by feature gates, typically specified by board\n\nsupport crates. 
Currently supported configurations are:\n\n\n\n* stm32f301\n\n* stm32f318\n\n* stm32f302\n\n* stm32f303\n\n* stm32f373\n\n* stm32f378\n\n* stm32f334\n\n* stm32f328\n\n* stm32f358\n\n* stm32f398\n\n\n\nThe idea behind this crate is to gloss over the slight differences in the\n\nvarious peripherals available on those MCUs so a HAL can be written for all\n\nchips in that same family without having to cut and paste crates for every\n\nsingle model.\n\n\n\nCollaboration on this crate is highly welcome as are pull requests!\n\n\n\nThis crate relies on Adam Greigs fantastic [`stm32f3`][] crate to provide\n\nappropriate register definitions and implements a partial set of the\n\n[`embedded-hal`][] traits.\n\n\n\nAlmost all of the implementation was shamelessly adapted from the\n\n[`stm32f30x-hal`][] crate by Jorge Aparicio.\n\n\n\n[`stm32f3`]: https://crates.io/crates/stm32f3\n\n[`stm32f30x-hal`]: https://github.com/japaric/stm32f30x-hal\n\n[`embedded-hal`]: https://github.com/japaric/embedded-hal\n\n\n\n## [Changelog](CHANGELOG.md)\n\n\n\n## Selecting the right chip\n\n\n\nThis crate requires you to specify your target chip as a feature.\n\n\n\n*Example: The STM32F3Discovery board has a STM32F303VCT6 chip.\n\nSo you want to expand your call to `cargo` with `--features stm32f303xc`.*\n\n\n", "file_path": "README.md", "rank": 83, "score": 19598.046494921444 }, { "content": "## [v0.3.0] - 2019-08-26\n\n\n\n### Added\n\n\n\n- HSE and USB clock are now supported ([#18])\n\n\n\n### Changed\n\n\n\n- Bump `stm32f3` version to `0.8.0` ([#19])\n\n\n\n## [v0.2.3] - 2019-07-07\n\n\n\n### Fixed\n\n\n\n- Fix timer initialization ([#17])\n\n\n\n## [v0.2.2] - 2019-07-06\n\n\n\n## Fixed\n\n\n\n- Missing `stm32f303` timers ([#16])\n\n\n\n## [v0.2.1] - 2019-07-06\n\n\n\n### Added\n\n\n\n- Fully erased pin ([#14])\n\n\n\n## [v0.2.0] - 2019-07-02\n\n\n\n### Changed\n\n\n\n- Bump `stm32f3` version to `0.7.1` ([#12])\n\n- Bump `embedded-hal` version to `0.2.3` ([#11])\n\n\n\n### Fixed\n\n\n\n- Various peripheral mappings for some devices ([#12])\n\n\n\n### Breaking changes\n\n\n\n- Switch to the `embedded-hal` v2 digital pin trait.\n\n\n\n## [v0.1.5] - 2019-06-11\n\n\n\n### Added\n\n\n\n- Support for GPIO AF14 ([#6])\n\n\n\n## [v0.1.4] - 2019-05-04\n\n\n\n### Fixed\n\n\n\n- Fixed I2C address ([#4])\n\n\n\n## [v0.1.3] - 2019-04-12\n\n\n\n### Added\n\n\n\n- Implement GPIO `InputPin` traits ([#2])\n\n\n\n## [v0.1.2] - 2019-04-06\n\n\n\n### Added\n\n\n\n- Support `stm32f328`, `stm32f358` and `stm32f398` devices\n\n- Support `stm32f334` device\n\n- Support `stm32f378` device\n\n- Support `stm32f373` device\n\n\n\n## [v0.1.1] - 2019-03-31\n\n\n\n- Support `stm32f301` and `stm32f318` devices\n\n- Support `stm32f302` device\n\n\n", "file_path": "CHANGELOG.md", "rank": 84, "score": 19597.735957314908 }, { "content": "### Possible chips\n\n\n\n[comment]: # (Any changes here should be mirrored in src/lib.rs)\n\n\n\nNote: `x` denotes any character in [a-z]\n\n\n\n* stm32f301xb\n\n* stm32f301xc\n\n* stm32f301xd\n\n* stm32f301xe\n\n* stm32f318\n\n* stm32f302xb\n\n* stm32f302xc\n\n* stm32f302xd\n\n* stm32f302xe\n\n* stm32f302x6\n\n* stm32f302x8\n\n* stm32f303xb\n\n* stm32f303xc\n\n* stm32f303xd\n\n* stm32f303xe\n\n* stm32f303x6\n\n* stm32f303x8\n\n* stm32f373\n\n* stm32f378\n\n* stm32f334\n\n* stm32f328\n\n* stm32f358\n\n* stm32f398\n\n\n\n### Background\n\n\n\nFor some of the stm32f3xx chips there are sub-variants that differ in\n\nfunctionality, peripheral use and hence 'under the hood' implementation. 
To\n\nallow the full use of all peripherals on certain subvariants without\n\nallowing for code that just doesn't run on other sub-vairants, they are\n\ndistinct features that need to be specified.\n\n\n\nAs this crate is still under fundamental development, expect more\n\nsub-variants replacing the plain variants in the future as we are\n\nimplementing more stuff. It is not desired to allow the plain variants to\n\nbe used as this leads to confusion.\n\n*Example: the stm32f303xc has a gpio_e bank while the stm32f303x6 does\n\nnot. Hence we don't want to expose the gpoio_e bank on all stm32f303 (i.e.\n\nwhen specifying the feature stm32f303) albeit a stm32f303xc user would\n\nexpect it to do so.*\n\n\n", "file_path": "README.md", "rank": 85, "score": 19593.206971958884 }, { "content": "### Detailed steps to select the right chip\n\n\n\n1. Get the full name of the chip you are using from your datasheet, user manual\n\n or other source.\n\n\n\n _Example_:\n\n\n\n We want to use the STM32F3Discovery kit.\n\n The [Usermanual][] tells us it's using a STM32F303VC chip.\n\n\n\n2. Find your chip as a feature in the list above.\n\n\n\n _Example_:\n\n\n\n Looking for the right feature for our STM32F303VC chip we first find\n\n `stm32f301xb`. This is the wrong chip, as we're not looking for `f301` but\n\n for `f303`.\n\n\n\n Looking further we find `stm32f303xc`. This matches STM32F303VC\n\n (note that VC → xc).\n\n\n\n3. Add the chip name as a feature to your cargo call.\n\n\n\n _Example_:\n\n\n\n Using the STM32F303VC chip we run `cargo check --features stm32f303xc`.\n\n\n\n[Usermanual]: https://www.st.com/content/ccc/resource/technical/document/user_manual/8a/56/97/63/8d/56/41/73/DM00063382.pdf/files/DM00063382.pdf/jcr:content/translations/en.DM00063382.pdf\n\n\n\n## License\n\n\n\n[0-clause BSD license](LICENSE-0BSD.txt).\n", "file_path": "README.md", "rank": 86, "score": 19590.261924711675 }, { "content": "## [v0.1.0] - 2019-03-31\n\n\n\n- Support `stm32f303` device\n\n\n\n[#184]: https://github.com/stm32-rs/stm32f3xx-hal/pull/184\n\n[#172]: https://github.com/stm32-rs/stm32f3xx-hal/pull/172\n\n[#170]: https://github.com/stm32-rs/stm32f3xx-hal/pull/170\n\n[#164]: https://github.com/stm32-rs/stm32f3xx-hal/pull/164\n\n[#164]: https://github.com/stm32-rs/stm32f3xx-hal/pull/164\n\n[#164]: https://github.com/stm32-rs/stm32f3xx-hal/pull/164\n\n[#164]: https://github.com/stm32-rs/stm32f3xx-hal/pull/164\n\n[#164]: https://github.com/stm32-rs/stm32f3xx-hal/pull/164\n\n[#156]: https://github.com/stm32-rs/stm32f3xx-hal/pull/156\n\n[#154]: https://github.com/stm32-rs/stm32f3xx-hal/pull/154\n\n[#152]: https://github.com/stm32-rs/stm32f3xx-hal/pull/152\n\n[#151]: https://github.com/stm32-rs/stm32f3xx-hal/issues/151\n\n[#139]: https://github.com/stm32-rs/stm32f3xx-hal/pull/139\n\n[#136]: https://github.com/stm32-rs/stm32f3xx-hal/pull/136\n\n[#132]: https://github.com/stm32-rs/stm32f3xx-hal/pull/132\n\n[#129]: https://github.com/stm32-rs/stm32f3xx-hal/pull/129\n\n[#116]: https://github.com/stm32-rs/stm32f3xx-hal/pull/116\n\n[#114]: https://github.com/stm32-rs/stm32f3xx-hal/pull/114\n\n[#107]: https://github.com/stm32-rs/stm32f3xx-hal/pull/107\n\n[#101]: https://github.com/stm32-rs/stm32f3xx-hal/pull/101\n\n[#100]: https://github.com/stm32-rs/stm32f3xx-hal/pull/100\n\n[#99]: https://github.com/stm32-rs/stm32f3xx-hal/pull/99\n\n[#99]: https://github.com/stm32-rs/stm32f3xx-hal/pull/99\n\n[#99]: https://github.com/stm32-rs/stm32f3xx-hal/pull/99\n\n[#98]: https://github.com/stm32-rs/stm32f3xx-hal/pull/98\n\n[#97]: 
https://github.com/stm32-rs/stm32f3xx-hal/pull/97\n\n[#91]: https://github.com/stm32-rs/stm32f3xx-hal/pull/91\n\n[#86]: https://github.com/stm32-rs/stm32f3xx-hal/pull/86\n\n[#86]: https://github.com/stm32-rs/stm32f3xx-hal/pull/86\n\n[#82]: https://github.com/stm32-rs/stm32f3xx-hal/pull/82\n\n[#75]: https://github.com/stm32-rs/stm32f3xx-hal/pull/75\n\n[#72]: https://github.com/stm32-rs/stm32f3xx-hal/pull/72\n\n[#70]: https://github.com/stm32-rs/stm32f3xx-hal/pull/70\n\n[#67]: https://github.com/stm32-rs/stm32f3xx-hal/pull/67\n\n[#67]: https://github.com/stm32-rs/stm32f3xx-hal/pull/67\n\n[#60]: https://github.com/stm32-rs/stm32f3xx-hal/pull/60\n\n[#58]: https://github.com/stm32-rs/stm32f3xx-hal/pull/58\n\n[#56]: https://github.com/stm32-rs/stm32f3xx-hal/pull/56\n\n[#52]: https://github.com/stm32-rs/stm32f3xx-hal/pull/52\n\n[#50]: https://github.com/stm32-rs/stm32f3xx-hal/pull/50\n\n[#50]: https://github.com/stm32-rs/stm32f3xx-hal/pull/50\n\n[#47]: https://github.com/stm32-rs/stm32f3xx-hal/pull/47\n\n[#42]: https://github.com/stm32-rs/stm32f3xx-hal/pull/42\n\n[#39]: https://github.com/stm32-rs/stm32f3xx-hal/pull/39\n\n[#35]: https://github.com/stm32-rs/stm32f3xx-hal/pull/18\n\n[#35]: https://github.com/stm32-rs/stm32f3xx-hal/pull/35\n\n[#34]: https://github.com/stm32-rs/stm32f3xx-hal/pull/34\n\n[#33]: https://github.com/stm32-rs/stm32f3xx-hal/pull/33\n\n[#31]: https://github.com/stm32-rs/stm32f3xx-hal/pull/33\n\n[#25]: https://github.com/stm32-rs/stm32f3xx-hal/pull/25\n\n[#24]: https://github.com/stm32-rs/stm32f3xx-hal/pull/24\n\n[#21]: https://github.com/stm32-rs/stm32f3xx-hal/pull/21\n\n[#19]: https://github.com/stm32-rs/stm32f3xx-hal/pull/19\n\n[#18]: https://github.com/stm32-rs/stm32f3xx-hal/pull/18\n\n[#17]: https://github.com/stm32-rs/stm32f3xx-hal/pull/17\n\n[#16]: https://github.com/stm32-rs/stm32f3xx-hal/pull/16\n\n[#14]: https://github.com/stm32-rs/stm32f3xx-hal/pull/14\n\n[#12]: https://github.com/stm32-rs/stm32f3xx-hal/pull/12\n\n[#12]: https://github.com/stm32-rs/stm32f3xx-hal/pull/12\n\n[#11]: https://github.com/stm32-rs/stm32f3xx-hal/pull/11\n\n[#6]: https://github.com/stm32-rs/stm32f3xx-hal/pull/6\n\n[#4]: https://github.com/stm32-rs/stm32f3xx-hal/pull/4\n", "file_path": "CHANGELOG.md", "rank": 87, "score": 19588.554694407416 }, { "content": "[#2]: https://github.com/stm32-rs/stm32f3xx-hal/pull/2\n\n\n\n[Unreleased]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.6.1...HEAD\n\n[v0.6.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.6.0...v0.6.1\n\n[v0.6.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.5.0...v0.6.0\n\n[v0.5.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.4.3...v0.5.0\n\n[v0.4.3]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.4.2...v0.4.3\n\n[v0.4.2]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.4.1...v0.4.2\n\n[v0.4.1]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.4.0...v0.4.1\n\n[v0.4.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.3.0...v0.4.0\n\n[v0.3.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.2.3...v0.3.0\n\n[v0.2.3]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.2.2...v0.2.3\n\n[v0.2.2]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.2.1...v0.2.2\n\n[v0.2.1]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.2.0...v0.2.1\n\n[v0.2.0]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.1.5...v0.2.0\n\n[v0.1.5]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.1.4...v0.1.5\n\n[v0.1.4]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.1.3...v0.1.4\n\n[v0.1.3]: 
https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.1.2...v0.1.3\n\n[v0.1.2]: https://github.com/stm32-rs/stm32f3xx-hal/compare/v0.1.1...v0.1.2\n\n[v0.1.1]: https://github.com/stm32-rs/stm32f3xx-hal/releases/tag/v0.1.1\n", "file_path": "CHANGELOG.md", "rank": 88, "score": 19587.73883004799 }, { "content": "# Codegen\n\n\n\nThis crate provides code-generation for the stm32f3xx-hal. It reads information\n\nfrom an [STM32CubeMX](https://www.st.com/en/development-tools/stm32cubemx.html)\n\ndatabase and uses that to output code that can directly be included into the\n\nsource code of the stm32f3xx-hal crate.\n\n\n\nFor more information on how the STM32CubeMX database is structured, check out\n\nthe README in the [cube-parse](https://github.com/dbrgn/cube-parse) repository.\n\n\n\nBecause by default cargo tries to use the `x86_64-unknown-linux-gnu` target,\n\nwhen building `codegen`, due to what's specified in the `.cargo/config`, you\n\nneed to manually specify your host's target if it differs from that, e.g.:\n\n\n\n```bash\n\n$ cargo run --target x86_64-apple-darwin -- help\n\n```\n\n\n\n`codgen` can generate the following code:\n\n\n\n- [GPIO mappings](#gpio-mappings)\n\n\n\n## GPIO mappings\n\n\n\nRunning `codegen`'s `gpio` subcommand generates the `gpio!` macro\n\ninvocations at the end of `src/gpio.rs`. Re-generating those macro-invocations\n\nis simply a matter of deleting the old ones and then executing:\n\n\n\n```bash\n\n$ cargo run -- gpio $cubemx_db_path >> ../src/gpio.rs\n\n```\n\n\n\n`$cubemx_db_path` must be the path to the `db/` directory under an\n\nSTM32CubeMX installation. With a default Linux install, this would be\n\n`/opt/stm32cubemx/db`.\n\n\n\nThe generated `gpio!` invocations are gated by features whose names are derived\n\nfrom the respective GPIO internal peripheral (IP) version:\n\n\n\n- gpio-f302\n\n- gpio-f303\n\n- gpio-f303e\n\n- gpio-f333\n\n- gpio-f373\n\n\n\n`codegen` collects those IP versions from the relevant GPIO IP description\n\nfiles (located at `$cubemx_db_path/mcu/IP/GPIO-*.xml`). The root `<IP>` element\n\nhas a `Version` attribute with a value in the form of \"STM32Fxxx_gpio_v1_0\".\n\nThe feature name is constructed by dropping the parts constant between all\n\nversion strings and prepending \"gpio-\".\n\n\n\nNote that the GPIO IP version names don't necessarily match the MCUs they are\n\nused in. For example, the GPIOs in `STM32F302xB` MCUs have the IP version\n\n\"STM32F303_gpio_v1_0\". 
The MCU features of the `stm32f3xx-hal` also select the\n\ncorrect `gpio-*` features, so users generally don't have to care about these\n\ndetails.\n", "file_path": "codegen/README.md", "rank": 89, "score": 18894.468656338984 }, { "content": " nb::Error::Other(Error::Overrun)\n\n } else if sr.modf().is_fault() {\n\n nb::Error::Other(Error::ModeFault)\n\n } else if sr.crcerr().is_no_match() {\n\n nb::Error::Other(Error::Crc)\n\n } else if sr.txe().is_empty() {\n\n let write_ptr = &self.spi.dr as *const _ as *mut WORD;\n\n // NOTE(unsafe) write to register owned by this Spi struct\n\n unsafe { ptr::write_volatile(write_ptr, word) };\n\n return Ok(());\n\n } else {\n\n nb::Error::WouldBlock\n\n })\n\n }\n\n }\n\n\n\n impl<PINS, WORD> crate::hal::blocking::spi::transfer::Default<WORD> for Spi<$SPIX, PINS, WORD> {}\n\n impl<PINS, WORD> crate::hal::blocking::spi::write::Default<WORD> for Spi<$SPIX, PINS, WORD> {}\n\n )+\n\n }\n", "file_path": "src/spi.rs", "rank": 90, "score": 31.561003394129642 }, { "content": "///\n\n/// # Safety\n\n///\n\n/// `C` must be the correct DMA channel for the peripheral implementing\n\n/// this trait.\n\npub unsafe trait OnChannel<C: Channel>: Target {}\n\n\n\nmacro_rules! on_channel {\n\n (\n\n $dma:ident,\n\n $( $target:ty => $C:ident, )+\n\n ) => {\n\n $( unsafe impl OnChannel<$dma::$C> for $target {} )+\n\n };\n\n}\n\n\n\non_channel!(dma1,\n\n serial::Rx<pac::USART1> => C5,\n\n serial::Tx<pac::USART1> => C4,\n\n serial::Rx<pac::USART2> => C6,\n\n serial::Tx<pac::USART2> => C7,\n\n serial::Rx<pac::USART3> => C3,\n\n serial::Tx<pac::USART3> => C2,\n\n);\n", "file_path": "src/dma.rs", "rank": 91, "score": 30.961326525778055 }, { "content": "impl embedded_hal_can::Transmitter for CanTransmitter {\n\n fn transmit(\n\n &mut self,\n\n frame: &Self::Frame,\n\n ) -> Result<Option<Self::Frame>, nb::Error<Self::Error>> {\n\n let can = unsafe { &*stm32::CAN::ptr() };\n\n\n\n for tx_idx in 0..3 {\n\n let free = match tx_idx {\n\n 0 => can.tsr.read().tme0().bit_is_set(),\n\n 1 => can.tsr.read().tme1().bit_is_set(),\n\n 2 => can.tsr.read().tme2().bit_is_set(),\n\n _ => crate::unreachable!(),\n\n };\n\n\n\n if !free {\n\n continue;\n\n }\n\n\n\n let tx = &can.tx[tx_idx];\n", "file_path": "src/can.rs", "rank": 92, "score": 30.624379635566946 }, { "content": " _0: (),\n\n}\n\n\n\nimpl APB2 {\n\n pub(crate) fn enr(&mut self) -> &rcc::APB2ENR {\n\n // NOTE(unsafe) this proxy grants exclusive access to this register\n\n unsafe { &(*RCC::ptr()).apb2enr }\n\n }\n\n\n\n pub(crate) fn rstr(&mut self) -> &rcc::APB2RSTR {\n\n // NOTE(unsafe) this proxy grants exclusive access to this register\n\n unsafe { &(*RCC::ptr()).apb2rstr }\n\n }\n\n}\n\n\n\nconst HSI: u32 = 8_000_000; // Hz\n\n\n\n// some microcontrollers do not have USB\n\n#[cfg(any(feature = \"stm32f301\", feature = \"stm32f318\", feature = \"stm32f334\",))]\n\nmod usb_clocking {\n", "file_path": "src/rcc.rs", "rank": 93, "score": 30.294496411836384 }, { "content": " /// Overrun occurred\n\n Overrun,\n\n /// Mode fault occurred\n\n ModeFault,\n\n /// CRC error\n\n Crc,\n\n}\n\n\n\n// FIXME these should be \"closed\" traits\n\n/// SCK pin -- DO NOT IMPLEMENT THIS TRAIT\n\npub unsafe trait SckPin<SPI> {}\n\n\n\n/// MISO pin -- DO NOT IMPLEMENT THIS TRAIT\n\npub unsafe trait MisoPin<SPI> {}\n\n\n\n/// MOSI pin -- DO NOT IMPLEMENT THIS TRAIT\n\npub unsafe trait MosiPin<SPI> {}\n\n\n\nunsafe impl SckPin<SPI1> for PA5<AF5> {}\n\n#[cfg(any(feature = \"stm32f373\", feature = \"stm32f378\"))]\n", "file_path": "src/spi.rs", "rank": 94, 
"score": 29.98646851206785 }, { "content": "//! Prelude\n\n\n\n#[cfg(any(feature = \"stm32f302\", feature = \"stm32f303\"))]\n\npub use crate::dma::DmaExt as _stm32f3xx_hal_dma_DmaExt;\n\npub use crate::flash::FlashExt as _stm32f3xx_hal_flash_FlashExt;\n\npub use crate::gpio::GpioExt as _stm32f3xx_hal_gpio_GpioExt;\n\npub use crate::hal::prelude::*;\n\npub use crate::rcc::RccExt as _stm32f3xx_hal_rcc_RccExt;\n\npub use crate::time::U32Ext as _stm32f3xx_hal_time_U32Ext;\n\n#[cfg(feature = \"unproven\")]\n\npub use crate::{\n\n hal::digital::v2::InputPin as _embedded_hal_digital_InputPin,\n\n hal::digital::v2::OutputPin as _embedded_hal_digital_OutputPin,\n\n hal::digital::v2::StatefulOutputPin as _embedded_hal_digital_StatefulOutputPin,\n\n hal::digital::v2::ToggleableOutputPin as _embedded_hal_digital_ToggleableOutputPin,\n\n};\n", "file_path": "src/prelude.rs", "rank": 95, "score": 28.44182174515146 }, { "content": "\n\n /// Opaque AFRL register\n\n pub struct AFRL {\n\n _0: (),\n\n }\n\n\n\n impl AFRL {\n\n // A couple device/port combos have no valid alternate functions:\n\n // - stm32f303 GPIOG and GPIOH\n\n // - stm32f318 GPIOC, GPIOD, and GPIOE\n\n // - stm32f328 GPIOE\n\n #[allow(dead_code)]\n\n pub(crate) fn afr(&mut self) -> &$gpioy::AFRL {\n\n unsafe { &(*$GPIOX::ptr()).afrl }\n\n }\n\n }\n\n\n\n /// Opaque AFRH register\n\n pub struct AFRH {\n\n _0: (),\n", "file_path": "src/gpio.rs", "rank": 96, "score": 27.66298156852678 }, { "content": " /// Releases the SPI peripheral and associated pins\n\n pub fn free(self) -> ($SPIX, (SCK, MISO, MOSI)) {\n\n (self.spi, self.pins)\n\n }\n\n\n\n /// Change the baud rate of the SPI\n\n pub fn reclock<F>(&mut self, freq: F, clocks: Clocks)\n\n where F: Into<Hertz>\n\n {\n\n self.spi.cr1.modify(|_, w| w.spe().disabled());\n\n self.spi.cr1.modify(|_, w| {\n\n w.br().variant(Self::compute_baud_rate(clocks.$pclkX(), freq.into()));\n\n w.spe().enabled()\n\n });\n\n }\n\n\n\n fn compute_baud_rate(clocks: Hertz, freq: Hertz) -> spi1::cr1::BR_A {\n\n use spi1::cr1::BR_A;\n\n match clocks.0 / freq.0 {\n\n 0 => crate::unreachable!(),\n", "file_path": "src/spi.rs", "rank": 97, "score": 27.62142023465529 }, { "content": " // NOTE(unsafe) this proxy grants exclusive access to this register\n\n unsafe { &(*RCC::ptr()).ahbrstr }\n\n }\n\n}\n\n\n\n/// Advanced Peripheral Bus 1 (APB1) registers\n\n///\n\n/// An instance of this struct is acquired from the [`RCC`](crate::pac::RCC) struct.\n\n///\n\n/// ```\n\n/// let dp = pac::Peripherals::take().unwrap();\n\n/// let rcc = dp.RCC.constrain();\n\n/// use_apb1(&mut rcc.apb1)\n\n/// ```\n\npub struct APB1 {\n\n _0: (),\n\n}\n\n\n\nimpl APB1 {\n\n pub(crate) fn enr(&mut self) -> &rcc::APB1ENR {\n", "file_path": "src/rcc.rs", "rank": 98, "score": 27.321743384472775 }, { "content": " // Overrun, // slave mode only\n\n // Pec, // SMBUS mode only\n\n // Timeout, // SMBUS mode only\n\n // Alert, // SMBUS mode only\n\n}\n\n\n\n// FIXME these should be \"closed\" traits\n\n/// SCL pin -- DO NOT IMPLEMENT THIS TRAIT\n\npub unsafe trait SclPin<I2C> {}\n\n\n\n/// SDA pin -- DO NOT IMPLEMENT THIS TRAIT\n\npub unsafe trait SdaPin<I2C> {}\n\n\n\nunsafe impl SclPin<I2C1> for gpioa::PA15<AF4> {}\n\nunsafe impl SclPin<I2C1> for gpiob::PB6<AF4> {}\n\nunsafe impl SclPin<I2C1> for gpiob::PB8<AF4> {}\n\nunsafe impl SdaPin<I2C1> for gpioa::PA14<AF4> {}\n\nunsafe impl SdaPin<I2C1> for gpiob::PB7<AF4> {}\n\nunsafe impl SdaPin<I2C1> for gpiob::PB9<AF4> {}\n\n\n", "file_path": "src/i2c.rs", "rank": 99, "score": 26.711920888822036 } ]
Rust
src/processor/mod.rs
gluwa/Sawtooth-SDK-Rust
8a3d99fa2c82eb3131148931a819ffca46d78c66
/* * Copyright 2017 Bitwise IO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ #![allow(unknown_lints)] extern crate ctrlc; extern crate protobuf; extern crate rand; extern crate zmq; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::mpsc::RecvTimeoutError; use std::sync::Arc; use std::time::Duration; use self::rand::Rng; pub mod handler; mod zmq_context; use crate::messages::network::PingResponse; use crate::messages::processor::TpProcessRequest; use crate::messages::processor::TpProcessResponse; use crate::messages::processor::TpProcessResponse_Status; use crate::messages::processor::TpRegisterRequest; use crate::messages::processor::TpUnregisterRequest; use crate::messages::validator::Message_MessageType; use crate::messaging::stream::MessageSender; use crate::messaging::stream::ReceiveError; use crate::messaging::stream::SendError; use crate::messaging::stream::{MessageConnection, MessageReceiver}; use crate::messaging::zmq_stream::ZmqMessageConnection; use crate::messaging::zmq_stream::ZmqMessageSender; use protobuf::Message as M; use protobuf::RepeatedField; use rand::distributions::Alphanumeric; use self::handler::TransactionHandler; use self::handler::{ApplyError, TransactionContext}; use self::zmq_context::ZmqTransactionContext; fn generate_correlation_id() -> String { const LENGTH: usize = 16; rand::thread_rng() .sample_iter(Alphanumeric) .take(LENGTH) .map(char::from) .collect() } pub struct EmptyTransactionContext { inner: Arc<InnerEmptyContext>, } impl Clone for EmptyTransactionContext { fn clone(&self) -> Self { Self { inner: Arc::clone(&self.inner), } } } struct InnerEmptyContext { context: Box<dyn TransactionContext + Send + Sync>, _sender: ZmqMessageSender, _receiver: std::sync::Mutex<MessageReceiver>, } impl EmptyTransactionContext { fn new(conn: &ZmqMessageConnection, timeout: Option<Duration>) -> Self { let (_sender, _receiver) = conn.create(); Self { inner: Arc::new(InnerEmptyContext { context: Box::new(ZmqTransactionContext::with_timeout( "", _sender.clone(), timeout, )), _receiver: std::sync::Mutex::new(_receiver), _sender, }), } } pub fn flush(&self) { if let Ok(rx) = self.inner._receiver.try_lock() { if let Ok(Ok(msg)) = rx.recv_timeout(Duration::from_millis(100)) { log::info!("Empty context received message : {:?}", msg); } } } } impl TransactionContext for EmptyTransactionContext { fn get_state_entries( &self, _addresses: &[String], ) -> Result<Vec<(String, Vec<u8>)>, handler::ContextError> { panic!("unsupported for an empty context") } fn set_state_entries( &self, _entries: Vec<(String, Vec<u8>)>, ) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn delete_state_entries( &self, _addresses: &[String], ) -> Result<Vec<String>, handler::ContextError> { panic!("unsupported for an empty context") } fn add_receipt_data(&self, _data: &[u8]) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn add_event( &self, 
_event_type: String, _attributes: Vec<(String, String)>, _data: &[u8], ) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn get_sig_by_num(&self, block_num: u64) -> Result<String, handler::ContextError> { self.inner.context.get_sig_by_num(block_num) } fn get_reward_block_signatures( &self, block_id: &str, first_pred: u64, last_pred: u64, ) -> Result<Vec<String>, handler::ContextError> { self.inner .context .get_reward_block_signatures(block_id, first_pred, last_pred) } fn get_state_entries_by_prefix( &self, tip_id: &str, address: &str, ) -> Result<Vec<(String, Vec<u8>)>, handler::ContextError> { self.inner .context .get_state_entries_by_prefix(tip_id, address) } } pub struct TransactionProcessor<'a> { endpoint: String, conn: ZmqMessageConnection, handlers: Vec<&'a dyn TransactionHandler>, empty_contexts: Vec<EmptyTransactionContext>, } impl<'a> TransactionProcessor<'a> { pub fn new(endpoint: &str) -> TransactionProcessor { TransactionProcessor { endpoint: String::from(endpoint), conn: ZmqMessageConnection::new(endpoint), handlers: Vec::new(), empty_contexts: Vec::new(), } } pub fn add_handler(&mut self, handler: &'a dyn TransactionHandler) { self.handlers.push(handler); } pub fn empty_context(&mut self, timeout: Option<Duration>) -> EmptyTransactionContext { let context = EmptyTransactionContext::new(&self.conn, timeout); let context_cp = context.clone(); self.empty_contexts.push(context); context_cp } fn register(&mut self, sender: &ZmqMessageSender, unregister: &Arc<AtomicBool>) -> bool { for handler in &self.handlers { for version in handler.family_versions() { let mut request = TpRegisterRequest::new(); request.set_family(handler.family_name().clone()); request.set_version(version.clone()); request.set_namespaces(RepeatedField::from_vec(handler.namespaces().clone())); info!( "sending TpRegisterRequest: {} {}", &handler.family_name(), &version ); let serialized = match request.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); return false; } }; let x: &[u8] = &serialized; let mut future = match sender.send( Message_MessageType::TP_REGISTER_REQUEST, &generate_correlation_id(), x, ) { Ok(fut) => fut, Err(err) => { error!("Registration failed: {}", err); return false; } }; loop { match future.get_timeout(Duration::from_millis(10000)) { Ok(_) => break, Err(_) => { if unregister.load(Ordering::SeqCst) { return false; } } }; } } } true } fn unregister(&mut self, sender: &ZmqMessageSender) { let request = TpUnregisterRequest::new(); info!("sending TpUnregisterRequest"); let serialized = match request.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); return; } }; let x: &[u8] = &serialized; let mut future = match sender.send( Message_MessageType::TP_UNREGISTER_REQUEST, &generate_correlation_id(), x, ) { Ok(fut) => fut, Err(err) => { error!("Unregistration failed: {}", err); return; } }; match future.get_timeout(Duration::from_millis(1000)) { Ok(_) => (), Err(err) => { info!("Unregistration failed: {}", err); } }; } #[allow(clippy::cognitive_complexity)] pub fn start(&mut self) { let unregister = Arc::new(AtomicBool::new(false)); let r = unregister.clone(); ctrlc::set_handler(move || { r.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut first_time = true; let mut restart = true; while restart { info!("connecting to endpoint: {}", self.endpoint); if first_time { first_time = false; } else { self.conn = 
ZmqMessageConnection::new(&self.endpoint); } let (mut sender, receiver) = self.conn.create(); if unregister.load(Ordering::SeqCst) { self.unregister(&sender); restart = false; continue; } if !self.register(&sender, &unregister.clone()) { continue; } loop { if unregister.load(Ordering::SeqCst) { self.unregister(&sender); restart = false; break; } match receiver.recv_timeout(Duration::from_millis(1000)) { Ok(r) => { let message = match r { Ok(message) => message, Err(ReceiveError::DisconnectedError) => { info!("Trying to Reconnect"); break; } Err(err) => { error!("Error: {}", err); continue; } }; trace!("Message: {}", message.get_correlation_id()); match message.get_message_type() { Message_MessageType::TP_PROCESS_REQUEST => { let request = match TpProcessRequest::parse_from_bytes( &message.get_content(), ) { Ok(request) => request, Err(err) => { error!("Cannot parse TpProcessRequest: {}", err); continue; } }; let mut context = ZmqTransactionContext::new( request.get_context_id(), sender.clone(), ); let mut response = TpProcessResponse::new(); match self.handlers[0].apply(&request, &mut context) { Ok(()) => { info!("TP_PROCESS_REQUEST sending TpProcessResponse: OK"); response.set_status(TpProcessResponse_Status::OK); } Err(ApplyError::InvalidTransaction(msg)) => { info!( "TP_PROCESS_REQUEST sending TpProcessResponse: {}", &msg ); response.set_status( TpProcessResponse_Status::INVALID_TRANSACTION, ); response.set_message(msg); } Err(err) => { info!( "TP_PROCESS_REQUEST sending TpProcessResponse: {}", err ); response .set_status(TpProcessResponse_Status::INTERNAL_ERROR); response.set_message(err.to_string()); } }; let serialized = match response.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); continue; } }; match sender.reply( Message_MessageType::TP_PROCESS_RESPONSE, message.get_correlation_id(), &serialized, ) { Ok(_) => (), Err(SendError::DisconnectedError) => { error!("DisconnectedError"); break; } Err(SendError::TimeoutError) => error!("TimeoutError"), Err(SendError::UnknownError(e)) => { restart = false; error!("UnknownError: {}", e); break; } }; } Message_MessageType::PING_REQUEST => { trace!("sending PingResponse"); let response = PingResponse::new(); let serialized = match response.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); continue; } }; match sender.reply( Message_MessageType::PING_RESPONSE, message.get_correlation_id(), &serialized, ) { Ok(_) => (), Err(SendError::DisconnectedError) => { error!("DisconnectedError"); break; } Err(SendError::TimeoutError) => error!("TimeoutError"), Err(SendError::UnknownError(e)) => { restart = false; error!("UnknownError: {}", e); break; } }; } _ => { info!( "Transaction Processor recieved invalid message type: {:?}", message.get_message_type() ); } } } Err(RecvTimeoutError::Timeout) => (), Err(err) => { error!("Error: {}", err); } } } sender.close(); } } }
/* * Copyright 2017 Bitwise IO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ #![allow(unknown_lints)] extern crate ctrlc; extern crate protobuf; extern crate rand; extern crate zmq; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::mpsc::RecvTimeoutError; use std::sync::Arc; use std::time::Duration; use self::rand::Rng; pub mod handler; mod zmq_context; use crate::messages::network::PingResponse; use crate::messages::processor::TpProcessRequest; use crate::messages::processor::TpProcessResponse; use crate::messages::processor::TpProcessResponse_Status; use crate::messages::processor::TpRegisterRequest; use crate::messages::processor::TpUnregisterRequest; use crate::messages::validator::Message_MessageType; use crate::messaging::stream::MessageSender; use crate::messaging::stream::ReceiveError; use crate::messaging::stream::SendError; use crate::messaging::stream::{MessageConnection, MessageReceiver}; use crate::messaging::zmq_stream::ZmqMessageConnection; use crate::messaging::zmq_stream::ZmqMessageSender; use protobuf::Message as M; use protobuf::RepeatedField; use rand::distributions::Alphanumeric; use self::handler::TransactionHandler; use self::handler::{ApplyError, TransactionContext}; use self::zmq_context::ZmqTransactionContext; fn generate_correlation_id() -> String { const LENGTH: usize = 16; rand::thread_rng() .sample_iter(Alphanumeric) .take(LENGTH) .map(char::from) .collect() } pub struct EmptyTransactionContext { inner: Arc<InnerEmptyContext>, } impl Clone for EmptyTransactionContext { fn clone(&self) -> Self { Self { inner: Arc::clone(&self.inner), } } } struct InnerEmptyContext { context: Box<dyn TransactionContext + Send + Sync>, _sender: ZmqMessageSender, _receiver: std::sync::Mutex<MessageReceiver>, } impl EmptyTransactionContext { fn new(conn: &ZmqMessageConnection, timeout: Option<Duration>) -> Self { let (_sender, _receiver) = conn.create(); Self { inner: Arc::new(InnerEmptyContext { context: Box::new(ZmqTransactionContext::with_timeout( "", _sender.clone(), timeout, )), _receiver: std::sync::Mutex::new(_receiver), _sender, }), } } pub fn flush(&self) { if let Ok(rx) = self.inner._receiver.try_lock() { if let Ok(Ok(msg)) = rx.recv_timeout(Duration::from_millis(100)) { log::info!("Empty context received message : {:?}", msg); } } } } impl TransactionContext for EmptyTransactionContext { fn get_state_entries( &self, _addresses: &[String], ) -> Result<Vec<(String, Vec<u8>)>, handler::ContextError> { panic!("unsupported for an empty context") } fn set_state_entries( &self, _entries: Vec<(String, Vec<u8>)>, ) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn delete_state_entries( &self, _addresses: &[String], ) -> Result<Vec<String>, handler::ContextError> { panic!("unsupported for an empty context") } fn add_receipt_data(&self, _data: &[u8]) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn add_event( &self, 
_event_type: String, _attributes: Vec<(String, String)>, _data: &[u8], ) -> Result<(), handler::ContextError> { panic!("unsupported for an empty context") } fn get_sig_by_num(&self, block_num: u64) -> Result<String, handler::ContextError> { self.inner.context.get_sig_by_num(block_num) } fn get_reward_block_signatures( &self, block_id: &str, first_pred: u64, last_pred: u64, ) -> Result<Vec<String>, handler::ContextError> { self.inner .context .get_reward_block_signatures(block_id, first_pred, last_pred) } fn get_state_entries_by_prefix( &self, tip_id: &str, address: &str, ) -> Result<Vec<(String, Vec<u8>)>, handler::ContextError> { self.inner .context .get_state_entries_by_prefix(tip_id, address) } } pub struct TransactionProcessor<'a> { endpoint: String, conn: ZmqMessageConnection, handlers: Vec<&'a dyn TransactionHandler>, empty_contexts: Vec<EmptyTransactionContext>, } impl<'a> TransactionProcessor<'a> { pub fn new(endpoint: &str) -> TransactionProcessor { TransactionProcessor { endpoint: String::from(endpoint), conn: ZmqMessageConnection::new(endpoint), handlers: Vec::new(), empty_contexts: Vec::new(), } } pub fn add_handler(&mut self, handler: &'a dyn TransactionHandler) { self.handlers.push(handler); } pub fn empty_context(&mut self, timeout: Option<Duration>) -> EmptyTransactionContext { let context = EmptyTransactionContext::new(&self.conn, timeout); let context_cp = context.clone(); self.empty_contexts.push(context); context_cp }
fn unregister(&mut self, sender: &ZmqMessageSender) { let request = TpUnregisterRequest::new(); info!("sending TpUnregisterRequest"); let serialized = match request.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); return; } }; let x: &[u8] = &serialized; let mut future = match sender.send( Message_MessageType::TP_UNREGISTER_REQUEST, &generate_correlation_id(), x, ) { Ok(fut) => fut, Err(err) => { error!("Unregistration failed: {}", err); return; } }; match future.get_timeout(Duration::from_millis(1000)) { Ok(_) => (), Err(err) => { info!("Unregistration failed: {}", err); } }; } #[allow(clippy::cognitive_complexity)] pub fn start(&mut self) { let unregister = Arc::new(AtomicBool::new(false)); let r = unregister.clone(); ctrlc::set_handler(move || { r.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut first_time = true; let mut restart = true; while restart { info!("connecting to endpoint: {}", self.endpoint); if first_time { first_time = false; } else { self.conn = ZmqMessageConnection::new(&self.endpoint); } let (mut sender, receiver) = self.conn.create(); if unregister.load(Ordering::SeqCst) { self.unregister(&sender); restart = false; continue; } if !self.register(&sender, &unregister.clone()) { continue; } loop { if unregister.load(Ordering::SeqCst) { self.unregister(&sender); restart = false; break; } match receiver.recv_timeout(Duration::from_millis(1000)) { Ok(r) => { let message = match r { Ok(message) => message, Err(ReceiveError::DisconnectedError) => { info!("Trying to Reconnect"); break; } Err(err) => { error!("Error: {}", err); continue; } }; trace!("Message: {}", message.get_correlation_id()); match message.get_message_type() { Message_MessageType::TP_PROCESS_REQUEST => { let request = match TpProcessRequest::parse_from_bytes( &message.get_content(), ) { Ok(request) => request, Err(err) => { error!("Cannot parse TpProcessRequest: {}", err); continue; } }; let mut context = ZmqTransactionContext::new( request.get_context_id(), sender.clone(), ); let mut response = TpProcessResponse::new(); match self.handlers[0].apply(&request, &mut context) { Ok(()) => { info!("TP_PROCESS_REQUEST sending TpProcessResponse: OK"); response.set_status(TpProcessResponse_Status::OK); } Err(ApplyError::InvalidTransaction(msg)) => { info!( "TP_PROCESS_REQUEST sending TpProcessResponse: {}", &msg ); response.set_status( TpProcessResponse_Status::INVALID_TRANSACTION, ); response.set_message(msg); } Err(err) => { info!( "TP_PROCESS_REQUEST sending TpProcessResponse: {}", err ); response .set_status(TpProcessResponse_Status::INTERNAL_ERROR); response.set_message(err.to_string()); } }; let serialized = match response.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); continue; } }; match sender.reply( Message_MessageType::TP_PROCESS_RESPONSE, message.get_correlation_id(), &serialized, ) { Ok(_) => (), Err(SendError::DisconnectedError) => { error!("DisconnectedError"); break; } Err(SendError::TimeoutError) => error!("TimeoutError"), Err(SendError::UnknownError(e)) => { restart = false; error!("UnknownError: {}", e); break; } }; } Message_MessageType::PING_REQUEST => { trace!("sending PingResponse"); let response = PingResponse::new(); let serialized = match response.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); continue; } }; match sender.reply( Message_MessageType::PING_RESPONSE, message.get_correlation_id(), 
&serialized, ) { Ok(_) => (), Err(SendError::DisconnectedError) => { error!("DisconnectedError"); break; } Err(SendError::TimeoutError) => error!("TimeoutError"), Err(SendError::UnknownError(e)) => { restart = false; error!("UnknownError: {}", e); break; } }; } _ => { info!( "Transaction Processor recieved invalid message type: {:?}", message.get_message_type() ); } } } Err(RecvTimeoutError::Timeout) => (), Err(err) => { error!("Error: {}", err); } } } sender.close(); } } }
fn register(&mut self, sender: &ZmqMessageSender, unregister: &Arc<AtomicBool>) -> bool { for handler in &self.handlers { for version in handler.family_versions() { let mut request = TpRegisterRequest::new(); request.set_family(handler.family_name().clone()); request.set_version(version.clone()); request.set_namespaces(RepeatedField::from_vec(handler.namespaces().clone())); info!( "sending TpRegisterRequest: {} {}", &handler.family_name(), &version ); let serialized = match request.write_to_bytes() { Ok(serialized) => serialized, Err(err) => { error!("Serialization failed: {}", err); return false; } }; let x: &[u8] = &serialized; let mut future = match sender.send( Message_MessageType::TP_REGISTER_REQUEST, &generate_correlation_id(), x, ) { Ok(fut) => fut, Err(err) => { error!("Registration failed: {}", err); return false; } }; loop { match future.get_timeout(Duration::from_millis(10000)) { Ok(_) => break, Err(_) => { if unregister.load(Ordering::SeqCst) { return false; } } }; } } } true }
function_block-full_function
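The `register` function captured in the `middle` field above loops over every handler and every version it reports, sending one `TpRegisterRequest` (family, version, namespaces) per pair and polling the reply future until it resolves or shutdown is requested. A sketch of the kind of handler it expects follows, written against the `TransactionHandler` trait quoted in the context items below; the struct name and the family/namespace values are placeholders, and the `sawtooth_sdk` crate paths are assumptions.

```rust
// Placeholder handler illustrating the three values register() reads
// (family_name, family_versions, namespaces) plus the apply() entry point.
use sawtooth_sdk::messages::processor::TpProcessRequest;
use sawtooth_sdk::processor::handler::{ApplyError, TransactionContext, TransactionHandler};

struct ExampleHandler;

impl TransactionHandler for ExampleHandler {
    fn family_name(&self) -> String {
        "example".to_string()
    }

    fn family_versions(&self) -> Vec<String> {
        // register() sends one TpRegisterRequest per version listed here.
        vec!["1.0".to_string()]
    }

    fn namespaces(&self) -> Vec<String> {
        // A six-hex-character state address prefix, as in the xo/intkey examples.
        vec!["abcdef".to_string()]
    }

    fn apply(
        &self,
        _request: &TpProcessRequest,
        _context: &mut dyn TransactionContext,
    ) -> Result<(), ApplyError> {
        // Business logic goes here; Ok(()) maps to TpProcessResponse_Status::OK.
        Ok(())
    }
}
```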
[ { "content": "pub fn create_context(algorithm_name: &str) -> Result<Box<dyn Context>, Error> {\n\n match algorithm_name {\n\n \"secp256k1\" => Ok(Box::new(secp256k1::Secp256k1Context::new())),\n\n _ => Err(Error::NoSuchAlgorithm(format!(\n\n \"no such algorithm: {}\",\n\n algorithm_name\n\n ))),\n\n }\n\n}\n\n/// Factory for generating signers.\n\npub struct CryptoFactory<'a> {\n\n context: &'a dyn Context,\n\n}\n\n\n\nimpl<'a> CryptoFactory<'a> {\n\n /// Constructs a CryptoFactory.\n\n /// # Arguments\n\n ///\n\n /// * `context` - a cryptographic context\n\n pub fn new(context: &'a dyn Context) -> Self {\n", "file_path": "src/signing/mod.rs", "rank": 0, "score": 239241.6055747314 }, { "content": "fn bytes_to_hex_str(b: &[u8]) -> String {\n\n b.iter()\n\n .map(|b| format!(\"{:02x}\", b))\n\n .collect::<Vec<_>>()\n\n .join(\"\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod signing_test {\n\n use super::create_context;\n\n\n\n #[test]\n\n fn no_such_algorithm() {\n\n let result = create_context(\"invalid\");\n\n assert!(result.is_err())\n\n }\n\n}\n", "file_path": "src/signing/mod.rs", "rank": 2, "score": 192853.45910358685 }, { "content": "fn hex_str_to_bytes(s: &str) -> Result<Vec<u8>, Error> {\n\n for (i, ch) in s.chars().enumerate() {\n\n if !ch.is_digit(16) {\n\n return Err(Error::ParseError(format!(\n\n \"invalid character position {}\",\n\n i\n\n )));\n\n }\n\n }\n\n\n\n let input: Vec<_> = s.chars().collect();\n\n\n\n let decoded: Vec<u8> = input\n\n .chunks(2)\n\n .map(|chunk| {\n\n ((chunk[0].to_digit(16).unwrap() << 4) | (chunk[1].to_digit(16).unwrap())) as u8\n\n })\n\n .collect();\n\n\n\n Ok(decoded)\n\n}\n\n\n", "file_path": "src/signing/mod.rs", "rank": 3, "score": 189154.41178236937 }, { "content": "/// Internal stream, guarding a zmq socket.\n\nstruct SendReceiveStream {\n\n address: String,\n\n socket: zmq::Socket,\n\n outbound_recv: Receiver<SocketCommand>,\n\n inbound_router: InboundRouter,\n\n monitor_socket: zmq::Socket,\n\n}\n\n\n\nconst POLL_TIMEOUT: i64 = 10;\n\n\n\nimpl SendReceiveStream {\n\n fn new(\n\n context: &zmq::Context,\n\n address: &str,\n\n outbound_recv: Receiver<SocketCommand>,\n\n inbound_router: InboundRouter,\n\n ) -> Self {\n\n let socket = context.socket(zmq::DEALER).unwrap();\n\n socket\n\n .set_linger(0)\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 4, "score": 175067.0623489542 }, { "content": "pub fn get_xo_prefix() -> String {\n\n let mut sha = Sha512::new();\n\n sha.input_str(\"xo\");\n\n sha.result_str()[..6].to_string()\n\n}\n\n\n\npub struct XoState<'a> {\n\n context: &'a mut dyn TransactionContext,\n\n address_map: HashMap<String, Option<String>>,\n\n}\n\n\n\nimpl<'a> XoState<'a> {\n\n pub fn new(context: &'a mut dyn TransactionContext) -> XoState {\n\n XoState {\n\n context,\n\n address_map: HashMap::new(),\n\n }\n\n }\n\n\n\n fn calculate_address(name: &str) -> String {\n", "file_path": "examples/xo_rust/src/handler/state.rs", "rank": 5, "score": 157091.7574653392 }, { "content": "// Sabre apply must return a bool\n\npub fn apply(\n\n request: &TpProcessRequest,\n\n context: &mut dyn TransactionContext,\n\n) -> Result<bool, ApplyError> {\n\n let handler = XoTransactionHandler::new();\n\n match handler.apply(request, context) {\n\n Ok(_) => Ok(true),\n\n Err(err) => {\n\n info!(\"{}\", err);\n\n Err(err)\n\n }\n\n }\n\n}\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 6, "score": 155233.13784594403 }, { "content": "/// Generates a random correlation id for use in Message\n\nfn generate_correlation_id() -> String {\n\n 
const LENGTH: usize = 16;\n\n rand::thread_rng()\n\n .sample_iter(Alphanumeric)\n\n .take(LENGTH)\n\n .map(char::from)\n\n .collect()\n\n}\n\n\n\npub struct ZmqDriver {\n\n stop_receiver: Receiver<()>,\n\n}\n\n\n\nimpl ZmqDriver {\n\n /// Create a new ZMQ-based Consensus Engine driver and a handle for stopping it\n\n pub fn new() -> (Self, Stop) {\n\n let (stop_sender, stop_receiver) = channel();\n\n let stop = Stop {\n\n sender: stop_sender,\n\n };\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 8, "score": 145849.45762018856 }, { "content": "/// Generates a random correlation id for use in Message\n\nfn generate_correlation_id() -> String {\n\n const LENGTH: usize = 16;\n\n rand::thread_rng()\n\n .sample_iter(Alphanumeric)\n\n .take(LENGTH)\n\n .map(char::from)\n\n .collect()\n\n}\n\n\n\npub struct ZmqService {\n\n sender: ZmqMessageSender,\n\n timeout: Duration,\n\n}\n\n\n\nimpl ZmqService {\n\n pub fn new(sender: ZmqMessageSender, timeout: Duration) -> Self {\n\n ZmqService { sender, timeout }\n\n }\n\n\n\n /// Serialize and send a request, wait for the default timeout, and receive and parse an\n", "file_path": "src/consensus/zmq_service.rs", "rank": 9, "score": 145849.45762018856 }, { "content": "/// A context for a cryptographic signing algorithm.\n\npub trait Context {\n\n /// Returns the algorithm name.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Sign a message\n\n /// Given a private key for this algorithm, sign the given message bytes\n\n /// and return a hex-encoded string of the resulting signature.\n\n /// # Arguments\n\n ///\n\n /// * `message`- the message bytes\n\n /// * `private_key` the private key\n\n ///\n\n /// # Returns\n\n ///\n\n /// * `signature` - The signature in a hex-encoded string\n\n fn sign(&self, message: &[u8], key: &dyn PrivateKey) -> Result<String, Error>;\n\n\n\n /// Verifies that the signature of a message was produced with the\n\n /// associated public key.\n\n /// # Arguments\n\n ///\n", "file_path": "src/signing/mod.rs", "rank": 10, "score": 137362.67632709385 }, { "content": "pub trait TransactionContext {\n\n #[deprecated(\n\n since = \"0.3.0\",\n\n note = \"please use `get_state_entry` or `get_state_entries` instead\"\n\n )]\n\n /// get_state queries the validator state for data at each of the\n\n /// addresses in the given list. The addresses that have been set\n\n /// are returned. get_state is deprecated, please use get_state_entry or get_state_entries\n\n /// instead\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `addresses` - the addresses to fetch\n\n fn get_state(&self, addresses: &[String]) -> Result<Vec<(String, Vec<u8>)>, ContextError> {\n\n self.get_state_entries(addresses)\n\n }\n\n\n\n /// get_state_entry queries the validator state for data at the\n\n /// address given. 
If the address is set, the data is returned.\n\n ///\n", "file_path": "src/processor/handler.rs", "rank": 11, "score": 133412.46157536452 }, { "content": "fn glob_simple(pattern: &str) -> Vec<String> {\n\n glob::glob(pattern)\n\n .expect(\"glob\")\n\n .map(|g| {\n\n g.expect(\"item\")\n\n .as_path()\n\n .to_str()\n\n .expect(\"utf-8\")\n\n .to_owned()\n\n })\n\n .filter(|p| {\n\n if cfg!(feature = \"old-sawtooth\") {\n\n !p.contains(\"processor.proto\")\n\n } else {\n\n !p.contains(\"processor_old.proto\")\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "build.rs", "rank": 12, "score": 132588.16416154732 }, { "content": "pub fn register(\n\n sender: &mut dyn MessageSender,\n\n timeout: Duration,\n\n name: String,\n\n version: String,\n\n additional_protocols: Vec<(String, String)>,\n\n) -> Result<Option<StartupState>, Error> {\n\n let mut request = ConsensusRegisterRequest::new();\n\n request.set_name(name);\n\n request.set_version(version);\n\n request.set_additional_protocols(RepeatedField::from(protocols_from_tuples(\n\n additional_protocols,\n\n )));\n\n let request = request.write_to_bytes()?;\n\n\n\n let mut msg = sender\n\n .send(\n\n Message_MessageType::CONSENSUS_REGISTER_REQUEST,\n\n &generate_correlation_id(),\n\n &request,\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 13, "score": 132055.83241241457 }, { "content": "fn get_intkey_prefix() -> String {\n\n let mut sha = Sha512::new();\n\n sha.input_str(\"intkey\");\n\n sha.result_str()[..6].to_string()\n\n}\n\n\n", "file_path": "examples/intkey_rust/src/handler.rs", "rank": 14, "score": 123629.52372704863 }, { "content": "#[derive(Clone)]\n\nstruct InboundRouter {\n\n inbound_tx: SyncSender<MessageResult>,\n\n expected_replies: Arc<Mutex<HashMap<String, Sender<MessageResult>>>>,\n\n}\n\n\n\nimpl InboundRouter {\n\n fn new(inbound_tx: SyncSender<MessageResult>) -> Self {\n\n InboundRouter {\n\n inbound_tx,\n\n expected_replies: Arc::new(Mutex::new(HashMap::new())),\n\n }\n\n }\n\n fn route(&mut self, message_result: MessageResult) {\n\n match message_result {\n\n Ok(message) => {\n\n let mut expected_replies = self.expected_replies.lock().unwrap();\n\n match expected_replies.remove(message.get_correlation_id()) {\n\n Some(sender) => sender.send(Ok(message)).expect(\"Unable to route reply\"),\n\n None => self\n\n .inbound_tx\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 15, "score": 116862.33500581692 }, { "content": "fn send_ping_reply(\n\n validator_sender: &mut dyn MessageSender,\n\n correlation_id: &str,\n\n) -> Result<(), Error> {\n\n trace!(\"sending PingResponse\");\n\n validator_sender.reply(\n\n Message_MessageType::PING_RESPONSE,\n\n correlation_id,\n\n &PingResponse::new().write_to_bytes()?,\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 16, "score": 112695.84047220922 }, { "content": "fn from_consensus_peer_message(\n\n mut c_msg: ConsensusPeerMessage,\n\n mut c_msg_header: ConsensusPeerMessageHeader,\n\n) -> PeerMessage {\n\n PeerMessage {\n\n header: PeerMessageHeader {\n\n signer_id: c_msg_header.take_signer_id(),\n\n content_sha512: c_msg_header.take_content_sha512(),\n\n message_type: c_msg_header.take_message_type(),\n\n name: c_msg_header.take_name(),\n\n version: c_msg_header.take_version(),\n\n },\n\n header_bytes: c_msg.take_header(),\n\n header_signature: c_msg.take_header_signature(),\n\n content: c_msg.take_content(),\n\n }\n\n}\n\n\n\nimpl From<ProtobufError> for Error {\n\n fn from(error: ProtobufError) -> Error {\n", "file_path": 
"src/consensus/zmq_driver.rs", "rank": 17, "score": 112362.46184604635 }, { "content": "/// A Message Sender\n\n///\n\n/// A message\n\npub trait MessageSender {\n\n fn send(\n\n &self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<MessageFuture, SendError>;\n\n\n\n fn reply(\n\n &self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<(), SendError>;\n\n\n\n fn close(&mut self);\n\n}\n\n\n\n/// Result for a message received.\n\npub type MessageResult = Result<Message, ReceiveError>;\n\n\n\n/// A message Receiver\n\npub type MessageReceiver = Receiver<MessageResult>;\n\n\n", "file_path": "src/messaging/stream.rs", "rank": 18, "score": 92080.36448821436 }, { "content": "pub trait TransactionHandler {\n\n /// TransactionHandler that defines the business logic for a new transaction family.\n\n /// The family_name, family_versions, and namespaces functions are\n\n /// used by the processor to route processing requests to the handler.\n\n\n\n /// family_name should return the name of the transaction family that this\n\n /// handler can process, e.g. \"intkey\"\n\n fn family_name(&self) -> String;\n\n\n\n /// family_versions should return a list of versions this transaction\n\n /// family handler can process, e.g. [\"1.0\"]\n\n fn family_versions(&self) -> Vec<String>;\n\n\n\n /// namespaces should return a list containing all the handler's\n\n /// namespaces, e.g. [\"abcdef\"]\n\n fn namespaces(&self) -> Vec<String>;\n\n\n\n /// Apply is the single method where all the business logic for a\n\n /// transaction family is defined. The method will be called by the\n\n /// transaction processor upon receiving a TpProcessRequest that the\n\n /// handler understands and will pass in the TpProcessRequest and an\n\n /// initialized instance of the Context type.\n\n fn apply(\n\n &self,\n\n request: &TpProcessRequest,\n\n context: &mut dyn TransactionContext,\n\n ) -> Result<(), ApplyError>;\n\n}\n", "file_path": "src/processor/handler.rs", "rank": 19, "score": 91134.04065482419 }, { "content": "/// A Message Connection\n\n///\n\n/// This denotes a connection which can create a MessageSender/Receiver pair.\n\npub trait MessageConnection<MS: MessageSender> {\n\n fn create(&self) -> (MS, MessageReceiver);\n\n}\n\n\n\n/// Errors that occur on sending a message.\n\n#[derive(Debug)]\n\npub enum SendError {\n\n DisconnectedError,\n\n TimeoutError,\n\n UnknownError(String),\n\n}\n\n\n\nimpl std::error::Error for SendError {}\n\n\n\nimpl std::fmt::Display for SendError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match *self {\n\n SendError::DisconnectedError => write!(f, \"DisconnectedError\"),\n\n SendError::TimeoutError => write!(f, \"TimeoutError\"),\n\n SendError::UnknownError(ref e) => write!(f, \"UnknownError: {}\", e),\n", "file_path": "src/messaging/stream.rs", "rank": 20, "score": 86681.33470160131 }, { "content": "fn handle_update(\n\n msg: &Message,\n\n validator_sender: &mut dyn MessageSender,\n\n update_sender: &mut Sender<Update>,\n\n) -> Result<(), Error> {\n\n use self::Message_MessageType::*;\n\n\n\n let update = match msg.get_message_type() {\n\n CONSENSUS_NOTIFY_PEER_CONNECTED => {\n\n let mut request = ConsensusNotifyPeerConnected::parse_from_bytes(msg.get_content())?;\n\n Update::PeerConnected(request.take_peer_info().into())\n\n }\n\n CONSENSUS_NOTIFY_PEER_DISCONNECTED => {\n\n let mut request = 
ConsensusNotifyPeerDisconnected::parse_from_bytes(msg.get_content())?;\n\n Update::PeerDisconnected(request.take_peer_id())\n\n }\n\n CONSENSUS_NOTIFY_PEER_MESSAGE => {\n\n let mut request = ConsensusNotifyPeerMessage::parse_from_bytes(msg.get_content())?;\n\n let header =\n\n ConsensusPeerMessageHeader::parse_from_bytes(request.get_message().get_header())?;\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 21, "score": 84165.78781700066 }, { "content": "fn driver_loop(\n\n mut update_sender: Sender<Update>,\n\n stop_receiver: &Receiver<()>,\n\n mut validator_sender: ZmqMessageSender,\n\n validator_receiver: &Receiver<Result<Message, ReceiveError>>,\n\n) -> Result<(), Error> {\n\n loop {\n\n match validator_receiver.recv_timeout(Duration::from_millis(100)) {\n\n Err(RecvTimeoutError::Timeout) => {\n\n if stop_receiver.try_recv().is_ok() {\n\n update_sender.send(Update::Shutdown)?;\n\n break Ok(());\n\n }\n\n }\n\n Err(RecvTimeoutError::Disconnected) => {\n\n break Err(Error::ReceiveError(\"Sender disconnected\".into()));\n\n }\n\n Ok(Err(err)) => {\n\n break Err(Error::ReceiveError(format!(\n\n \"Unexpected error while receiving: {}\",\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 22, "score": 84165.78781700066 }, { "content": "fn wait_until_active(\n\n validator_sender: &ZmqMessageSender,\n\n validator_receiver: &Receiver<Result<Message, ReceiveError>>,\n\n) -> Result<StartupState, Error> {\n\n use self::Message_MessageType::*;\n\n\n\n let ret: Result<StartupState, Error>;\n\n\n\n loop {\n\n match validator_receiver.recv_timeout(Duration::from_millis(100)) {\n\n Err(RecvTimeoutError::Timeout) => {}\n\n Err(RecvTimeoutError::Disconnected) => {\n\n ret = Err(Error::ReceiveError(\"Sender disconnected\".into()));\n\n break;\n\n }\n\n Ok(Err(err)) => {\n\n ret = Err(Error::ReceiveError(format!(\n\n \"Unexpected error while receiving: {}\",\n\n err\n\n )));\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 23, "score": 84165.78781700066 }, { "content": "fn protocols_from_tuples(\n\n protocols: Vec<(String, String)>,\n\n) -> Vec<ConsensusRegisterRequest_Protocol> {\n\n protocols\n\n .iter()\n\n .map(|(p_name, p_version)| {\n\n let mut protocol = ConsensusRegisterRequest_Protocol::new();\n\n protocol.set_name(p_name.to_string());\n\n protocol.set_version(p_version.to_string());\n\n protocol\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n\nimpl From<ConsensusBlock> for Block {\n\n fn from(mut c_block: ConsensusBlock) -> Block {\n\n Block {\n\n block_id: c_block.take_block_id(),\n\n previous_id: c_block.take_previous_id(),\n\n signer_id: c_block.take_signer_id(),\n", "file_path": "src/consensus/zmq_driver.rs", "rank": 24, "score": 84165.78781700066 }, { "content": "struct IntkeyPayload {\n\n verb: Verb,\n\n name: String,\n\n value: u32,\n\n}\n\n\n\nimpl IntkeyPayload {\n\n pub fn new(payload_data: &[u8]) -> Result<Option<IntkeyPayload>, ApplyError> {\n\n let input = Cursor::new(payload_data);\n\n\n\n let mut decoder = cbor::GenericDecoder::new(cbor::Config::default(), input);\n\n let decoder_value = decoder\n\n .value()\n\n .map_err(|err| ApplyError::InternalError(format!(\"{}\", err)))?;\n\n\n\n let c = cbor::value::Cursor::new(&decoder_value);\n\n\n\n let verb_raw: String = match c.field(\"Verb\").text_plain() {\n\n None => {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n", "file_path": "examples/intkey_rust/src/handler.rs", "rank": 25, "score": 83735.55175055402 }, { "content": "/// A private key instance.\n\n/// The underlying content is dependent on 
implementation.\n\npub trait PrivateKey {\n\n /// Returns the algorithm name used for this private key.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Return the private key encoded as a hex string.\n\n fn as_hex(&self) -> String;\n\n /// Return the private key bytes.\n\n fn as_slice(&self) -> &[u8];\n\n}\n\n\n", "file_path": "src/signing/mod.rs", "rank": 26, "score": 82366.00227799089 }, { "content": "/// A public key instance.\n\n/// The underlying content is dependent on implementation.\n\npub trait PublicKey {\n\n /// Returns the algorithm name used for this public key.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Return the public key encoded as a hex string.\n\n fn as_hex(&self) -> String;\n\n /// Return the public key bytes.\n\n fn as_slice(&self) -> &[u8];\n\n}\n\n\n", "file_path": "src/signing/mod.rs", "rank": 27, "score": 82366.00227799089 }, { "content": "/*\n\n * Copyright 2017 Intel Corporation\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * ------------------------------------------------------------------------------\n\n */\n\npub mod stream;\n\npub mod zmq_stream;\n", "file_path": "src/messaging/mod.rs", "rank": 28, "score": 70574.62154678942 }, { "content": "/*\n\n * Copyright 2017 Intel Corporation\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\nuse std::collections::HashMap;\n\nuse std::sync::mpsc::{channel, sync_channel, Receiver, RecvTimeoutError, Sender, SyncSender};\n\nuse std::sync::{Arc, Mutex};\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 29, "score": 66678.12854680535 }, { "content": "use std::thread;\n\nuse std::time::Duration;\n\n\n\nuse crate::messages::validator::Message;\n\nuse crate::messages::validator::Message_MessageType;\n\n\n\nuse crate::messaging::stream::*;\n\n\n\n/// A MessageConnection over ZMQ sockets\n\npub struct ZmqMessageConnection {\n\n address: String,\n\n context: zmq::Context,\n\n}\n\n\n\nconst CHANNEL_BUFFER_SIZE: usize = 128;\n\n\n\nimpl ZmqMessageConnection {\n\n /// Create a new ZmqMessageConnection\n\n pub fn new(address: &str) -> Self {\n\n ZmqMessageConnection {\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 30, "score": 66638.33740823677 }, { "content": " }\n\n }\n\n\n\n /// Start the message stream instance\n\n fn start(&mut self) {\n\n let (outbound_send, outbound_recv) = sync_channel(CHANNEL_BUFFER_SIZE);\n\n 
self.outbound_sender = Some(outbound_send);\n\n\n\n let ctx = self.context.clone();\n\n let address = self.address.clone();\n\n let inbound_router = self.inbound_router.clone();\n\n thread::spawn(move || {\n\n let mut inner_stream =\n\n SendReceiveStream::new(&ctx, &address, outbound_recv, inbound_router);\n\n inner_stream.run();\n\n });\n\n }\n\n}\n\n\n\nimpl MessageSender for ZmqMessageSender {\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 31, "score": 66630.12700609282 }, { "content": " address: String::from(address),\n\n context: zmq::Context::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl MessageConnection<ZmqMessageSender> for ZmqMessageConnection {\n\n fn create(&self) -> (ZmqMessageSender, MessageReceiver) {\n\n // Create the channel for request messages (i.e. non-reply messages)\n\n let (request_tx, request_rx) = sync_channel(CHANNEL_BUFFER_SIZE);\n\n let router = InboundRouter::new(request_tx);\n\n let mut sender = ZmqMessageSender::new(self.context.clone(), self.address.clone(), router);\n\n\n\n sender.start();\n\n\n\n (sender, request_rx)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 32, "score": 66627.88872119608 }, { "content": " .recv_timeout(Duration::from_millis(POLL_TIMEOUT as u64))\n\n {\n\n Ok(SocketCommand::Send(msg)) => {\n\n let message_bytes = protobuf::Message::write_to_bytes(&msg).unwrap();\n\n trace!(\"Sending {} bytes\", message_bytes.len());\n\n self.socket.send(&message_bytes, 0).unwrap();\n\n }\n\n Ok(SocketCommand::Shutdown) => {\n\n trace!(\"Shutdown Signal Received\");\n\n self.inbound_router\n\n .route(Err(ReceiveError::DisconnectedError));\n\n break;\n\n }\n\n Err(RecvTimeoutError::Disconnected) => {\n\n debug!(\"Disconnected outbound channel\");\n\n self.inbound_router\n\n .route(Err(ReceiveError::DisconnectedError));\n\n break;\n\n }\n\n _ => continue,\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 33, "score": 66624.14362812189 }, { "content": " Err(e) => Err(SendError::UnknownError(e.to_string())),\n\n }\n\n } else {\n\n Err(SendError::DisconnectedError)\n\n }\n\n }\n\n\n\n fn reply(\n\n &self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<(), SendError> {\n\n if let Some(ref sender) = self.outbound_sender {\n\n let mut msg = Message::new();\n\n msg.set_message_type(destination);\n\n msg.set_correlation_id(String::from(correlation_id));\n\n msg.set_content(Vec::from(contents));\n\n\n\n match sender.send(SocketCommand::Send(msg)) {\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 34, "score": 66622.25691192292 }, { "content": " fn send(\n\n &self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<MessageFuture, SendError> {\n\n if let Some(ref sender) = self.outbound_sender {\n\n let mut msg = Message::new();\n\n\n\n msg.set_message_type(destination);\n\n msg.set_correlation_id(String::from(correlation_id));\n\n msg.set_content(Vec::from(contents));\n\n\n\n let future = MessageFuture::new(\n\n self.inbound_router\n\n .expect_reply(String::from(correlation_id)),\n\n );\n\n\n\n match sender.send(SocketCommand::Send(msg)) {\n\n Ok(_) => Ok(future),\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 35, "score": 66621.97425159674 }, { "content": " if let Some(received_bytes) = received_parts.pop() {\n\n trace!(\"Received {} bytes\", received_bytes.len());\n\n if !received_bytes.is_empty() {\n\n let message = protobuf::Message::parse_from_bytes(&received_bytes).unwrap();\n\n 
self.inbound_router.route(Ok(message));\n\n }\n\n } else {\n\n debug!(\"Empty frame received.\");\n\n }\n\n }\n\n if poll_items[1].is_readable() {\n\n self.monitor_socket.recv_multipart(0).unwrap();\n\n let message_result = Err(ReceiveError::DisconnectedError);\n\n info!(\"Received Disconnect\");\n\n self.inbound_router.route(message_result);\n\n break;\n\n }\n\n\n\n match self\n\n .outbound_recv\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 36, "score": 66617.31951595139 }, { "content": " .expect(\"Failed to set socket linger value\");\n\n socket\n\n .monitor(\n\n \"inproc://monitor-socket\",\n\n zmq::SocketEvent::DISCONNECTED as i32,\n\n )\n\n .unwrap_or(());\n\n let monitor_socket = context.socket(zmq::PAIR).unwrap();\n\n monitor_socket\n\n .set_linger(0)\n\n .expect(\"Failed to set socket linger value\");\n\n\n\n let identity = uuid::Uuid::new_v4();\n\n socket.set_identity(identity.as_bytes()).unwrap();\n\n\n\n SendReceiveStream {\n\n address: String::from(address),\n\n socket,\n\n outbound_recv,\n\n inbound_router,\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 37, "score": 66615.62843028875 }, { "content": " .send(Ok(message))\n\n .expect(\"Unable to route new message\"),\n\n }\n\n }\n\n Err(ReceiveError::DisconnectedError) => {\n\n let mut expected_replies = self.expected_replies.lock().unwrap();\n\n for (_, sender) in expected_replies.iter_mut() {\n\n sender\n\n .send(Err(ReceiveError::DisconnectedError))\n\n .unwrap_or_else(|err| error!(\"Failed to send disconnect reply: {}\", err));\n\n }\n\n self.inbound_tx\n\n .send(Err(ReceiveError::DisconnectedError))\n\n .unwrap_or_else(|err| error!(\"Failed to send disconnect: {}\", err));\n\n }\n\n Err(err) => error!(\"Error: {}\", err),\n\n }\n\n }\n\n\n\n fn expect_reply(&self, correlation_id: String) -> Receiver<MessageResult> {\n\n let (expect_tx, expect_rx) = channel();\n\n let mut expected_replies = self.expected_replies.lock().unwrap();\n\n expected_replies.insert(correlation_id, expect_tx);\n\n\n\n expect_rx\n\n }\n\n}\n\n\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 38, "score": 66613.92693776474 }, { "content": " monitor_socket,\n\n }\n\n }\n\n\n\n fn run(&mut self) {\n\n self.socket.connect(&self.address).unwrap();\n\n self.monitor_socket\n\n .connect(\"inproc://monitor-socket\")\n\n .unwrap();\n\n loop {\n\n let mut poll_items = [\n\n self.socket.as_poll_item(zmq::POLLIN),\n\n self.monitor_socket.as_poll_item(zmq::POLLIN),\n\n ];\n\n zmq::poll(&mut poll_items, POLL_TIMEOUT).unwrap();\n\n if poll_items[0].is_readable() {\n\n trace!(\"Readable!\");\n\n let mut received_parts = self.socket.recv_multipart(0).unwrap();\n\n\n\n // Grab the last part, which should contain our message\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 39, "score": 66612.97352711613 }, { "content": "/*\n\n * Copyright 2017 Bitwise IO, Inc.\n\n * Copyright 2019 Cargill Incorporated\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * 
-----------------------------------------------------------------------------\n\n */\n\n\n\nuse std::time::Duration;\n\n\n", "file_path": "src/processor/zmq_context.rs", "rank": 40, "score": 66610.09718098503 }, { "content": " Ok(_) => Ok(()),\n\n Err(e) => Err(SendError::UnknownError(e.to_string())),\n\n }\n\n } else {\n\n Err(SendError::DisconnectedError)\n\n }\n\n }\n\n\n\n fn close(&mut self) {\n\n if let Some(ref sender) = self.outbound_sender.take() {\n\n match sender.send(SocketCommand::Shutdown) {\n\n Ok(_) => (),\n\n Err(_) => info!(\"Sender has already closed.\"),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 41, "score": 66606.52354856781 }, { "content": " }\n\n }\n\n\n\n debug!(\"Exited stream\");\n\n self.socket.disconnect(&self.address).unwrap();\n\n if let Err(e) = self.monitor_socket.disconnect(\"inproc://monitor-socket\") {\n\n log::warn!(\"Monitor socket disconnect error: {}\", e)\n\n }\n\n }\n\n}\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 42, "score": 66602.942344887 }, { "content": "use super::generate_correlation_id;\n\n\n\n#[derive(Clone)]\n\npub struct ZmqTransactionContext {\n\n context_id: String,\n\n sender: ZmqMessageSender,\n\n timeout: Option<Duration>,\n\n}\n\n\n\nimpl ZmqTransactionContext {\n\n /// Context provides an interface for getting, setting, and deleting\n\n /// validator state. All validator interactions by a handler should be\n\n /// through a Context instance.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `sender` - for client grpc communication\n\n /// * `context_id` - the context_id passed in from the validator\n\n pub fn new(context_id: &str, sender: ZmqMessageSender) -> Self {\n\n ZmqTransactionContext {\n", "file_path": "src/processor/zmq_context.rs", "rank": 43, "score": 66562.63662470972 }, { "content": " context_id: String::from(context_id),\n\n sender,\n\n timeout: None,\n\n }\n\n }\n\n\n\n pub fn with_timeout(\n\n context_id: &str,\n\n sender: ZmqMessageSender,\n\n timeout: Option<Duration>,\n\n ) -> Self {\n\n ZmqTransactionContext {\n\n context_id: String::from(context_id),\n\n sender,\n\n timeout,\n\n }\n\n }\n\n}\n\n\n\nimpl TransactionContext for ZmqTransactionContext {\n", "file_path": "src/processor/zmq_context.rs", "rank": 44, "score": 66559.85421393995 }, { "content": " /// # Arguments\n\n ///\n\n /// * `addresses` - the addresses to delete\n\n fn delete_state_entries(&self, addresses: &[String]) -> Result<Vec<String>, ContextError> {\n\n let mut request = TpStateDeleteRequest::new();\n\n request.set_context_id(self.context_id.clone());\n\n request.set_addresses(RepeatedField::from_slice(addresses));\n\n\n\n let serialized = request.write_to_bytes()?;\n\n let x: &[u8] = &serialized;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::TP_STATE_DELETE_REQUEST,\n\n &generate_correlation_id(),\n\n x,\n\n )?;\n\n\n\n let response = TpStateDeleteResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n", "file_path": "src/processor/zmq_context.rs", "rank": 45, "score": 66556.69315687394 }, { "content": " /// get_state_entries queries the validator state for data at each of the\n\n /// addresses in the given list. 
The addresses that have been set\n\n /// are returned.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `addresses` - the addresses to fetch\n\n fn get_state_entries(\n\n &self,\n\n addresses: &[String],\n\n ) -> Result<Vec<(String, Vec<u8>)>, ContextError> {\n\n let mut request = TpStateGetRequest::new();\n\n request.set_context_id(self.context_id.clone());\n\n request.set_addresses(RepeatedField::from_vec(addresses.to_vec()));\n\n let serialized = request.write_to_bytes()?;\n\n let x: &[u8] = &serialized;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::TP_STATE_GET_REQUEST,\n\n &generate_correlation_id(),\n", "file_path": "src/processor/zmq_context.rs", "rank": 46, "score": 66554.54980967517 }, { "content": "use protobuf::Message as M;\n\nuse protobuf::RepeatedField;\n\n\n\nuse crate::messages::block::BlockHeader;\n\nuse crate::messages::client_block::{\n\n ClientBlockGetByNumRequest, ClientBlockGetResponse, ClientBlockGetResponse_Status,\n\n ClientRewardBlockListRequest, ClientRewardBlockListResponse,\n\n ClientRewardBlockListResponse_Status,\n\n};\n\nuse crate::messages::client_state::{\n\n ClientStateListRequest, ClientStateListResponse, ClientStateListResponse_Status,\n\n};\n\nuse crate::messages::events::Event;\n\nuse crate::messages::events::Event_Attribute;\n\nuse crate::messages::state_context::*;\n\nuse crate::messages::validator::Message_MessageType;\n\nuse crate::messaging::stream::MessageSender;\n\nuse crate::messaging::zmq_stream::ZmqMessageSender;\n\nuse crate::processor::handler::{ContextError, TransactionContext};\n\n\n", "file_path": "src/processor/zmq_context.rs", "rank": 47, "score": 66552.53472427862 }, { "content": " fn get_state_entries_by_prefix(\n\n &self,\n\n tip_id: &str,\n\n address: &str,\n\n ) -> Result<Vec<(String, Vec<u8>)>, ContextError> {\n\n let mut start = String::new();\n\n let mut root: String = tip_id.into();\n\n\n\n let mut entries = Vec::new();\n\n\n\n loop {\n\n let mut request = ClientStateListRequest::new();\n\n\n\n request.set_state_root(root.clone());\n\n request.mut_paging().set_start(start.clone());\n\n\n\n //no need to set paging limit explicitely, the default one should work fine or better\n\n //request.mut_paging().set_limit(100);\n\n\n\n request.set_address(address.into());\n", "file_path": "src/processor/zmq_context.rs", "rank": 48, "score": 66552.04526503301 }, { "content": " let mut entry = TpStateEntry::new();\n\n entry.set_address(address);\n\n entry.set_data(payload);\n\n entry\n\n })\n\n .collect();\n\n\n\n let mut request = TpStateSetRequest::new();\n\n request.set_context_id(self.context_id.clone());\n\n request.set_entries(RepeatedField::from_vec(state_entries.to_vec()));\n\n let serialized = request.write_to_bytes()?;\n\n let x: &[u8] = &serialized;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::TP_STATE_SET_REQUEST,\n\n &generate_correlation_id(),\n\n x,\n\n )?;\n\n\n\n let response = TpStateSetResponse::parse_from_bytes(\n", "file_path": "src/processor/zmq_context.rs", "rank": 49, "score": 66549.50558498617 }, { "content": " /// * `data` - the data to add\n\n fn add_receipt_data(&self, data: &[u8]) -> Result<(), ContextError> {\n\n let mut request = TpReceiptAddDataRequest::new();\n\n request.set_context_id(self.context_id.clone());\n\n request.set_data(Vec::from(data));\n\n\n\n let serialized = request.write_to_bytes()?;\n\n let x: &[u8] = &serialized;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::TP_RECEIPT_ADD_DATA_REQUEST,\n\n &generate_correlation_id(),\n\n 
x,\n\n )?;\n\n\n\n let response = TpReceiptAddDataResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n TpReceiptAddDataResponse_Status::OK => Ok(()),\n", "file_path": "src/processor/zmq_context.rs", "rank": 50, "score": 66549.34772100896 }, { "content": " first_pred: u64,\n\n last_pred: u64,\n\n ) -> Result<Vec<String>, ContextError> {\n\n let mut request = ClientRewardBlockListRequest::new();\n\n\n\n request.set_head_id(block_id.into());\n\n request.set_first_predecessor_height(first_pred);\n\n request.set_last_predecessor_height(last_pred);\n\n\n\n let serialized = request.write_to_bytes()?;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::CLIENT_REWARD_BLOCK_LIST_REQUEST,\n\n &generate_correlation_id(),\n\n &serialized,\n\n )?;\n\n\n\n let response = ClientRewardBlockListResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n", "file_path": "src/processor/zmq_context.rs", "rank": 51, "score": 66548.4559214993 }, { "content": " )),\n\n TpEventAddResponse_Status::STATUS_UNSET => Err(ContextError::ResponseAttributeError(\n\n String::from(\"Status was not set for TpEventAddRespons\"),\n\n )),\n\n }\n\n }\n\n\n\n fn get_sig_by_num(&self, block_num: u64) -> Result<String, ContextError> {\n\n let mut request = ClientBlockGetByNumRequest::new();\n\n\n\n request.set_block_num(block_num);\n\n\n\n let serialized = request.write_to_bytes()?;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::CLIENT_BLOCK_GET_BY_NUM_REQUEST,\n\n &generate_correlation_id(),\n\n &serialized,\n\n )?;\n\n\n", "file_path": "src/processor/zmq_context.rs", "rank": 52, "score": 66547.28650289353 }, { "content": " let mut request = TpEventAddRequest::new();\n\n request.set_context_id(self.context_id.clone());\n\n request.set_event(event.clone());\n\n\n\n let serialized = request.write_to_bytes()?;\n\n let x: &[u8] = &serialized;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::TP_EVENT_ADD_REQUEST,\n\n &generate_correlation_id(),\n\n x,\n\n )?;\n\n\n\n let response = TpEventAddResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n TpEventAddResponse_Status::OK => Ok(()),\n\n TpEventAddResponse_Status::ERROR => Err(ContextError::TransactionReceiptError(\n\n format!(\"Failed to add event {:?}\", event),\n", "file_path": "src/processor/zmq_context.rs", "rank": 53, "score": 66547.19930800519 }, { "content": " \"Tried to get unauthorized addresses: {:?}\",\n\n addresses\n\n )))\n\n }\n\n TpStateGetResponse_Status::STATUS_UNSET => Err(ContextError::ResponseAttributeError(\n\n String::from(\"Status was not set for TpStateGetResponse\"),\n\n )),\n\n }\n\n }\n\n\n\n /// set_state requests that each address in the provided map be\n\n /// set in validator state to its corresponding value.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `entries` - entries are a hashmap where the key is an address and value is the data\n\n fn set_state_entries(&self, entries: Vec<(String, Vec<u8>)>) -> Result<(), ContextError> {\n\n let state_entries: Vec<TpStateEntry> = entries\n\n .into_iter()\n\n .map(|(address, payload)| {\n", "file_path": "src/processor/zmq_context.rs", "rank": 54, "score": 66543.91984439512 }, { "content": "\n\n let serialized = request.write_to_bytes()?;\n\n\n\n let mut future = self.sender.send(\n\n Message_MessageType::CLIENT_STATE_LIST_REQUEST,\n\n &generate_correlation_id(),\n\n 
&serialized,\n\n )?;\n\n\n\n let mut response = ClientStateListResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n ClientStateListResponse_Status::OK => {\n\n root = response.take_state_root();\n\n start = response.mut_paging().take_next();\n\n entries.reserve(response.get_entries().len());\n\n\n\n for mut entry in response.take_entries() {\n\n entries.push((entry.take_address(), entry.take_data()));\n", "file_path": "src/processor/zmq_context.rs", "rank": 55, "score": 66541.4104746198 }, { "content": " TpReceiptAddDataResponse_Status::ERROR => Err(ContextError::TransactionReceiptError(\n\n format!(\"Failed to add receipt data {:?}\", data),\n\n )),\n\n TpReceiptAddDataResponse_Status::STATUS_UNSET => {\n\n Err(ContextError::ResponseAttributeError(String::from(\n\n \"Status was not set for TpReceiptAddDataResponse\",\n\n )))\n\n }\n\n }\n\n }\n\n\n\n /// add_event adds a new event to the execution result for this transaction.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `event_type` - This is used to subscribe to events. It should be globally unique and\n\n /// describe what, in general, has occured.\n\n /// * `attributes` - Additional information about the event that is transparent to the\n\n /// validator. Attributes can be used by subscribers to filter the type of events\n\n /// they receive.\n", "file_path": "src/processor/zmq_context.rs", "rank": 56, "score": 66541.37345980124 }, { "content": " future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n TpStateSetResponse_Status::OK => Ok(()),\n\n TpStateSetResponse_Status::AUTHORIZATION_ERROR => {\n\n Err(ContextError::AuthorizationError(format!(\n\n \"Tried to set unauthorized addresses: {:?}\",\n\n state_entries\n\n )))\n\n }\n\n TpStateSetResponse_Status::STATUS_UNSET => Err(ContextError::ResponseAttributeError(\n\n String::from(\"Status was not set for TpStateSetResponse\"),\n\n )),\n\n }\n\n }\n\n\n\n /// delete_state_entries requests that each of the provided addresses be unset\n\n /// in validator state. 
A list of successfully deleted addresses\n\n /// is returned.\n\n ///\n", "file_path": "src/processor/zmq_context.rs", "rank": 57, "score": 66540.9936977594 }, { "content": " match response.get_status() {\n\n TpStateDeleteResponse_Status::OK => Ok(Vec::from(response.get_addresses())),\n\n TpStateDeleteResponse_Status::AUTHORIZATION_ERROR => {\n\n Err(ContextError::AuthorizationError(format!(\n\n \"Tried to delete unauthorized addresses: {:?}\",\n\n addresses\n\n )))\n\n }\n\n TpStateDeleteResponse_Status::STATUS_UNSET => {\n\n Err(ContextError::ResponseAttributeError(String::from(\n\n \"Status was not set for TpStateDeleteResponse\",\n\n )))\n\n }\n\n }\n\n }\n\n\n\n /// add_receipt_data adds a blob to the execution result for this transaction\n\n ///\n\n /// # Arguments\n\n ///\n", "file_path": "src/processor/zmq_context.rs", "rank": 58, "score": 66540.51861771611 }, { "content": " /// * `data` - Additional information about the event that is opaque to the validator.\n\n fn add_event(\n\n &self,\n\n event_type: String,\n\n attributes: Vec<(String, String)>,\n\n data: &[u8],\n\n ) -> Result<(), ContextError> {\n\n let mut event = Event::new();\n\n event.set_event_type(event_type);\n\n\n\n let mut attributes_vec = Vec::new();\n\n for (key, value) in attributes {\n\n let mut attribute = Event_Attribute::new();\n\n attribute.set_key(key);\n\n attribute.set_value(value);\n\n attributes_vec.push(attribute);\n\n }\n\n event.set_attributes(RepeatedField::from_vec(attributes_vec));\n\n event.set_data(Vec::from(data));\n\n\n", "file_path": "src/processor/zmq_context.rs", "rank": 59, "score": 66539.12970306672 }, { "content": " x,\n\n )?;\n\n\n\n let response = TpStateGetResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n TpStateGetResponse_Status::OK => {\n\n let mut entries = Vec::new();\n\n for entry in response.get_entries() {\n\n match entry.get_data().len() {\n\n 0 => continue,\n\n _ => entries\n\n .push((entry.get_address().to_string(), Vec::from(entry.get_data()))),\n\n }\n\n }\n\n Ok(entries)\n\n }\n\n TpStateGetResponse_Status::AUTHORIZATION_ERROR => {\n\n Err(ContextError::AuthorizationError(format!(\n", "file_path": "src/processor/zmq_context.rs", "rank": 60, "score": 66539.11276028624 }, { "content": " let response = ClientBlockGetResponse::parse_from_bytes(\n\n future.get_maybe_timeout(self.timeout)?.get_content(),\n\n )?;\n\n match response.get_status() {\n\n ClientBlockGetResponse_Status::OK => {\n\n let raw_header = &response.get_block().header;\n\n let header = BlockHeader::parse_from_bytes(raw_header)?;\n\n\n\n Ok(header.signer_public_key)\n\n }\n\n err_status => Err(ContextError::ResponseAttributeError(format!(\n\n \"Failed to retrieve block by num : {:?}\",\n\n err_status\n\n ))),\n\n }\n\n }\n\n\n\n fn get_reward_block_signatures(\n\n &self,\n\n block_id: &str,\n", "file_path": "src/processor/zmq_context.rs", "rank": 61, "score": 66536.88660101299 }, { "content": " }\n\n }\n\n err_status => {\n\n return Err(ContextError::ResponseAttributeError(format!(\n\n \"Failed to retrieve state entries : {:?}\",\n\n err_status\n\n )))\n\n }\n\n }\n\n if start.is_empty() {\n\n return Ok(entries);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/processor/zmq_context.rs", "rank": 62, "score": 66535.91936883314 }, { "content": " match response.get_status() {\n\n ClientRewardBlockListResponse_Status::OK => {\n\n let blocks = response.get_blocks();\n\n let mut signatures = Vec::with_capacity(blocks.len());\n\n for 
block in blocks {\n\n let raw_header = &block.header;\n\n let header = BlockHeader::parse_from_bytes(&raw_header)?;\n\n\n\n signatures.push(header.signer_public_key);\n\n }\n\n\n\n Ok(signatures)\n\n }\n\n err_status => Err(ContextError::ResponseAttributeError(format!(\n\n \"Failed to retrieve Reward Block List : {:?}\",\n\n err_status\n\n ))),\n\n }\n\n }\n\n\n", "file_path": "src/processor/zmq_context.rs", "rank": 63, "score": 66531.55660720186 }, { "content": "enum ContextAndKey<'a> {\n\n ByRef(&'a dyn Context, &'a dyn PrivateKey),\n\n ByBox(Box<dyn Context>, Box<dyn PrivateKey>),\n\n}\n\n\n\n/// A convenient wrapper of Context and PrivateKey\n\npub struct Signer<'a> {\n\n context_and_key: ContextAndKey<'a>,\n\n}\n\n\n\nimpl<'a> Signer<'a> {\n\n /// Constructs a new Signer\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `context` - a cryptographic context\n\n /// * `private_key` - private key\n\n pub fn new(context: &'a dyn Context, key: &'a dyn PrivateKey) -> Self {\n\n Signer {\n\n context_and_key: ContextAndKey::ByRef(context, key),\n", "file_path": "src/signing/mod.rs", "rank": 64, "score": 65020.24230616496 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\nmod game;\n\nmod payload;\n\nmod state;\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 65, "score": 62042.58350517355 }, { "content": "\n\ncfg_if! 
{\n\n if #[cfg(target_arch = \"wasm32\")] {\n\n use sabre_sdk::{ApplyError, TpProcessRequest, TransactionContext, TransactionHandler,};\n\n } else {\n\n use sawtooth_sdk::messages::processor::TpProcessRequest;\n\n use sawtooth_sdk::processor::handler::{ApplyError, TransactionContext, TransactionHandler};\n\n }\n\n}\n\n\n\nuse crate::handler::game::Game;\n\nuse crate::handler::payload::XoPayload;\n\nuse crate::handler::state::{get_xo_prefix, XoState};\n\n\n\npub struct XoTransactionHandler {\n\n family_name: String,\n\n family_versions: Vec<String>,\n\n namespaces: Vec<String>,\n\n}\n\n\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 66, "score": 61984.30988595047 }, { "content": "impl XoTransactionHandler {\n\n pub fn new() -> XoTransactionHandler {\n\n XoTransactionHandler {\n\n family_name: \"xo\".into(),\n\n family_versions: vec![\"1.0\".into()],\n\n namespaces: vec![get_xo_prefix()],\n\n }\n\n }\n\n}\n\n\n\nimpl Default for XoTransactionHandler {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl TransactionHandler for XoTransactionHandler {\n\n fn family_name(&self) -> String {\n\n self.family_name.clone()\n\n }\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 67, "score": 61981.79850827812 }, { "content": "\n\n fn family_versions(&self) -> Vec<String> {\n\n self.family_versions.clone()\n\n }\n\n\n\n fn namespaces(&self) -> Vec<String> {\n\n self.namespaces.clone()\n\n }\n\n\n\n fn apply(\n\n &self,\n\n request: &TpProcessRequest,\n\n context: &mut dyn TransactionContext,\n\n ) -> Result<(), ApplyError> {\n\n let signer = request.get_header().get_signer_public_key();\n\n\n\n let payload = XoPayload::new(request.get_payload())?;\n\n\n\n let mut state = XoState::new(context);\n\n\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 68, "score": 61978.522142008165 }, { "content": " return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Not player 2's turn\",\n\n )));\n\n }\n\n }\n\n \"P2-NEXT\" => {\n\n let p2 = g.get_player2();\n\n if !p2.is_empty() && p2.as_str() != signer {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Not player 1's turn\",\n\n )));\n\n }\n\n }\n\n _ => {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Invalid state\",\n\n )));\n\n }\n\n }\n\n\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 69, "score": 61968.91567378645 }, { "content": " let game = Game::new(payload.get_name());\n\n state.set_game(payload.get_name().as_str(), game)?;\n\n info!(\"Created game: {}\", payload.get_name().as_str());\n\n } else {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Invalid action: Game already exists\",\n\n )));\n\n }\n\n }\n\n \"take\" => {\n\n if let Some(mut g) = game {\n\n match g.get_state().as_str() {\n\n \"P1-WIN\" | \"P2-WIN\" | \"TIE\" => {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Invalid action: Game has ended\",\n\n )));\n\n }\n\n \"P1-NEXT\" => {\n\n let p1 = g.get_player1();\n\n if !p1.is_empty() && p1.as_str() != signer {\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 70, "score": 61968.45695578314 }, { "content": " let board_chars: Vec<char> = g.get_board().chars().collect();\n\n if board_chars[payload.get_space() - 1] != '-' {\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Space {} is already taken\",\n\n payload.get_space()\n\n )));\n\n }\n\n\n\n if g.get_player1().is_empty() {\n\n g.set_player1(signer);\n\n } else if g.get_player2().is_empty() {\n\n 
g.set_player2(signer)\n\n }\n\n\n\n g.mark_space(payload.get_space())?;\n\n g.update_state()?;\n\n\n\n g.display();\n\n\n\n state.set_game(payload.get_name().as_str(), g)?;\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 71, "score": 61967.56606027622 }, { "content": " info!(\n\n \"Payload: {} {} {}\",\n\n payload.get_name(),\n\n payload.get_action(),\n\n payload.get_space(),\n\n );\n\n\n\n let game = state.get_game(payload.get_name().as_str())?;\n\n\n\n match payload.get_action().as_str() {\n\n \"delete\" => {\n\n if game.is_none() {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Invalid action: game does not exist\",\n\n )));\n\n }\n\n state.delete_game(payload.get_name().as_str())?;\n\n }\n\n \"create\" => {\n\n if game.is_none() {\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 72, "score": 61965.485623346445 }, { "content": " } else {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Invalid action: Take requires an existing game\",\n\n )));\n\n }\n\n }\n\n other_action => {\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Invalid action: '{}'\",\n\n other_action\n\n )));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\n// Sabre apply must return a bool\n", "file_path": "examples/xo_rust/src/handler/mod.rs", "rank": 73, "score": 61964.94762484941 }, { "content": "#[derive(Debug)]\n\nenum SocketCommand {\n\n Send(Message),\n\n Shutdown,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ZmqMessageSender {\n\n context: zmq::Context,\n\n address: String,\n\n inbound_router: InboundRouter,\n\n outbound_sender: Option<SyncSender<SocketCommand>>,\n\n}\n\n\n\nimpl ZmqMessageSender {\n\n fn new(ctx: zmq::Context, address: String, router: InboundRouter) -> Self {\n\n ZmqMessageSender {\n\n context: ctx,\n\n address,\n\n inbound_router: router,\n\n outbound_sender: None,\n", "file_path": "src/messaging/zmq_stream.rs", "rank": 74, "score": 61683.24639055463 }, { "content": "fn main() {\n\n // Generate protobuf files\n\n let proto_src_files = glob_simple(\"./protos/*.proto\");\n\n println!(\"{:?}\", proto_src_files);\n\n\n\n let out_dir = env::var(\"OUT_DIR\").expect(\"No OUT_DIR env variable\");\n\n let dest_path = Path::new(&out_dir).join(\"messages\");\n\n fs::create_dir_all(&dest_path).expect(\"Unable to create proto destination directory\");\n\n\n\n let mod_file_content = proto_src_files\n\n .iter()\n\n .map(|proto_file| {\n\n let proto_path = Path::new(proto_file);\n\n format!(\n\n \"pub mod {};\",\n\n proto_path\n\n .file_stem()\n\n .expect(\"Unable to extract stem\")\n\n .to_str()\n\n .expect(\"Unable to extract filename\")\n", "file_path": "build.rs", "rank": 75, "score": 59479.61183466169 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn main() {\n\n let matches = clap_app!(xo =>\n\n (version: crate_version!())\n\n (about: \"XO Transaction Processor (Rust)\")\n\n (@arg connect: -C --connect +takes_value\n\n \"connection endpoint for validator\")\n\n (@arg verbose: -v --verbose +multiple\n\n \"increase output verbosity\"))\n\n .get_matches();\n\n\n\n let endpoint = matches\n\n .value_of(\"connect\")\n\n .unwrap_or(\"tcp://localhost:4004\");\n\n\n\n let console_log_level;\n\n match matches.occurrences_of(\"verbose\") {\n\n 0 => console_log_level = LevelFilter::Warn,\n\n 1 => console_log_level = LevelFilter::Info,\n\n 2 => console_log_level = LevelFilter::Debug,\n\n _ => console_log_level = LevelFilter::Trace,\n", "file_path": "examples/xo_rust/src/main.rs", "rank": 76, 
"score": 54000.81531351962 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn main() {}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\n#[no_mangle]\n\npub unsafe fn entrypoint(payload: WasmPtr, signer: WasmPtr, signature: WasmPtr) -> i32 {\n\n execute_entrypoint(payload, signer, signature, apply)\n\n}\n", "file_path": "examples/xo_rust/src/main.rs", "rank": 77, "score": 54000.81531351962 }, { "content": "fn main() {\n\n let matches = clap_app!(intkey =>\n\n (version: crate_version!())\n\n (about: \"Intkey Transaction Processor (Rust)\")\n\n (@arg connect: -C --connect +takes_value\n\n \"connection endpoint for validator\")\n\n (@arg verbose: -v --verbose +multiple\n\n \"increase output verbosity\"))\n\n .get_matches();\n\n\n\n let endpoint = matches\n\n .value_of(\"connect\")\n\n .unwrap_or(\"tcp://localhost:4004\");\n\n\n\n let console_log_level;\n\n match matches.occurrences_of(\"verbose\") {\n\n 0 => console_log_level = LevelFilter::Warn,\n\n 1 => console_log_level = LevelFilter::Info,\n\n 2 => console_log_level = LevelFilter::Debug,\n\n _ => console_log_level = LevelFilter::Trace,\n", "file_path": "examples/intkey_rust/src/main.rs", "rank": 78, "score": 54000.81531351962 }, { "content": "/// Engine is the only trait that needs to be implemented when adding a new consensus engine.\n\n///\n\n/// The consensus engine should listen for notifications from the validator about the status of\n\n/// blocks and messages from peers. It must also determine internally when to build and publish\n\n/// blocks based on its view of the network and the consensus algorithm it implements. Often this\n\n/// will be some sort of timer-based interrupt.\n\n///\n\n/// Based on the updates the engine receives through the `Receiver<Update>` and the specifics of\n\n/// the algorithm being implemented, the engine utilizes the provided `Service` to create new\n\n/// blocks, communicate with its peers, request that certain blocks be committed, and fail or\n\n/// ignore blocks that should not be committed.\n\n///\n\n/// While the validator may take actions beyond what the engine instructs it to do for performance\n\n/// optimization reasons, it is the consensus engine's responsibility to drive the progress of the\n\n/// validator and ensure liveness.\n\n///\n\n/// It is not the engine's responsibility to manage blocks or memory, other than to ensure it\n\n/// responds to every new block with a commit, fail, or ignore within a \"reasonable amount of\n\n/// time\". The validator is responsible for guaranteeing the integrity of all blocks sent to the\n\n/// engine until the engine responds. After the engine responds, the validator does not guarantee\n\n/// that the block and its predecessors continue to be available unless the block was committed.\n\n///\n\n/// Finally, as an optimization, the consensus engine can send prioritized lists of blocks to the\n\n/// chain controller for checking instead of sending them one at a time, which allows the chain\n\n/// controller to intelligently work ahead while the consensus engine makes its decisions.\n\npub trait Engine {\n\n /// Called after the engine is initialized, when a connection to the validator has been\n\n /// established. Notifications from the validator are sent along `updates`. 
`service` is used\n\n /// to send requests to the validator.\n\n fn start(\n\n &mut self,\n\n updates: Receiver<Update>,\n\n service: Box<dyn Service>,\n\n startup_state: StartupState,\n\n ) -> Result<(), Error>;\n\n\n\n /// Get the version of this engine\n\n fn version(&self) -> String;\n\n\n\n /// Get the name of the engine, typically the algorithm being implemented\n\n fn name(&self) -> String;\n\n\n\n /// Any additional name/version pairs this engine supports\n\n fn additional_protocols(&self) -> Vec<(String, String)>;\n\n}\n", "file_path": "src/consensus/engine.rs", "rank": 79, "score": 52425.981779205846 }, { "content": "/// Provides methods that allow the consensus engine to issue commands and requests.\n\npub trait Service {\n\n // -- P2P --\n\n\n\n /// Send a consensus message to a specific, connected peer\n\n #[allow(clippy::ptr_arg)]\n\n fn send_to(&mut self, peer: &PeerId, message_type: &str, payload: Vec<u8>)\n\n -> Result<(), Error>;\n\n\n\n /// Broadcast a message to all connected peers\n\n fn broadcast(&mut self, message_type: &str, payload: Vec<u8>) -> Result<(), Error>;\n\n\n\n // -- Block Creation --\n\n\n\n /// Initialize a new block built on the block with the given previous id and\n\n /// begin adding batches to it. If no previous id is specified, the current\n\n /// head will be used.\n\n fn initialize_block(&mut self, previous_id: Option<BlockId>) -> Result<(), Error>;\n\n\n\n /// Stop adding batches to the current block and return a summary of its\n\n /// contents.\n", "file_path": "src/consensus/service.rs", "rank": 80, "score": 52415.80740604094 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n// Includes the autogenerated protobuf messages\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/messages/mod.rs\"));\n\n\n\npub use protobuf::Message;\n\n\n\n#[cfg(feature = \"old-sawtooth\")]\n\npub use self::processor_old as processor;\n", "file_path": "src/messages.rs", "rank": 81, "score": 37060.7601485878 }, { "content": "/*\n\n * Copyright 2017 Intel Corporation\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\nuse crate::messages::validator::Message;\n\nuse crate::messages::validator::Message_MessageType;\n\nuse std::sync::mpsc::Receiver;\n\nuse 
std::sync::mpsc::RecvError;\n\nuse std::time::Duration;\n\n\n\n/// A Message Sender\n\n///\n\n/// A message\n", "file_path": "src/messaging/stream.rs", "rank": 82, "score": 35521.07010753668 }, { "content": "\n\nimpl std::fmt::Display for ReceiveError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match *self {\n\n ReceiveError::TimeoutError => write!(f, \"TimeoutError\"),\n\n ReceiveError::ChannelError(ref err) => write!(f, \"ChannelError: {}\", err),\n\n ReceiveError::DisconnectedError => write!(f, \"DisconnectedError\"),\n\n }\n\n }\n\n}\n\n/// MessageFuture is a promise for the reply to a sent message on connection.\n\npub struct MessageFuture {\n\n inner: Receiver<MessageResult>,\n\n result: Option<MessageResult>,\n\n}\n\n\n\nimpl MessageFuture {\n\n pub fn new(inner: Receiver<MessageResult>) -> Self {\n\n MessageFuture {\n\n inner,\n", "file_path": "src/messaging/stream.rs", "rank": 83, "score": 35474.204046875246 }, { "content": " result: None,\n\n }\n\n }\n\n\n\n pub fn get(&mut self) -> MessageResult {\n\n if let Some(ref result) = self.result {\n\n return result.clone();\n\n }\n\n\n\n match self.inner.recv() {\n\n Ok(result) => {\n\n self.result = Some(result.clone());\n\n result\n\n }\n\n Err(err) => Err(ReceiveError::ChannelError(err)),\n\n }\n\n }\n\n\n\n pub fn get_timeout(&mut self, timeout: Duration) -> MessageResult {\n\n if let Some(ref result) = self.result {\n", "file_path": "src/messaging/stream.rs", "rank": 84, "score": 35467.45296208306 }, { "content": " return result.clone();\n\n }\n\n\n\n match self.inner.recv_timeout(timeout) {\n\n Ok(result) => {\n\n self.result = Some(result.clone());\n\n result\n\n }\n\n Err(_) => Err(ReceiveError::TimeoutError),\n\n }\n\n }\n\n\n\n pub fn get_maybe_timeout(&mut self, timeout: Option<Duration>) -> MessageResult {\n\n if let Some(timeout) = timeout {\n\n self.get_timeout(timeout)\n\n } else {\n\n self.get()\n\n }\n\n }\n\n}\n", "file_path": "src/messaging/stream.rs", "rank": 85, "score": 35467.35301719353 }, { "content": "\n\n/// Queue for inbound messages, sent directly to this stream.\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use std::sync::mpsc::channel;\n\n use std::thread;\n\n\n\n use crate::messages::validator::Message;\n\n use crate::messages::validator::Message_MessageType;\n\n\n\n use super::MessageFuture;\n\n\n\n fn make_ping(correlation_id: &str) -> Message {\n\n let mut message = Message::new();\n\n message.set_message_type(Message_MessageType::PING_REQUEST);\n\n message.set_correlation_id(String::from(correlation_id));\n\n message.set_content(String::from(\"PING\").into_bytes());\n\n\n", "file_path": "src/messaging/stream.rs", "rank": 86, "score": 35464.64348138496 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Errors that occur on receiving a message.\n\n#[derive(Debug, Clone)]\n\npub enum ReceiveError {\n\n TimeoutError,\n\n ChannelError(RecvError),\n\n DisconnectedError,\n\n}\n\n\n\nimpl std::error::Error for ReceiveError {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match self {\n\n ReceiveError::ChannelError(err) => Some(&*err),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "src/messaging/stream.rs", "rank": 87, "score": 35463.511055931514 }, { "content": " message\n\n }\n\n\n\n #[test]\n\n fn future_get() {\n\n let (tx, rx) = channel();\n\n\n\n let mut fut = MessageFuture::new(rx);\n\n\n\n let t = thread::spawn(move || {\n\n tx.send(Ok(make_ping(\"my_test\"))).unwrap();\n\n });\n\n\n\n let msg = fut.get().expect(\"Should have a message\");\n\n\n\n 
t.join().unwrap();\n\n\n\n assert_eq!(msg, make_ping(\"my_test\"));\n\n }\n\n}\n", "file_path": "src/messaging/stream.rs", "rank": 88, "score": 35453.43492635579 }, { "content": "/*\n\n * Copyright 2018 Intel Corporation\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * ------------------------------------------------------------------------------\n\n */\n\n\n\npub mod engine;\n\npub mod service;\n\n\n\npub mod zmq_driver;\n\npub mod zmq_service;\n", "file_path": "src/consensus/mod.rs", "rank": 90, "score": 35130.38438641916 }, { "content": "/*\n\n * Copyright 2017 Intel Corporation\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * ------------------------------------------------------------------------------\n\n */\n\n\n\npub mod secp256k1;\n\n\n\nuse std::error::Error as StdError;\n", "file_path": "src/signing/mod.rs", "rank": 91, "score": 35127.28623491208 } ]
Rust
client/telemetry/src/transport.rs
cruz101-hub/substrate
6e45ffaa4d2a2aa62405194890477985b94747cd
use futures::{
    executor::block_on,
    prelude::*,
    ready,
    task::{Context, Poll},
};
use libp2p::{
    core::transport::{timeout::TransportTimeout, OptionalTransport},
    wasm_ext, Transport,
};
use std::io;
use std::pin::Pin;
use std::time::Duration;

const CONNECT_TIMEOUT: Duration = Duration::from_secs(20);

pub(crate) fn initialize_transport(
    wasm_external_transport: Option<wasm_ext::ExtTransport>,
) -> Result<WsTrans, io::Error> {
    let transport = match wasm_external_transport.clone() {
        Some(t) => OptionalTransport::some(t),
        None => OptionalTransport::none(),
    }
    .map((|inner, _| StreamSink::from(inner)) as fn(_, _) -> _);

    #[cfg(not(target_os = "unknown"))]
    let transport = transport.or_transport({
        let inner = block_on(libp2p::dns::DnsConfig::system(libp2p::tcp::TcpConfig::new()))?;
        libp2p::websocket::framed::WsConfig::new(inner).and_then(|connec, _| {
            let connec = connec
                .with(|item| {
                    let item = libp2p::websocket::framed::OutgoingData::Binary(item);
                    future::ready(Ok::<_, io::Error>(item))
                })
                .try_filter(|item| future::ready(item.is_data()))
                .map_ok(|data| data.into_bytes());
            future::ready(Ok::<_, io::Error>(connec))
        })
    });

    Ok(TransportTimeout::new(
        transport.map(|out, _| {
            let out = out
                .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
                .sink_map_err(|err| io::Error::new(io::ErrorKind::Other, err));
            Box::pin(out) as Pin<Box<_>>
        }),
        CONNECT_TIMEOUT,
    )
    .boxed())
}

pub(crate) trait StreamAndSink<I>: Stream + Sink<I> {}
impl<T: ?Sized + Stream + Sink<I>, I> StreamAndSink<I> for T {}

pub(crate) type WsTrans = libp2p::core::transport::Boxed<
    Pin<
        Box<
            dyn StreamAndSink<Vec<u8>, Item = Result<Vec<u8>, io::Error>, Error = io::Error>
                + Send,
        >,
    >,
>;

#[pin_project::pin_project]
pub(crate) struct StreamSink<T>(#[pin] T, Option<Vec<u8>>);

impl<T> From<T> for StreamSink<T> {
    fn from(inner: T) -> StreamSink<T> {
        StreamSink(inner, None)
    }
}

impl<T: AsyncRead> Stream for StreamSink<T> {
    type Item = Result<Vec<u8>, io::Error>;

    fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        let this = self.project();
        let mut buf = vec![0; 128];
        match ready!(AsyncRead::poll_read(this.0, cx, &mut buf)) {
            Ok(0) => Poll::Ready(None),
            Ok(n) => {
                buf.truncate(n);
                Poll::Ready(Some(Ok(buf)))
            }
            Err(err) => Poll::Ready(Some(Err(err))),
        }
    }
}

impl<T: AsyncWrite> StreamSink<T> {
    fn poll_flush_buffer(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), io::Error>> {
        let this = self.project();

        if let Some(buffer) = this.1 {
            if ready!(this.0.poll_write(cx, &buffer[..]))? != buffer.len() {
                log::error!(target: "telemetry", "Detected some internal buffering happening in the telemetry");
                let err = io::Error::new(io::ErrorKind::Other, "Internal buffering detected");
                return Poll::Ready(Err(err));
            }
        }

        *this.1 = None;
        Poll::Ready(Ok(()))
    }
}

impl<T: AsyncWrite> Sink<Vec<u8>> for StreamSink<T> {
    type Error = io::Error;

    fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(StreamSink::poll_flush_buffer(self, cx))?;
        Poll::Ready(Ok(()))
    }

    fn start_send(self: Pin<&mut Self>, item: Vec<u8>) -> Result<(), Self::Error> {
        let this = self.project();
        debug_assert!(this.1.is_none());
        *this.1 = Some(item);
        Ok(())
    }

    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(self.as_mut().poll_flush_buffer(cx))?;
        let this = self.project();
        AsyncWrite::poll_flush(this.0, cx)
    }

    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(self.as_mut().poll_flush_buffer(cx))?;
        let this = self.project();
        AsyncWrite::poll_close(this.0, cx)
    }
}

use futures::{
    executor::block_on,
    prelude::*,
    ready,
    task::{Context, Poll},
};
use libp2p::{
    core::transport::{timeout::TransportTimeout, OptionalTransport},
    wasm_ext, Transport,
};
use std::io;
use std::pin::Pin;
use std::time::Duration;

const CONNECT_TIMEOUT: Duration = Duration::from_secs(20);

pub(crate) fn initialize_transport(
    wasm_external_transport: Option<wasm_ext::ExtTransport>,
) -> Result<WsTrans, io::Error> {
    let transport = match wasm_external_transport.clone() {
        Some(t) => OptionalTransport::some(t),
        None => OptionalTransport::none(),
    }
    .map((|inner, _| StreamSink::from(inner)) as fn(_, _) -> _);

    #[cfg(not(target_os = "unknown"))]
    let transport = transport.or_transport({
        let inner = block_on(libp2p::dns::DnsConfig::system(libp2p::tcp::TcpConfig::new()))?;
        libp2p::websocket::framed::WsConfig::new(inner).and_then(|connec, _| {
            let connec = connec
                .with(|item| {
                    let item = libp2p::websocket::framed::OutgoingData::Binary(item);
                    future::ready(Ok::<_, io::Error>(item))
                })
                .try_filter(|item| future::ready(item.is_data()))
                .map_ok(|data| data.into_bytes());
            future::ready(Ok::<_, io::Error>(connec))
        })
    });

}

pub(crate) trait StreamAndSink<I>: Stream + Sink<I> {}
impl<T: ?Sized + Stream + Sink<I>, I> StreamAndSink<I> for T {}

pub(crate) type WsTrans = libp2p::core::transport::Boxed<
    Pin<
        Box<
            dyn StreamAndSink<Vec<u8>, Item = Result<Vec<u8>, io::Error>, Error = io::Error>
                + Send,
        >,
    >,
>;

#[pin_project::pin_project]
pub(crate) struct StreamSink<T>(#[pin] T, Option<Vec<u8>>);

impl<T> From<T> for StreamSink<T> {
    fn from(inner: T) -> StreamSink<T> {
        StreamSink(inner, None)
    }
}

impl<T: AsyncRead> Stream for StreamSink<T> {
    type Item = Result<Vec<u8>, io::Error>;

    fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        let this = self.project();
        let mut buf = vec![0; 128];
        match ready!(AsyncRead::poll_read(this.0, cx, &mut buf)) {
            Ok(0) => Poll::Ready(None),
            Ok(n) => {
                buf.truncate(n);
                Poll::Ready(Some(Ok(buf)))
            }
            Err(err) => Poll::Ready(Some(Err(err))),
        }
    }
}

impl<T: AsyncWrite> StreamSink<T> {
    fn poll_flush_buffer(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), io::Error>> {
        let this = self.project();

        if let Some(buffer) = this.1 {
            if ready!(this.0.poll_write(cx, &buffer[..]))? != buffer.len() {
                log::error!(target: "telemetry", "Detected some internal buffering happening in the telemetry");
                let err = io::Error::new(io::ErrorKind::Other, "Internal buffering detected");
                return Poll::Ready(Err(err));
            }
        }

        *this.1 = None;
        Poll::Ready(Ok(()))
    }
}

impl<T: AsyncWrite> Sink<Vec<u8>> for StreamSink<T> {
    type Error = io::Error;

    fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(StreamSink::poll_flush_buffer(self, cx))?;
        Poll::Ready(Ok(()))
    }

    fn start_send(self: Pin<&mut Self>, item: Vec<u8>) -> Result<(), Self::Error> {
        let this = self.project();
        debug_assert!(this.1.is_none());
        *this.1 = Some(item);
        Ok(())
    }

    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(self.as_mut().poll_flush_buffer(cx))?;
        let this = self.project();
        AsyncWrite::poll_flush(this.0, cx)
    }

    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
        ready!(self.as_mut().poll_flush_buffer(cx))?;
        let this = self.project();
        AsyncWrite::poll_close(this.0, cx)
    }
}

Ok(TransportTimeout::new(
    transport.map(|out, _| {
        let out = out
            .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
            .sink_map_err(|err| io::Error::new(io::ErrorKind::Other, err));
        Box::pin(out) as Pin<Box<_>>
    }),
    CONNECT_TIMEOUT,
)
.boxed())
call_expression
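A row such as this one can be sanity-checked by reassembling the masked file from its parts. The sketch below is only an illustration, not part of the dataset: the field names mirror the schema listed at the top of this dump, and it assumes the prefix/middle/suffix split is exact, i.e. no characters are dropped at the mask boundaries.

// Illustrative consistency check for a fill-in-the-middle row: the original
// file should equal prefix + middle + suffix. Field names follow the schema
// shown at the top of this dump; the exactness of the split is an assumption.
fn row_is_consistent(file_code: &str, prefix: &str, middle: &str, suffix: &str) -> bool {
    let mut rebuilt = String::with_capacity(file_code.len());
    rebuilt.push_str(prefix);
    rebuilt.push_str(middle);
    rebuilt.push_str(suffix);
    rebuilt == file_code
}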
[ { "content": "fn decl_runtime_version_impl_inner(item: ItemConst) -> Result<TokenStream> {\n\n\tlet runtime_version = ParseRuntimeVersion::parse_expr(&*item.expr)?.build(item.expr.span())?;\n\n\tlet link_section =\n\n\t\tgenerate_emit_link_section_decl(&runtime_version.encode(), \"runtime_version\");\n\n\n\n\tOk(quote! {\n\n\t\t#item\n\n\t\t#link_section\n\n\t})\n\n}\n\n\n\n/// This is a duplicate of `sp_version::RuntimeVersion`. We cannot unfortunately use the original\n\n/// declaration, because if we directly depend on `sp_version` from this proc-macro cargo will\n\n/// enable `std` feature even for `no_std` wasm runtime builds.\n\n///\n\n/// One difference from the original definition is the `apis` field. Since we don't actually parse\n\n/// `apis` from this macro it will always be emitteed as empty. An empty vector can be encoded as\n\n/// a zero-byte, thus `u8` is sufficient here.\n", "file_path": "primitives/version/proc-macro/src/decl_runtime_version.rs", "rank": 0, "score": 293240.17012822774 }, { "content": "/// Wraps around a `Future`. Report the polling duration to the `Histogram` and when the polling\n\n/// starts to the `Counter`.\n\npub fn with_poll_durations<T>(\n\n\tpoll_duration: Histogram,\n\n\tpoll_start: Counter<U64>,\n\n\tinner: T\n\n) -> PrometheusFuture<T> {\n\n\tPrometheusFuture {\n\n\t\tinner,\n\n\t\tpoll_duration,\n\n\t\tpoll_start,\n\n\t}\n\n}\n\n\n\n/// Wraps around `Future` and adds diagnostics to it.\n\n#[pin_project::pin_project]\n\n#[derive(Clone)]\n\npub struct PrometheusFuture<T> {\n\n\t/// The inner future doing the actual work.\n\n\t#[pin]\n\n\tinner: T,\n\n\tpoll_duration: Histogram,\n", "file_path": "client/service/src/task_manager/prometheus_future.rs", "rank": 1, "score": 267922.29082690284 }, { "content": "/// Creates a stream that returns a new value every `duration`.\n\nfn interval(duration: Duration) -> impl Stream<Item = ()> + Unpin {\n\n\tfutures::stream::unfold((), move |_| Delay::new(duration).map(|_| Some(((), ())))).map(drop)\n\n}\n\n\n\n/// The format to print telemetry output in.\n\n#[derive(Clone, Debug)]\n\npub struct OutputFormat {\n\n\t/// Enable color output in logs.\n\n\t///\n\n\t/// Is enabled by default.\n\n\tpub enable_color: bool,\n\n}\n\n\n\nimpl Default for OutputFormat {\n\n\tfn default() -> Self {\n\n\t\tSelf {\n\n\t\t\tenable_color: true,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// Marker trait for a type that implements `TransactionPool` and `MallocSizeOf` on `not(target_os = \"unknown\")`.\n", "file_path": "client/informant/src/lib.rs", "rank": 2, "score": 259522.50670416854 }, { "content": "/// Creates a stream that returns a new value every `duration`.\n\npub fn interval(duration: Duration) -> impl Stream<Item = ()> + Unpin {\n\n\tunfold((), move |_| Delay::new(duration).map(|_| Some(((), ())))).map(drop)\n\n}\n\n\n\n/// Wrapper around `LinkedHashSet` with bounded growth.\n\n///\n\n/// In the limit, for each element inserted the oldest existing element will be removed.\n\n#[derive(Debug, Clone)]\n\npub struct LruHashSet<T: Hash + Eq> {\n\n\tset: LinkedHashSet<T>,\n\n\tlimit: NonZeroUsize,\n\n}\n\n\n\nimpl<T: Hash + Eq> LruHashSet<T> {\n\n\t/// Create a new `LruHashSet` with the given (exclusive) limit.\n\n\tpub fn new(limit: NonZeroUsize) -> Self {\n\n\t\tSelf {\n\n\t\t\tset: LinkedHashSet::new(),\n\n\t\t\tlimit,\n\n\t\t}\n", "file_path": "client/network/src/utils.rs", "rank": 3, "score": 252968.85953306122 }, { "content": "// See: https://github.com/rust-lang/rust/issues/40062\n\nfn remove_item<T: PartialEq>(vec: &mut Vec<T>, 
item: &T) {\n\n\tif let Some(idx) = vec.iter().position(|i| i == item) {\n\n\t\tvec.swap_remove(idx);\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse sp_runtime::transaction_validity::TransactionSource as Source;\n\n\n\n\tfn tx(id: u8) -> Transaction<u64, Vec<u8>> {\n\n\t\tTransaction {\n\n\t\t\tdata: vec![id],\n\n\t\t\tbytes: 1,\n\n\t\t\thash: id as u64,\n\n\t\t\tpriority: 1,\n\n\t\t\tvalid_till: 2,\n\n\t\t\trequires: vec![vec![1], vec![2]],\n\n\t\t\tprovides: vec![vec![3], vec![4]],\n", "file_path": "client/transaction-pool/src/graph/ready.rs", "rank": 4, "score": 235665.41237045272 }, { "content": "// Get an iterator of errors from a model. If the model is `None` all errors are zero.\n\nfn extract_errors(model: &Option<RegressionModel>) -> impl Iterator<Item=u128> + '_ {\n\n\tlet mut errors = model.as_ref().map(|m| m.se.regressor_values.iter());\n\n\tstd::iter::from_fn(move || {\n\n\t\tmatch &mut errors {\n\n\t\t\tSome(model) => model.next().map(|val| *val as u128),\n\n\t\t\t_ => Some(0),\n\n\t\t}\n\n\t})\n\n}\n\n\n", "file_path": "utils/frame/benchmarking-cli/src/writer.rs", "rank": 5, "score": 208142.02082295576 }, { "content": "fn generate_cache<Voters, Item>(voters: Voters) -> HashMap<Item, usize>\n\nwhere\n\n\tVoters: Iterator<Item = Item>,\n\n\tItem: Hash + Eq + Copy,\n\n{\n\n\tlet mut cache = HashMap::new();\n\n\tfor (idx, voter_id) in voters.enumerate() {\n\n\t\tcache.insert(voter_id, idx);\n\n\t}\n\n\tcache\n\n}\n\n\n", "file_path": "primitives/npos-elections/src/mock.rs", "rank": 6, "score": 206613.62684196013 }, { "content": "/// Return item version (`#[version(X)]`) attribute, if present.\n\nfn get_item_version(item: &TraitItemMethod) -> Result<Option<u32>> {\n\n\titem.attrs.iter().find(|attr| attr.path.is_ident(\"version\"))\n\n\t\t.map(|attr| parse_version_attribute(attr))\n\n\t\t.transpose()\n\n}\n\n\n", "file_path": "primitives/runtime-interface/proc-macro/src/utils.rs", "rank": 7, "score": 204228.1931027672 }, { "content": "/// Encode and allocate node type header (type and size), and partial value.\n\n/// It uses an iterator over encoded partial bytes as input.\n\nfn partial_from_iterator_encode<I: Iterator<Item = u8>>(\n\n\tpartial: I,\n\n\tnibble_count: usize,\n\n\tnode_kind: NodeKind,\n\n) -> Vec<u8> {\n\n\tlet nibble_count = sp_std::cmp::min(trie_constants::NIBBLE_SIZE_BOUND, nibble_count);\n\n\n\n\tlet mut output = Vec::with_capacity(3 + (nibble_count / nibble_ops::NIBBLE_PER_BYTE));\n\n\tmatch node_kind {\n\n\t\tNodeKind::Leaf => NodeHeader::Leaf(nibble_count).encode_to(&mut output),\n\n\t\tNodeKind::BranchWithValue => NodeHeader::Branch(true, nibble_count).encode_to(&mut output),\n\n\t\tNodeKind::BranchNoValue => NodeHeader::Branch(false, nibble_count).encode_to(&mut output),\n\n\t};\n\n\toutput.extend(partial);\n\n\toutput\n\n}\n\n\n", "file_path": "primitives/trie/src/node_codec.rs", "rank": 8, "score": 195822.10504991523 }, { "content": "#[proc_macro]\n\npub fn generate_solution_type(item: TokenStream) -> TokenStream {\n\n\tlet SolutionDef {\n\n\t\tvis,\n\n\t\tident,\n\n\t\tcount,\n\n\t\tvoter_type,\n\n\t\ttarget_type,\n\n\t\tweight_type,\n\n\t\tcompact_encoding,\n\n\t} = syn::parse_macro_input!(item as SolutionDef);\n\n\n\n\tlet imports = imports().unwrap_or_else(|e| e.to_compile_error());\n\n\n\n\tlet solution_struct = struct_def(\n\n\t\tvis,\n\n\t\tident.clone(),\n\n\t\tcount,\n\n\t\tvoter_type.clone(),\n\n\t\ttarget_type.clone(),\n\n\t\tweight_type.clone(),\n\n\t\tcompact_encoding,\n\n\t).unwrap_or_else(|e| 
e.to_compile_error());\n\n\n\n\tquote!(\n\n\t\t#imports\n\n\t\t#solution_struct\n\n\t)\n\n\t.into()\n\n}\n\n\n", "file_path": "primitives/npos-elections/compact/src/lib.rs", "rank": 9, "score": 187436.4699226708 }, { "content": "/// Returns an iterator over all trait methods for the given trait definition.\n\nfn get_trait_methods<'a>(trait_def: &'a ItemTrait) -> impl Iterator<Item = &'a TraitItemMethod> {\n\n\ttrait_def\n\n\t\t.items\n\n\t\t.iter()\n\n\t\t.filter_map(|i| match i {\n\n\t\t\tTraitItem::Method(ref method) => Some(method),\n\n\t\t\t_ => None,\n\n\t\t})\n\n}\n\n\n", "file_path": "primitives/runtime-interface/proc-macro/src/utils.rs", "rank": 10, "score": 186557.01073868768 }, { "content": "fn impl_test(args: TokenStream, item: TokenStream) -> TokenStream {\n\n\tlet input = syn::parse_macro_input!(item as syn::ItemFn);\n\n\tlet args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n\n\n\tparse_knobs(input, args).unwrap_or_else(|e| e.to_compile_error().into())\n\n}\n\n\n", "file_path": "test-utils/derive/src/lib.rs", "rank": 11, "score": 183302.66681266058 }, { "content": "/// Builds the transport that serves as a common ground for all connections.\n\n///\n\n/// If `memory_only` is true, then only communication within the same process are allowed. Only\n\n/// addresses with the format `/memory/...` are allowed.\n\n///\n\n/// `yamux_window_size` is the maximum size of the Yamux receive windows. `None` to leave the\n\n/// default (256kiB).\n\n///\n\n/// `yamux_maximum_buffer_size` is the maximum allowed size of the Yamux buffer. This should be\n\n/// set either to the maximum of all the maximum allowed sizes of messages frames of all\n\n/// high-level protocols combined, or to some generously high value if you are sure that a maximum\n\n/// size is enforced on all high-level protocols.\n\n///\n\n/// Returns a `BandwidthSinks` object that allows querying the average bandwidth produced by all\n\n/// the connections spawned with this transport.\n\npub fn build_transport(\n\n\tkeypair: identity::Keypair,\n\n\tmemory_only: bool,\n\n\twasm_external_transport: Option<wasm_ext::ExtTransport>,\n\n\tyamux_window_size: Option<u32>,\n\n\tyamux_maximum_buffer_size: usize,\n\n) -> (Boxed<(PeerId, StreamMuxerBox)>, Arc<BandwidthSinks>) {\n\n\t// Build the base layer of the transport.\n\n\tlet transport = if let Some(t) = wasm_external_transport {\n\n\t\tOptionalTransport::some(t)\n\n\t} else {\n\n\t\tOptionalTransport::none()\n\n\t};\n\n\t#[cfg(not(target_os = \"unknown\"))]\n\n\tlet transport = transport.or_transport(if !memory_only {\n\n\t\tlet desktop_trans = tcp::TcpConfig::new().nodelay(true);\n\n\t\tlet desktop_trans = websocket::WsConfig::new(desktop_trans.clone())\n\n\t\t\t.or_transport(desktop_trans);\n\n\t\tlet dns_init = futures::executor::block_on(dns::DnsConfig::system(desktop_trans.clone()));\n\n\t\tOptionalTransport::some(if let Ok(dns) = dns_init {\n", "file_path": "client/network/src/transport.rs", "rank": 12, "score": 182922.04531452223 }, { "content": "/// Compute the error due to integer division in the expression `x / denom * numer`.\n\n///\n\n/// Take the remainder of `x / denom` and multiply by `numer / denom`. 
The result can be added\n\n/// to `x / denom * numer` for an accurate result.\n\nfn rational_mul_correction<N, P>(x: N, numer: P::Inner, denom: P::Inner, rounding: Rounding) -> N\n\nwhere\n\n\tN: UniqueSaturatedInto<P::Inner> + ops::Div<N, Output=N> + ops::Mul<N,\n\n\tOutput=N> + ops::Add<N, Output=N> + ops::Rem<N, Output=N> + Unsigned,\n\n\tP: PerThing,\n\n\tP::Inner: Into<N>\n\n{\n\n\tlet numer_upper = P::Upper::from(numer);\n\n\tlet denom_n: N = denom.into();\n\n\tlet denom_upper = P::Upper::from(denom);\n\n\tlet rem = x.rem(denom_n);\n\n\t// `rem` is less than `denom`, which fits in `P::Inner`.\n\n\tlet rem_inner = rem.saturated_into::<P::Inner>();\n\n\t// `P::Upper` always fits `P::Inner::max_value().pow(2)`, thus it fits `rem * numer`.\n\n\tlet rem_mul_upper = P::Upper::from(rem_inner) * numer_upper;\n\n\t// `rem` is less than `denom`, so `rem * numer / denom` is less than `numer`, which fits in\n\n\t// `P::Inner`.\n\n\tlet mut rem_mul_div_inner = (rem_mul_upper / denom_upper).saturated_into::<P::Inner>();\n\n\tmatch rounding {\n\n\t\t// Already rounded down\n", "file_path": "primitives/arithmetic/src/per_things.rs", "rank": 13, "score": 182846.53454784086 }, { "content": "fn hash_authority_id(id: &[u8]) -> libp2p::kad::record::Key {\n\n\tlibp2p::kad::record::Key::new(&libp2p::multihash::Sha2_256::digest(id))\n\n}\n\n\n\n/// Prometheus metrics for a [`Worker`].\n\n#[derive(Clone)]\n\npub(crate) struct Metrics {\n\n\tpublish: Counter<U64>,\n\n\tamount_addresses_last_published: Gauge<U64>,\n\n\trequests: Counter<U64>,\n\n\trequests_pending: Gauge<U64>,\n\n\tdht_event_received: CounterVec<U64>,\n\n\thandle_value_found_event_failure: Counter<U64>,\n\n\tknown_authorities_count: Gauge<U64>,\n\n}\n\n\n\nimpl Metrics {\n\n\tpub(crate) fn register(registry: &prometheus_endpoint::Registry) -> Result<Self> {\n\n\t\tOk(Self {\n\n\t\t\tpublish: register(\n", "file_path": "client/authority-discovery/src/worker.rs", "rank": 14, "score": 181599.05850095314 }, { "content": "fn decl_runtime_apis_impl_inner(api_decls: &[ItemTrait]) -> Result<TokenStream> {\n\n\tcheck_trait_decls(&api_decls)?;\n\n\n\n\tlet hidden_includes = generate_hidden_includes(HIDDEN_INCLUDES_ID);\n\n\tlet runtime_decls = generate_runtime_decls(api_decls)?;\n\n\tlet client_side_decls = generate_client_side_decls(api_decls)?;\n\n\n\n\tOk(\n\n\t\tquote!(\n\n\t\t\t#hidden_includes\n\n\n\n\t\t\t#runtime_decls\n\n\n\n\t\t\t#client_side_decls\n\n\t\t)\n\n\t)\n\n}\n", "file_path": "primitives/api/proc-macro/src/decl_runtime_apis.rs", "rank": 15, "score": 181346.08479829735 }, { "content": "fn impl_runtime_apis_impl_inner(api_impls: &[ItemImpl]) -> Result<TokenStream> {\n\n\tlet dispatch_impl = generate_dispatch_function(api_impls)?;\n\n\tlet api_impls_for_runtime = generate_api_impl_for_runtime(api_impls)?;\n\n\tlet base_runtime_api = generate_runtime_api_base_structures()?;\n\n\tlet hidden_includes = generate_hidden_includes(HIDDEN_INCLUDES_ID);\n\n\tlet runtime_api_versions = generate_runtime_api_versions(api_impls)?;\n\n\tlet wasm_interface = generate_wasm_interface(api_impls)?;\n\n\tlet api_impls_for_runtime_api = generate_api_impl_for_runtime_api(api_impls)?;\n\n\n\n\tOk(\n\n\t\tquote!(\n\n\t\t\t#hidden_includes\n\n\n\n\t\t\t#base_runtime_api\n\n\n\n\t\t\t#api_impls_for_runtime\n\n\n\n\t\t\t#api_impls_for_runtime_api\n\n\n\n\t\t\t#runtime_api_versions\n", "file_path": "primitives/api/proc-macro/src/impl_runtime_apis.rs", "rank": 16, "score": 181346.08479829735 }, { "content": "/// Parses a WebSocket URL into a libp2p 
`Multiaddr`.\n\nfn url_to_multiaddr(url: &str) -> Result<Multiaddr, libp2p::multiaddr::Error> {\n\n\t// First, assume that we have a `Multiaddr`.\n\n\tlet parse_error = match url.parse() {\n\n\t\tOk(ma) => return Ok(ma),\n\n\t\tErr(err) => err,\n\n\t};\n\n\n\n\t// If not, try the `ws://path/url` format.\n\n\tif let Ok(ma) = libp2p::multiaddr::from_url(url) {\n\n\t\treturn Ok(ma);\n\n\t}\n\n\n\n\t// If we have no clue about the format of that string, assume that we were expecting a\n\n\t// `Multiaddr`.\n\n\tErr(parse_error)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::url_to_multiaddr;\n", "file_path": "client/telemetry/src/endpoints.rs", "rank": 17, "score": 180241.99586861115 }, { "content": "#[proc_macro_attribute]\n\npub fn test(args: TokenStream, item: TokenStream) -> TokenStream {\n\n\timpl_test(args, item)\n\n}\n\n\n", "file_path": "test-utils/derive/src/lib.rs", "rank": 18, "score": 179554.22523176996 }, { "content": "#[proc_macro_attribute]\n\npub fn pallet(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\tpallet::pallet(attr, item)\n\n}\n\n\n\n/// Execute the annotated function in a new storage transaction.\n\n///\n\n/// The return type of the annotated function must be `Result`. All changes to storage performed\n\n/// by the annotated function are discarded if it returns `Err`, or committed if `Ok`.\n\n///\n\n/// # Example\n\n///\n\n/// ```nocompile\n\n/// #[transactional]\n\n/// fn value_commits(v: u32) -> result::Result<u32, &'static str> {\n\n/// \tValue::set(v);\n\n/// \tOk(v)\n\n/// }\n\n///\n\n/// #[transactional]\n\n/// fn value_rollbacks(v: u32) -> result::Result<u32, &'static str> {\n\n/// \tValue::set(v);\n\n/// \tErr(\"nah\")\n\n/// }\n\n/// ```\n", "file_path": "frame/support/procedural/src/lib.rs", "rank": 19, "score": 179554.22523176996 }, { "content": "/// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr`\n\npub fn take_item_pallet_attrs<Attr>(item: &mut impl MutItemAttrs) -> syn::Result<Vec<Attr>> where\n\n\tAttr: syn::parse::Parse,\n\n{\n\n\tlet mut pallet_attrs = Vec::new();\n\n\n\n\twhile let Some(attr) = take_first_item_pallet_attr(item)? {\n\n\t\tpallet_attrs.push(attr)\n\n\t}\n\n\n\n\tOk(pallet_attrs)\n\n}\n\n\n", "file_path": "frame/support/procedural/src/pallet/parse/helper.rs", "rank": 20, "score": 177312.69782628177 }, { "content": "fn mock_impl_runtime_apis_impl_inner(api_impls: &[ItemImpl]) -> Result<TokenStream> {\n\n\tlet hidden_includes = generate_hidden_includes(HIDDEN_INCLUDES_ID);\n\n\tlet GeneratedRuntimeApiImpls { impls, block_type, self_ty } =\n\n\t\tgenerate_runtime_api_impls(api_impls)?;\n\n\tlet api_traits = implement_common_api_traits(block_type, self_ty)?;\n\n\n\n\tOk(quote!(\n\n\t\t#hidden_includes\n\n\n\n\t\t#impls\n\n\n\n\t\t#api_traits\n\n\t))\n\n}\n", "file_path": "primitives/api/proc-macro/src/mock_impl_runtime_apis.rs", "rank": 21, "score": 176642.33771172477 }, { "content": "/// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`.\n\npub fn remove_storage_prefix(module: &[u8], item: &[u8], hash: &[u8]) {\n\n\tlet mut key = vec![0u8; 32 + hash.len()];\n\n\tkey[0..16].copy_from_slice(&Twox128::hash(module));\n\n\tkey[16..32].copy_from_slice(&Twox128::hash(item));\n\n\tkey[32..].copy_from_slice(hash);\n\n\tframe_support::storage::unhashed::kill_prefix(&key, None);\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 22, "score": 176146.71656016837 }, { "content": "/// Take the first pallet attribute (e.g. 
attribute like `#[pallet..]`) and decode it to `Attr`\n\npub fn take_first_item_pallet_attr<Attr>(item: &mut impl MutItemAttrs) -> syn::Result<Option<Attr>> where\n\n\tAttr: syn::parse::Parse,\n\n{\n\n\tlet attrs = if let Some(attrs) = item.mut_item_attrs() {\n\n\t\tattrs\n\n\t} else {\n\n\t\treturn Ok(None)\n\n\t};\n\n\n\n\tif let Some(index) = attrs.iter()\n\n\t\t.position(|attr|\n\n\t\t\tattr.path.segments.first().map_or(false, |segment| segment.ident == \"pallet\")\n\n\t\t)\n\n\t{\n\n\t\tlet pallet_attr = attrs.remove(index);\n\n\t\tOk(Some(syn::parse2(pallet_attr.into_token_stream())?))\n\n\t} else {\n\n\t\tOk(None)\n\n\t}\n\n}\n\n\n", "file_path": "frame/support/procedural/src/pallet/parse/helper.rs", "rank": 23, "score": 175996.75741248697 }, { "content": "#[proc_macro_attribute]\n\npub fn prefix_logs_with(arg: TokenStream, item: TokenStream) -> TokenStream {\n\n\tlet item_fn = syn::parse_macro_input!(item as ItemFn);\n\n\n\n\tif arg.is_empty() {\n\n\t\treturn Error::new(\n\n\t\t\tSpan::call_site(),\n\n\t\t\t\"missing argument: name of the node. Example: sc_cli::prefix_logs_with(<expr>)\",\n\n\t\t)\n\n\t\t.to_compile_error()\n\n\t\t.into();\n\n\t}\n\n\n\n\tlet name = syn::parse_macro_input!(arg as Expr);\n\n\n\n\tlet crate_name = match crate_name(\"sc-tracing\") {\n\n\t\tOk(FoundCrate::Itself) => Ident::from(Ident::new(\"sc_tracing\", Span::call_site())),\n\n\t\tOk(FoundCrate::Name(crate_name)) => Ident::new(&crate_name, Span::call_site()),\n\n\t\tErr(e) => return Error::new(Span::call_site(), e).to_compile_error().into(),\n\n\t};\n\n\n", "file_path": "client/tracing/proc-macro/src/lib.rs", "rank": 24, "score": 175820.8305530896 }, { "content": "/// Returns current duration since unix epoch.\n\npub fn duration_now() -> Duration {\n\n\tuse std::time::SystemTime;\n\n\tlet now = SystemTime::now();\n\n\tnow.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_else(|e| panic!(\n\n\t\t\"Current time {:?} is before unix epoch. 
Something is wrong: {:?}\",\n\n\t\tnow,\n\n\t\te,\n\n\t))\n\n}\n\n\n", "file_path": "client/consensus/slots/src/slots.rs", "rank": 25, "score": 175646.60364065596 }, { "content": "fn branch_node_bit_mask(has_children: impl Iterator<Item = bool>) -> (u8, u8) {\n\n\tlet mut bitmap: u16 = 0;\n\n\tlet mut cursor: u16 = 1;\n\n\tfor v in has_children {\n\n\t\tif v { bitmap |= cursor }\n\n\t\tcursor <<= 1;\n\n\t}\n\n\t((bitmap % 256 ) as u8, (bitmap / 256 ) as u8)\n\n}\n\n\n\n\n", "file_path": "primitives/trie/src/trie_stream.rs", "rank": 26, "score": 174238.46638866252 }, { "content": "/// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`.\n\npub fn have_storage_value(module: &[u8], item: &[u8], hash: &[u8]) -> bool {\n\n\tget_storage_value::<()>(module, item, hash).is_some()\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 27, "score": 173023.31949024633 }, { "content": "/// Returns the duration until the next slot from now.\n\npub fn time_until_next_slot(slot_duration: Duration) -> Duration {\n\n\tlet now = duration_now().as_millis();\n\n\n\n\tlet next_slot = (now + slot_duration.as_millis()) / slot_duration.as_millis();\n\n\tlet remaining_millis = next_slot * slot_duration.as_millis() - now;\n\n\tDuration::from_millis(remaining_millis as u64)\n\n}\n\n\n\n/// Information about a slot.\n\npub struct SlotInfo<B: BlockT> {\n\n\t/// The slot number as found in the inherent data.\n\n\tpub slot: Slot,\n\n\t/// Current timestamp as found in the inherent data.\n\n\tpub timestamp: sp_timestamp::Timestamp,\n\n\t/// The instant at which the slot ends.\n\n\tpub ends_at: Instant,\n\n\t/// The inherent data.\n\n\tpub inherent_data: InherentData,\n\n\t/// Slot duration.\n\n\tpub duration: Duration,\n", "file_path": "client/consensus/slots/src/slots.rs", "rank": 28, "score": 170463.98805463332 }, { "content": "fn branch_node(has_value: bool, has_children: impl Iterator<Item = bool>) -> [u8; 3] {\n\n\tlet mut result = [0, 0, 0];\n\n\tbranch_node_buffered(has_value, has_children, &mut result[..]);\n\n\tresult\n\n}\n\n\n", "file_path": "primitives/trie/src/trie_stream.rs", "rank": 29, "score": 170148.3137834688 }, { "content": "/// Start an import queue for the BABE consensus algorithm.\n\n///\n\n/// This method returns the import queue, some data that needs to be passed to the block authoring\n\n/// logic (`BabeLink`), and a future that must be run to\n\n/// completion and is responsible for listening to finality notifications and\n\n/// pruning the epoch changes tree.\n\n///\n\n/// The block import object provided must be the `BabeBlockImport` or a wrapper\n\n/// of it, otherwise crucial import logic will be omitted.\n\npub fn import_queue<Block: BlockT, Client, SelectChain, Inner, CAW, CIDP>(\n\n\tbabe_link: BabeLink<Block>,\n\n\tblock_import: Inner,\n\n\tjustification_import: Option<BoxJustificationImport<Block>>,\n\n\tclient: Arc<Client>,\n\n\tselect_chain: SelectChain,\n\n\tcreate_inherent_data_providers: CIDP,\n\n\tspawner: &impl sp_core::traits::SpawnEssentialNamed,\n\n\tregistry: Option<&Registry>,\n\n\tcan_author_with: CAW,\n\n\ttelemetry: Option<TelemetryHandle>,\n\n) -> ClientResult<DefaultImportQueue<Block, Client>> where\n\n\tInner: BlockImport<Block, Error = ConsensusError, Transaction = sp_api::TransactionFor<Client, Block>>\n\n\t\t+ Send + Sync + 'static,\n\n\tClient: ProvideRuntimeApi<Block> + ProvideCache<Block> + HeaderBackend<Block>\n\n\t\t+ HeaderMetadata<Block, Error = sp_blockchain::Error> + AuxStore\n\n\t\t+ Send + Sync 
+ 'static,\n\n\tClient::Api: BlockBuilderApi<Block> + BabeApi<Block> + ApiExt<Block>,\n\n\tSelectChain: sp_consensus::SelectChain<Block> + 'static,\n\n\tCAW: CanAuthorWith<Block> + Send + Sync + 'static,\n", "file_path": "client/consensus/babe/src/lib.rs", "rank": 30, "score": 169342.33688489252 }, { "content": "#[test]\n\nfn lazy_removal_does_not_use_all_weight() {\n\n\tlet (code, hash) = compile_module::<Test>(\"self_destruct\").unwrap();\n\n\tExtBuilder::default().existential_deposit(50).build().execute_with(|| {\n\n\t\tlet subsistence = Pallet::<Test>::subsistence_threshold();\n\n\t\tlet _ = Balances::deposit_creating(&ALICE, 1000 * subsistence);\n\n\n\n\t\tassert_ok!(\n\n\t\t\tContracts::instantiate_with_code(\n\n\t\t\t\tOrigin::signed(ALICE),\n\n\t\t\t\tsubsistence * 100,\n\n\t\t\t\tGAS_LIMIT,\n\n\t\t\t\tcode,\n\n\t\t\t\tvec![],\n\n\t\t\t\tvec![],\n\n\t\t\t),\n\n\t\t);\n\n\n\n\t\tlet addr = Contracts::contract_address(&ALICE, &hash, &[]);\n\n\t\tlet mut info = <ContractInfoOf::<Test>>::get(&addr).unwrap().get_alive().unwrap();\n\n\t\tlet weight_limit = 5_000_000_000;\n", "file_path": "frame/contracts/src/tests.rs", "rank": 31, "score": 168838.30513152576 }, { "content": "#[test]\n\nfn tip_new_cannot_be_used_twice() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tBalances::make_free_balance_be(&Treasury::account_id(), 101);\n\n\t\tassert_ok!(TipsModTestInst::tip_new(Origin::signed(10), b\"awesome.dot\".to_vec(), 3, 10));\n\n\t\tassert_noop!(\n\n\t\t\tTipsModTestInst::tip_new(Origin::signed(11), b\"awesome.dot\".to_vec(), 3, 10),\n\n\t\t\tError::<Test>::AlreadyKnown\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "frame/tips/src/tests.rs", "rank": 32, "score": 168838.30513152576 }, { "content": "#[test]\n\nfn babe_transcript_generation_match() {\n\n\tsp_tracing::try_init_simple();\n\n\tlet keystore_path = tempfile::tempdir().expect(\"Creates keystore path\");\n\n\tlet keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(keystore_path.path(), None)\n\n\t\t.expect(\"Creates keystore\"));\n\n\tlet public = SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some(\"//Alice\"))\n\n\t\t.expect(\"Generates authority pair\");\n\n\n\n\tlet epoch = Epoch {\n\n\t\tstart_slot: 0.into(),\n\n\t\tauthorities: vec![(public.into(), 1)],\n\n\t\trandomness: [0; 32],\n\n\t\tepoch_index: 1,\n\n\t\tduration: 100,\n\n\t\tconfig: BabeEpochConfiguration {\n\n\t\t\tc: (3, 10),\n\n\t\t\tallowed_slots: AllowedSlots::PrimaryAndSecondaryPlainSlots,\n\n\t\t},\n\n\t};\n\n\n", "file_path": "client/consensus/babe/src/tests.rs", "rank": 33, "score": 166326.71783613967 }, { "content": "fn main() {}\n", "file_path": "primitives/api/test/tests/ui/changed_in_unknown_version.rs", "rank": 34, "score": 166325.77130637143 }, { "content": "#[test]\n\nfn deposit_event_uses_actual_weight() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tSystem::initialize(\n\n\t\t\t&1,\n\n\t\t\t&[0u8; 32].into(),\n\n\t\t\t&Default::default(),\n\n\t\t\tInitKind::Full,\n\n\t\t);\n\n\t\tSystem::note_finished_initialize();\n\n\n\n\t\tlet pre_info = DispatchInfo {\n\n\t\t\tweight: 1000,\n\n\t\t\t.. 
Default::default()\n\n\t\t};\n\n\t\tSystem::note_applied_extrinsic(\n\n\t\t\t&Ok(Some(300).into()),\n\n\t\t\tpre_info,\n\n\t\t);\n\n\t\tSystem::note_applied_extrinsic(\n\n\t\t\t&Ok(Some(1000).into()),\n", "file_path": "frame/system/src/tests.rs", "rank": 35, "score": 166322.51572662915 }, { "content": "#[test]\n\nfn vested_balance_should_transfer_using_vest_other() {\n\n\tExtBuilder::default()\n\n\t\t.existential_deposit(10)\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tlet user1_free_balance = Balances::free_balance(&1);\n\n\t\t\tassert_eq!(user1_free_balance, 100); // Account 1 has free balance\n\n\t\t\t// Account 1 has only 5 units vested at block 1 (plus 50 unvested)\n\n\t\t\tassert_eq!(Vesting::vesting_balance(&1), Some(45));\n\n\t\t\tassert_ok!(Vesting::vest_other(Some(2).into(), 1));\n\n\t\t\tassert_ok!(Balances::transfer(Some(1).into(), 2, 55));\n\n\t\t});\n\n}\n\n\n", "file_path": "frame/vesting/src/tests.rs", "rank": 36, "score": 166322.51572662915 }, { "content": "#[test]\n\nfn election_second_tally_should_use_runners_up() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tSystem::set_block_number(4);\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(1), 0));\n\n\t\tassert_ok!(Elections::set_approvals(Origin::signed(6), vec![true], 0, 0, 60));\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(2), 1));\n\n\t\tassert_ok!(Elections::set_approvals(Origin::signed(2), vec![false, true], 0, 0, 20));\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(3), 2));\n\n\t\tassert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true], 0, 0, 30));\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(4), 3));\n\n\t\tassert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, false, true], 0, 0, 40));\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(5), 4));\n\n\t\tassert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, false, true], 0, 0, 50));\n\n\t\tassert_ok!(Elections::end_block(System::block_number()));\n\n\n\n\t\tSystem::set_block_number(6);\n\n\t\tassert_ok!(Elections::present_winner(Origin::signed(4), 1, 60, 0));\n\n\t\tassert_ok!(Elections::present_winner(Origin::signed(4), 3, 30, 0));\n\n\t\tassert_ok!(Elections::present_winner(Origin::signed(4), 4, 40, 0));\n\n\t\tassert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 0));\n", "file_path": "frame/elections/src/tests.rs", "rank": 37, "score": 166322.51572662915 }, { "content": "#[test]\n\nfn use_trie_function() {\n\n\tlet client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm).build();\n\n\tlet runtime_api = client.runtime_api();\n\n\tlet block_id = BlockId::Number(client.chain_info().best_number);\n\n\tassert_eq!(runtime_api.use_trie(&block_id).unwrap(), 2);\n\n}\n\n\n", "file_path": "primitives/api/test/tests/runtime_calls.rs", "rank": 38, "score": 166322.51572662915 }, { "content": "#[test]\n\nfn system_digest_item_encoding() {\n\n\tlet item = DigestItem::ChangesTrieRoot::<H256>(H256::default());\n\n\tlet encoded = item.encode();\n\n\tassert_eq!(encoded, vec![\n\n\t\t// type = DigestItemType::ChangesTrieRoot\n\n\t\t2,\n\n\t\t// trie root\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t\t0, 0, 0, 0,\n\n\t]);\n\n\n\n\tlet decoded: DigestItem<H256> = Decode::decode(&mut &encoded[..]).unwrap();\n\n\tassert_eq!(item, decoded);\n\n}\n\n\n", "file_path": 
"primitives/runtime/src/generic/tests.rs", "rank": 39, "score": 166291.8585240126 }, { "content": "#[test]\n\nfn retract_non_existent_item_fails() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\trun_to_block(1);\n\n\t\tassert_noop!(Gilt::retract_bid(Origin::signed(1), 10, 1), Error::<Test>::NotFound);\n\n\t\tassert_ok!(Gilt::place_bid(Origin::signed(1), 10, 1));\n\n\t\tassert_noop!(Gilt::retract_bid(Origin::signed(1), 20, 1), Error::<Test>::NotFound);\n\n\t\tassert_noop!(Gilt::retract_bid(Origin::signed(1), 10, 2), Error::<Test>::NotFound);\n\n\t\tassert_noop!(Gilt::retract_bid(Origin::signed(2), 10, 1), Error::<Test>::NotFound);\n\n\t});\n\n}\n\n\n", "file_path": "frame/gilt/src/tests.rs", "rank": 40, "score": 166291.8585240126 }, { "content": "#[test]\n\nfn retract_single_item_queue_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\trun_to_block(1);\n\n\t\tassert_ok!(Gilt::place_bid(Origin::signed(1), 10, 1));\n\n\t\tassert_ok!(Gilt::place_bid(Origin::signed(1), 10, 2));\n\n\t\tassert_ok!(Gilt::retract_bid(Origin::signed(1), 10, 1));\n\n\n\n\t\tassert_eq!(Balances::reserved_balance(1), 10);\n\n\t\tassert_eq!(Queues::<Test>::get(1), vec![]);\n\n\t\tassert_eq!(Queues::<Test>::get(2), vec![ GiltBid { amount: 10, who: 1 } ]);\n\n\t\tassert_eq!(QueueTotals::<Test>::get(), vec![(0, 0), (1, 10), (0, 0)]);\n\n\t});\n\n}\n\n\n", "file_path": "frame/gilt/src/tests.rs", "rank": 41, "score": 166291.8585240126 }, { "content": "/// Converts the deadline into a `Future` that resolves when the deadline is reached.\n\n///\n\n/// If `None`, returns a never-ending `Future`.\n\npub fn deadline_to_future(\n\n\tdeadline: Option<Timestamp>,\n\n) -> futures::future::MaybeDone<impl futures::Future<Output = ()>> {\n\n\tuse futures::future::{self, Either};\n\n\n\n\tfuture::maybe_done(match deadline.map(timestamp_from_now) {\n\n\t\tNone => Either::Left(future::pending()),\n\n\t\t// Only apply delay if we need to wait a non-zero duration\n\n\t\tSome(duration) if duration <= Duration::from_secs(0) =>\n\n\t\t\tEither::Right(Either::Left(future::ready(()))),\n\n\t\tSome(duration) =>\n\n\t\t\tEither::Right(Either::Right(futures_timer::Delay::new(duration))),\n\n\t})\n\n}\n", "file_path": "client/offchain/src/api/timestamp.rs", "rank": 42, "score": 165789.41361276215 }, { "content": "fn ensure_addresses_consistent_with_transport<'a>(\n\n\taddresses: impl Iterator<Item = &'a Multiaddr>,\n\n\ttransport: &TransportConfig,\n\n) -> Result<(), Error> {\n\n\tif matches!(transport, TransportConfig::MemoryOnly) {\n\n\t\tlet addresses: Vec<_> = addresses\n\n\t\t\t.filter(|x| x.iter()\n\n\t\t\t\t.any(|y| !matches!(y, libp2p::core::multiaddr::Protocol::Memory(_)))\n\n\t\t\t)\n\n\t\t\t.cloned()\n\n\t\t\t.collect();\n\n\n\n\t\tif !addresses.is_empty() {\n\n\t\t\treturn Err(Error::AddressesForAnotherTransport {\n\n\t\t\t\ttransport: transport.clone(),\n\n\t\t\t\taddresses,\n\n\t\t\t});\n\n\t\t}\n\n\t} else {\n\n\t\tlet addresses: Vec<_> = addresses\n", "file_path": "client/network/src/service.rs", "rank": 43, "score": 165777.89990193103 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/call_invalid_const.rs", "rank": 44, "score": 163914.1947752476 }, { "content": "#[test]\n\nfn candidacy_submission_not_using_free_slot_should_not_work() {\n\n\tlet mut t = new_test_ext_with_candidate_holes();\n\n\n\n\tt.execute_with(|| {\n\n\t\tassert_noop!(\n\n\t\t\tElections::submit_candidacy(Origin::signed(4), 3),\n\n\t\t\tError::<Test>::InvalidCandidateSlot\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": 
"frame/elections/src/tests.rs", "rank": 45, "score": 163911.30034545274 }, { "content": "#[test]\n\nfn candidacy_submission_using_free_slot_should_work() {\n\n\tlet mut t = new_test_ext_with_candidate_holes();\n\n\n\n\tt.execute_with(|| {\n\n\t\tassert_eq!(Elections::candidates(), vec![0, 0, 1]);\n\n\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(2), 1));\n\n\t\tassert_eq!(Elections::candidates(), vec![0, 2, 1]);\n\n\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(3), 0));\n\n\t\tassert_eq!(Elections::candidates(), vec![3, 2, 1]);\n\n\t});\n\n}\n\n\n", "file_path": "frame/elections/src/tests.rs", "rank": 46, "score": 163911.30034545274 }, { "content": "fn main() {}\n", "file_path": "primitives/runtime-interface/tests/ui/pass_by_inner_with_two_fields.rs", "rank": 47, "score": 163905.82391508817 }, { "content": "#[test]\n\nfn ready_set_should_not_resolve_before_block_update() {\n\n\tlet (pool, _guard, _notifier) = maintained_pool();\n\n\tlet xt1 = uxt(Alice, 209);\n\n\tblock_on(pool.submit_one(&BlockId::number(0), SOURCE, xt1.clone())).expect(\"1. Imported\");\n\n\n\n\tassert!(pool.ready_at(1).now_or_never().is_none());\n\n}\n\n\n", "file_path": "client/transaction-pool/tests/pool.rs", "rank": 48, "score": 163902.36459505412 }, { "content": "#[test]\n\nfn ready_set_should_resolve_after_block_update() {\n\n\tlet (pool, _guard, _notifier) = maintained_pool();\n\n\tlet header = pool.api().push_block(1, vec![], true);\n\n\n\n\tlet xt1 = uxt(Alice, 209);\n\n\n\n\tblock_on(pool.submit_one(&BlockId::number(1), SOURCE, xt1.clone())).expect(\"1. Imported\");\n\n\tblock_on(pool.maintain(block_event(header)));\n\n\n\n\tassert!(pool.ready_at(1).now_or_never().is_some());\n\n}\n\n\n", "file_path": "client/transaction-pool/tests/pool.rs", "rank": 49, "score": 163902.36459505412 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/storage_incomplete_item.rs", "rank": 50, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/storage_wrong_item.rs", "rank": 51, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/trait_invalid_item.rs", "rank": 52, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/event_wrong_item.rs", "rank": 53, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/error_wrong_item.rs", "rank": 54, "score": 163881.28030805907 }, { "content": "#[test]\n\nfn non_system_digest_item_encoding() {\n\n\tlet item = DigestItem::Other::<H256>(vec![10, 20, 30]);\n\n\tlet encoded = item.encode();\n\n\tassert_eq!(encoded, vec![\n\n\t\t// type = DigestItemType::Other\n\n\t\t0,\n\n\t\t// length of other data\n\n\t\t12,\n\n\t\t// authorities\n\n\t\t10, 20, 30,\n\n\t]);\n\n\n\n\tlet decoded: DigestItem<H256> = Decode::decode(&mut &encoded[..]).unwrap();\n\n\tassert_eq!(item, decoded);\n\n}\n", "file_path": "primitives/runtime/src/generic/tests.rs", "rank": 55, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/inherent_invalid_item.rs", "rank": 56, "score": 163881.28030805907 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/hooks_invalid_item.rs", "rank": 57, "score": 163881.28030805907 }, { "content": "/// Saturating reciprocal multiplication. 
Compute `x / self`, saturating at the numeric\n\n/// bounds instead of overflowing.\n\nfn saturating_reciprocal_mul<N, P>(x: N, part: P::Inner, rounding: Rounding) -> N\n\nwhere\n\n\tN: Clone + UniqueSaturatedInto<P::Inner> + ops::Div<N, Output=N> + ops::Mul<N,\n\n\tOutput=N> + ops::Add<N, Output=N> + ops::Rem<N, Output=N> + Saturating + Unsigned,\n\n\tP: PerThing,\n\n\tP::Inner: Into<N>,\n\n{\n\n\tlet maximum: N = P::ACCURACY.into();\n\n\tlet c = rational_mul_correction::<N, P>(\n\n\t\tx.clone(),\n\n\t\tP::ACCURACY,\n\n\t\tpart,\n\n\t\trounding,\n\n\t);\n\n\t(x / part.into()).saturating_mul(maximum).saturating_add(c)\n\n}\n\n\n", "file_path": "primitives/arithmetic/src/per_things.rs", "rank": 58, "score": 163823.4745074884 }, { "content": "/// Overflow-prune multiplication. Accurately multiply a value by `self` without overflowing.\n\nfn overflow_prune_mul<N, P>(x: N, part: P::Inner, rounding: Rounding) -> N\n\nwhere\n\n\tN: Clone + UniqueSaturatedInto<P::Inner> + ops::Div<N, Output=N> + ops::Mul<N,\n\n\tOutput=N> + ops::Add<N, Output=N> + ops::Rem<N, Output=N> + Unsigned,\n\n\tP: PerThing,\n\n\tP::Inner: Into<N>,\n\n{\n\n\tlet maximum: N = P::ACCURACY.into();\n\n\tlet part_n: N = part.into();\n\n\tlet c = rational_mul_correction::<N, P>(\n\n\t\tx.clone(),\n\n\t\tpart,\n\n\t\tP::ACCURACY,\n\n\t\trounding,\n\n\t);\n\n\t(x / maximum) * part_n + c\n\n}\n\n\n", "file_path": "primitives/arithmetic/src/per_things.rs", "rank": 59, "score": 163823.4745074884 }, { "content": "fn code_using_trie() -> u64 {\n\n\tlet pairs = [\n\n\t\t(b\"0103000000000000000464\".to_vec(), b\"0400000000\".to_vec()),\n\n\t\t(b\"0103000000000000000469\".to_vec(), b\"0401000000\".to_vec()),\n\n\t].to_vec();\n\n\n\n\tlet mut mdb = PrefixedMemoryDB::default();\n\n\tlet mut root = sp_std::default::Default::default();\n\n\tlet _ = {\n\n\t\tlet v = &pairs;\n\n\t\tlet mut t = TrieDBMut::<Hashing>::new(&mut mdb, &mut root);\n\n\t\tfor i in 0..v.len() {\n\n\t\t\tlet key: &[u8]= &v[i].0;\n\n\t\t\tlet val: &[u8] = &v[i].1;\n\n\t\t\tif !t.insert(key, val).is_ok() {\n\n\t\t\t\treturn 101;\n\n\t\t\t}\n\n\t\t}\n\n\t\tt\n\n\t};\n", "file_path": "test-utils/runtime/src/lib.rs", "rank": 60, "score": 163265.11815101933 }, { "content": "#[cfg(test)]\n\nfn check_events<T: Config, I: Iterator<Item = <T as SystemConfig>::Event>>(expected: I) {\n\n\tlet events = System::<T>::events()\n\n\t\t.into_iter()\n\n\t\t.map(|frame_system::EventRecord { event, .. }| event)\n\n\t\t.collect::<Vec<_>>();\n\n\tlet expected = expected.collect::<Vec<_>>();\n\n\tlet lengths = (events.len(), expected.len());\n\n\tlet length_mismatch = if lengths.0 != lengths.1 {\n\n\t\tfn pretty<D: std::fmt::Debug>(header: &str, ev: &[D]) {\n\n\t\t\tprintln!(\"{}\", header);\n\n\t\t\tfor (idx, ev) in ev.iter().enumerate() {\n\n\t\t\t\tprintln!(\"\\t[{:04}] {:?}\", idx, ev);\n\n\t\t\t}\n\n\t\t}\n\n\t\tpretty(\"--Got:\", &events);\n\n\t\tpretty(\"--Expected:\", &expected);\n\n\t\tformat!(\"Mismatching length. Got: {}, expected: {}\", lengths.0, lengths.1)\n\n\t} else { Default::default() };\n\n\n\n\tfor (idx, (a, b)) in events.into_iter().zip(expected).enumerate() {\n", "file_path": "frame/offences/benchmarking/src/lib.rs", "rank": 61, "score": 163161.69765753785 }, { "content": "/// Convert from the parsed pallet to their final information.\n\n/// Assign index to each pallet using same rules as rust for fieldless enum.\n\n/// I.e. 
implicit are assigned number incrementedly from last explicit or 0.\n\nfn complete_pallets(decl: impl Iterator<Item = PalletDeclaration>) -> syn::Result<Vec<Pallet>> {\n\n\tlet mut indices = HashMap::new();\n\n\tlet mut last_index: Option<u8> = None;\n\n\tlet mut names = HashMap::new();\n\n\n\n\tdecl\n\n\t\t.map(|pallet| {\n\n\t\t\tlet final_index = match pallet.index {\n\n\t\t\t\tSome(i) => i,\n\n\t\t\t\tNone => last_index.map_or(Some(0), |i| i.checked_add(1))\n\n\t\t\t\t\t.ok_or_else(|| {\n\n\t\t\t\t\t\tlet msg = \"Pallet index doesn't fit into u8, index is 256\";\n\n\t\t\t\t\t\tsyn::Error::new(pallet.name.span(), msg)\n\n\t\t\t\t\t})?,\n\n\t\t\t};\n\n\n\n\t\t\tlast_index = Some(final_index);\n\n\n\n\t\t\tif let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) {\n\n\t\t\t\tlet msg = format!(\n", "file_path": "frame/support/procedural/src/construct_runtime/mod.rs", "rank": 62, "score": 162949.51401646977 }, { "content": "/// Returns the function arguments of the given `Signature`, minus any `self` arguments.\n\npub fn get_function_arguments<'a>(sig: &'a Signature) -> impl Iterator<Item = PatType> + 'a {\n\n\tsig.inputs\n\n\t\t.iter()\n\n\t\t.filter_map(|a| match a {\n\n\t\t\tFnArg::Receiver(_) => None,\n\n\t\t\tFnArg::Typed(pat_type) => Some(pat_type),\n\n\t\t})\n\n\t\t.enumerate()\n\n\t\t.map(|(i, arg)| {\n\n\t\t\tlet mut res = arg.clone();\n\n\t\t\tif let Pat::Wild(wild) = &*arg.pat {\n\n\t\t\t\tlet ident = Ident::new(\n\n\t\t\t\t\t&format!(\"__runtime_interface_generated_{}_\", i),\n\n\t\t\t\t\twild.span(),\n\n\t\t\t\t);\n\n\n\n\t\t\t\tres.pat = Box::new(parse_quote!( #ident ))\n\n\t\t\t}\n\n\n\n\t\t\tres\n\n\t\t})\n\n}\n\n\n", "file_path": "primitives/runtime-interface/proc-macro/src/utils.rs", "rank": 63, "score": 162005.41280376806 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_listen_addresses_consistent_with_transport_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)];\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\ttransport: config::TransportConfig::MemoryOnly,\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 64, "score": 161609.9260876656 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_listen_addresses_consistent_with_transport_not_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Memory(rand::random::<u64>())];\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 65, "score": 161609.9260876656 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_public_addresses_consistent_with_transport_not_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)];\n\n\tlet public_address = config::build_multiaddr![Memory(rand::random::<u64>())];\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\tpublic_addresses: vec![public_address],\n\n\t\t.. 
config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n", "file_path": "client/network/src/service/tests.rs", "rank": 66, "score": 161609.9260876656 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_public_addresses_consistent_with_transport_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Memory(rand::random::<u64>())];\n\n\tlet public_address = config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)];\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\ttransport: config::TransportConfig::MemoryOnly,\n\n\t\tpublic_addresses: vec![public_address],\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 67, "score": 161609.9260876656 }, { "content": "#[test]\n\nfn candidacy_submission_using_alternative_free_slot_should_work() {\n\n\tlet mut t = new_test_ext_with_candidate_holes();\n\n\n\n\tt.execute_with(|| {\n\n\t\tassert_eq!(Elections::candidates(), vec![0, 0, 1]);\n\n\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(2), 0));\n\n\t\tassert_eq!(Elections::candidates(), vec![2, 0, 1]);\n\n\n\n\t\tassert_ok!(Elections::submit_candidacy(Origin::signed(3), 1));\n\n\t\tassert_eq!(Elections::candidates(), vec![2, 3, 1]);\n\n\t});\n\n}\n\n\n", "file_path": "frame/elections/src/tests.rs", "rank": 68, "score": 161598.27148739557 }, { "content": "#[test]\n\nfn unscored_entities_must_not_be_used_for_filling_members() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\t// given\n\n\t\t// we submit a candidacy, score will be `None`\n\n\t\tassert_ok!(ScoredPool::submit_candidacy(Origin::signed(15)));\n\n\n\n\t\t// when\n\n\t\t// we remove every scored member\n\n\t\tScoredPool::pool()\n\n\t\t\t.into_iter()\n\n\t\t\t.for_each(|(who, score)| {\n\n\t\t\t\tif let Some(_) = score {\n\n\t\t\t\t\tlet index = find_in_pool(who).expect(\"entity must be in pool\") as u32;\n\n\t\t\t\t\tassert_ok!(ScoredPool::kick(Origin::signed(KickOrigin::get()), who, index));\n\n\t\t\t\t}\n\n\t\t\t});\n\n\n\n\t\t// then\n\n\t\t// the `None` candidates should not have been filled in\n\n\t\tassert!(ScoredPool::members().is_empty());\n\n\t\tassert_eq!(MEMBERS.with(|m| m.borrow().clone()), ScoredPool::members());\n\n\t});\n\n}\n\n\n", "file_path": "frame/scored-pool/src/tests.rs", "rank": 69, "score": 161598.27148739557 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/inherent_check_inner_span.rs", "rank": 70, "score": 161592.90655923457 }, { "content": "#[test]\n\nfn ensure_blocking_futures_are_awaited_on_shutdown() {\n\n\tlet mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\tlet handle = runtime.handle().clone();\n\n\tlet task_executor: TaskExecutor = (move |future, _| handle.spawn(future).map(|_| ())).into();\n\n\n\n\tlet task_manager = new_task_manager(task_executor);\n\n\tlet spawn_handle = task_manager.spawn_handle();\n\n\tlet drop_tester = DropTester::new();\n\n\tspawn_handle.spawn(\n\n\t\t\"task1\",\n\n\t\trun_background_task_blocking(Duration::from_secs(3), drop_tester.new_ref()),\n\n\t);\n\n\tspawn_handle.spawn(\n\n\t\t\"task2\",\n\n\t\trun_background_task_blocking(Duration::from_secs(3), drop_tester.new_ref()),\n\n\t);\n\n\tassert_eq!(drop_tester, 2);\n\n\t// allow the tasks to even start\n\n\truntime.block_on(async { 
tokio::time::delay_for(Duration::from_secs(1)).await });\n\n\tassert_eq!(drop_tester, 2);\n\n\truntime.block_on(task_manager.clean_shutdown());\n\n\tassert_eq!(drop_tester, 0);\n\n}\n\n\n", "file_path": "client/service/src/task_manager/tests.rs", "rank": 71, "score": 161592.05946448172 }, { "content": "#[test]\n\nfn wait_until_deferred_block_announce_validation_is_ready() {\n\n\tsp_tracing::try_init_simple();\n\n\tlet mut net = TestNet::with_fork_choice(ForkChoiceStrategy::Custom(false));\n\n\tnet.add_full_peer_with_config(Default::default());\n\n\tnet.add_full_peer_with_config(FullPeerConfig {\n\n\t\tblock_announce_validator: Some(Box::new(NewBestBlockAnnounceValidator)),\n\n\t\t..Default::default()\n\n\t});\n\n\n\n\tnet.block_until_connected();\n\n\n\n\tlet block_hash = net.peer(0).push_blocks(1, true);\n\n\n\n\twhile !net.peer(1).has_block(&block_hash) {\n\n\t\tnet.block_until_idle();\n\n\t}\n\n}\n\n\n\n/// When we don't inform the sync protocol about the best block, a node will not sync from us as the\n\n/// handshake is not does not contain our best block.\n", "file_path": "client/network/test/src/sync.rs", "rank": 72, "score": 161589.51767227112 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/type_value_invalid_item.rs", "rank": 73, "score": 161568.86266937255 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/error_wrong_item_name.rs", "rank": 74, "score": 161568.86266937255 }, { "content": "fn main() {\n\n}\n", "file_path": "frame/support/test/tests/pallet_ui/event_wrong_item_name.rs", "rank": 75, "score": 161568.86266937255 }, { "content": "/// Create a leaf/branch node, encoding a number of nibbles.\n\nfn fuse_nibbles_node<'a>(nibbles: &'a [u8], kind: NodeKind) -> impl Iterator<Item = u8> + 'a {\n\n\tlet size = sp_std::cmp::min(trie_constants::NIBBLE_SIZE_BOUND, nibbles.len());\n\n\n\n\tlet iter_start = match kind {\n\n\t\tNodeKind::Leaf => size_and_prefix_iterator(size, trie_constants::LEAF_PREFIX_MASK),\n\n\t\tNodeKind::BranchNoValue => size_and_prefix_iterator(size, trie_constants::BRANCH_WITHOUT_MASK),\n\n\t\tNodeKind::BranchWithValue => size_and_prefix_iterator(size, trie_constants::BRANCH_WITH_MASK),\n\n\t};\n\n\titer_start\n\n\t\t.chain(if nibbles.len() % 2 == 1 { Some(nibbles[0]) } else { None })\n\n\t\t.chain(nibbles[nibbles.len() % 2..].chunks(2).map(|ch| ch[0] << 4 | ch[1]))\n\n}\n\n\n\n\n\nimpl trie_root::TrieStream for TrieStream {\n\n\n\n\tfn new() -> Self {\n\n\t\tTrieStream {\n\n\t\t\tbuffer: Vec::new()\n\n\t\t}\n", "file_path": "primitives/trie/src/trie_stream.rs", "rank": 76, "score": 161329.495327936 }, { "content": "/// Like `claim_slot`, but allows passing an explicit set of key pairs. 
Useful if we intend\n\n/// to make repeated calls for different slots using the same key pairs.\n\npub fn claim_slot_using_keys(\n\n\tslot: Slot,\n\n\tepoch: &Epoch,\n\n\tkeystore: &SyncCryptoStorePtr,\n\n\tkeys: &[(AuthorityId, usize)],\n\n) -> Option<(PreDigest, AuthorityId)> {\n\n\tclaim_primary_slot(slot, epoch, epoch.config.c, keystore, &keys)\n\n\t\t.or_else(|| {\n\n\t\t\tif epoch.config.allowed_slots.is_secondary_plain_slots_allowed() ||\n\n\t\t\t\tepoch.config.allowed_slots.is_secondary_vrf_slots_allowed()\n\n\t\t\t{\n\n\t\t\t\tclaim_secondary_slot(\n\n\t\t\t\t\tslot,\n\n\t\t\t\t\t&epoch,\n\n\t\t\t\t\tkeys,\n\n\t\t\t\t\t&keystore,\n\n\t\t\t\t\tepoch.config.allowed_slots.is_secondary_vrf_slots_allowed(),\n\n\t\t\t\t)\n\n\t\t\t} else {\n\n\t\t\t\tNone\n\n\t\t\t}\n\n\t\t})\n\n}\n\n\n", "file_path": "client/consensus/babe/src/authorship.rs", "rank": 77, "score": 160861.66495789393 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_boot_node_addresses_consistent_with_transport_not_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)];\n\n\tlet boot_node = config::MultiaddrWithPeerId {\n\n\t\tmultiaddr: config::build_multiaddr![Memory(rand::random::<u64>())],\n\n\t\tpeer_id: PeerId::random(),\n\n\t};\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\tboot_nodes: vec![boot_node],\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 78, "score": 159389.26242886612 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_boot_node_addresses_consistent_with_transport_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Memory(rand::random::<u64>())];\n\n\tlet boot_node = config::MultiaddrWithPeerId {\n\n\t\tmultiaddr: config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)],\n\n\t\tpeer_id: PeerId::random(),\n\n\t};\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\ttransport: config::TransportConfig::MemoryOnly,\n\n\t\tboot_nodes: vec![boot_node],\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 79, "score": 159389.26242886612 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_reserved_node_addresses_consistent_with_transport_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Memory(rand::random::<u64>())];\n\n\tlet reserved_node = config::MultiaddrWithPeerId {\n\n\t\tmultiaddr: config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)],\n\n\t\tpeer_id: PeerId::random(),\n\n\t};\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\ttransport: config::TransportConfig::MemoryOnly,\n\n\t\tdefault_peers_set: config::SetConfig {\n\n\t\t\treserved_nodes: vec![reserved_node],\n\n\t\t\t.. Default::default()\n\n\t\t},\n\n\t\t.. 
config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 80, "score": 159389.26242886612 }, { "content": "#[test]\n\n#[should_panic(expected = \"don't match the transport\")]\n\nfn ensure_reserved_node_addresses_consistent_with_transport_not_memory() {\n\n\tlet listen_addr = config::build_multiaddr![Ip4([127, 0, 0, 1]), Tcp(0_u16)];\n\n\tlet reserved_node = config::MultiaddrWithPeerId {\n\n\t\tmultiaddr: config::build_multiaddr![Memory(rand::random::<u64>())],\n\n\t\tpeer_id: PeerId::random(),\n\n\t};\n\n\n\n\tlet _ = build_test_full_node(config::NetworkConfiguration {\n\n\t\tlisten_addresses: vec![listen_addr.clone()],\n\n\t\tdefault_peers_set: config::SetConfig {\n\n\t\t\treserved_nodes: vec![reserved_node],\n\n\t\t\t.. Default::default()\n\n\t\t},\n\n\t\t.. config::NetworkConfiguration::new(\"test-node\", \"test-client\", Default::default(), None)\n\n\t});\n\n}\n\n\n", "file_path": "client/network/src/service/tests.rs", "rank": 81, "score": 159389.26242886612 }, { "content": "#[test]\n\nfn ready_set_should_eventually_resolve_when_block_update_arrives() {\n\n\tlet (pool, _guard, _notifier) = maintained_pool();\n\n\tlet header = pool.api().push_block(1, vec![], true);\n\n\n\n\tlet xt1 = uxt(Alice, 209);\n\n\n\n\tblock_on(pool.submit_one(&BlockId::number(1), SOURCE, xt1.clone())).expect(\"1. Imported\");\n\n\n\n\tlet noop_waker = futures::task::noop_waker();\n\n\tlet mut context = futures::task::Context::from_waker(&noop_waker);\n\n\n\n\tlet mut ready_set_future = pool.ready_at(1);\n\n\tif let Poll::Ready(_) = ready_set_future.poll_unpin(&mut context) {\n\n\t\tpanic!(\"Ready set should not be ready before block update!\");\n\n\t}\n\n\n\n\tblock_on(pool.maintain(block_event(header)));\n\n\n\n\tmatch ready_set_future.poll_unpin(&mut context) {\n\n\t\tPoll::Pending => {\n\n\t\t\tpanic!(\"Ready set should become ready after block update!\");\n\n\t\t},\n\n\t\tPoll::Ready(iterator) => {\n\n\t\t\tlet data = iterator.collect::<Vec<_>>();\n\n\t\t\tassert_eq!(data.len(), 1);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "client/transaction-pool/tests/pool.rs", "rank": 82, "score": 159368.9723387727 }, { "content": "/// Read a value from the child trie with given query.\n\npub fn read_child_trie_value_with<L: TrieConfiguration, Q: Query<L::Hash, Item=DBValue>, DB>(\n\n\tkeyspace: &[u8],\n\n\tdb: &DB,\n\n\troot_slice: &[u8],\n\n\tkey: &[u8],\n\n\tquery: Q\n\n) -> Result<Option<Vec<u8>>, Box<TrieError<L>>>\n\n\twhere\n\n\t\tDB: hash_db::HashDBRef<L::Hash, trie_db::DBValue>\n\n{\n\n\tlet mut root = TrieHash::<L>::default();\n\n\t// root is fetched from DB, not writable by runtime, so it's always valid.\n\n\troot.as_mut().copy_from_slice(root_slice);\n\n\n\n\tlet db = KeySpacedDB::new(&*db, keyspace);\n\n\tTrieDB::<L>::new(&db, &root)?.get_with(key, query).map(|x| x.map(|val| val.to_vec()))\n\n}\n\n\n\n/// `HashDB` implementation that append a encoded prefix (unique id bytes) in addition to the\n\n/// prefix of every key value.\n\npub struct KeySpacedDB<'a, DB, H>(&'a DB, &'a [u8], PhantomData<H>);\n\n\n\n/// `HashDBMut` implementation that append a encoded prefix (unique id bytes) in addition to the\n\n/// prefix of every key value.\n\n///\n\n/// Mutable variant of `KeySpacedDB`, see [`KeySpacedDB`].\n\npub struct KeySpacedDBMut<'a, DB, H>(&'a mut DB, &'a [u8], PhantomData<H>);\n\n\n", "file_path": "primitives/trie/src/lib.rs", "rank": 83, "score": 158654.0326807269 }, { "content": 
"#[cfg(feature = \"std\")]\n\nfn current_timestamp() -> std::time::Duration {\n\n\tuse wasm_timer::SystemTime;\n\n\n\n\tlet now = SystemTime::now();\n\n\tnow.duration_since(SystemTime::UNIX_EPOCH)\n\n\t\t.expect(\"Current time is always after unix epoch; qed\")\n\n}\n\n\n\n/// Provide duration since unix epoch in millisecond for timestamp inherent.\n\n#[cfg(feature = \"std\")]\n\npub struct InherentDataProvider {\n\n\tmax_drift: InherentType,\n\n\ttimestamp: InherentType,\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl InherentDataProvider {\n\n\t/// Create `Self` while using the system time to get the timestamp.\n\n\tpub fn from_system_time() -> Self {\n\n\t\tSelf {\n", "file_path": "primitives/timestamp/src/lib.rs", "rank": 84, "score": 158146.14407813875 }, { "content": "/// Put a particular value into storage by the `module`, the map's `item` name and the key `hash`.\n\npub fn put_storage_value<T: Encode>(module: &[u8], item: &[u8], hash: &[u8], value: T) {\n\n\tlet mut key = vec![0u8; 32 + hash.len()];\n\n\tkey[0..16].copy_from_slice(&Twox128::hash(module));\n\n\tkey[16..32].copy_from_slice(&Twox128::hash(item));\n\n\tkey[32..].copy_from_slice(hash);\n\n\tframe_support::storage::unhashed::put(&key, &value);\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 85, "score": 157864.62357837276 }, { "content": "/// Returns the function argument names of the given `Signature`, minus any `self`.\n\npub fn get_function_argument_names<'a>(sig: &'a Signature) -> impl Iterator<Item = Box<Pat>> + 'a {\n\n\tget_function_arguments(sig).map(|pt| pt.pat)\n\n}\n\n\n", "file_path": "primitives/runtime-interface/proc-macro/src/utils.rs", "rank": 86, "score": 157857.5657559454 }, { "content": "/// Returns the function argument types of the given `Signature`, minus any `Self` type.\n\npub fn get_function_argument_types<'a>(sig: &'a Signature) -> impl Iterator<Item = Box<Type>> + 'a {\n\n\tget_function_arguments(sig).map(|pt| pt.ty)\n\n}\n\n\n", "file_path": "primitives/runtime-interface/proc-macro/src/utils.rs", "rank": 87, "score": 157857.5657559454 }, { "content": "/// Extract all types that appear in signatures in the given `ImplItem`'s.\n\n///\n\n/// If a type is a reference, the inner type is extracted (without the reference).\n\npub fn extract_all_signature_types(items: &[ImplItem]) -> Vec<Type> {\n\n\titems.iter()\n\n\t\t.filter_map(|i| match i {\n\n\t\t\tImplItem::Method(method) => Some(&method.sig),\n\n\t\t\t_ => None,\n\n\t\t})\n\n\t\t.flat_map(|sig| {\n\n\t\t\tlet ret_ty = match &sig.output {\n\n\t\t\t\tReturnType::Default => None,\n\n\t\t\t\tReturnType::Type(_, ty) => Some((**ty).clone()),\n\n\t\t\t};\n\n\n\n\t\t\tsig.inputs.iter().filter_map(|i| match i {\n\n\t\t\t\tFnArg::Typed(arg) => Some(&arg.ty),\n\n\t\t\t\t_ => None,\n\n\t\t\t}).map(|ty| match &**ty {\n\n\t\t\t\tType::Reference(t) => (*t.elem).clone(),\n\n\t\t\t\t_ => (**ty).clone(),\n\n\t\t\t}).chain(ret_ty)\n\n\t\t})\n\n\t\t.collect()\n\n}\n\n\n", "file_path": "primitives/api/proc-macro/src/utils.rs", "rank": 88, "score": 157124.33995605225 }, { "content": "/// Get the enum fields idents of the given `data` object as iterator.\n\n///\n\n/// Returns an error if the number of variants is greater than `256`, the given `data` is not an\n\n/// enum or a variant is not an unit.\n\nfn get_enum_field_idents<'a>(data: &'a Data) -> Result<impl Iterator<Item = Result<&'a Ident>>> {\n\n\tmatch data {\n\n\t\tData::Enum(d) => {\n\n\t\t\tif d.variants.len() <= 256 {\n\n\t\t\t\tOk(\n\n\t\t\t\t\td.variants.iter().map(|v| if let 
Fields::Unit = v.fields {\n\n\t\t\t\t\t\tOk(&v.ident)\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\tErr(Error::new(\n\n\t\t\t\t\t\t\tSpan::call_site(),\n\n\t\t\t\t\t\t\t\"`PassByEnum` only supports unit variants.\",\n\n\t\t\t\t\t\t))\n\n\t\t\t\t\t})\n\n\t\t\t\t)\n\n\t\t\t} else {\n\n\t\t\t\tErr(Error::new(Span::call_site(), \"`PassByEnum` only supports `256` variants.\"))\n\n\t\t\t}\n\n\t\t},\n\n\t\t_ => Err(Error::new(Span::call_site(), \"`PassByEnum` only supports enums as input type.\"))\n\n\t}\n\n}\n", "file_path": "primitives/runtime-interface/proc-macro/src/pass_by/enum_.rs", "rank": 89, "score": 156211.21457488235 }, { "content": "#[test]\n\nfn dont_stop_polling_dht_event_stream_after_bogus_event() {\n\n\tlet remote_multiaddr = {\n\n\t\tlet peer_id = PeerId::random();\n\n\t\tlet address: Multiaddr = \"/ip6/2001:db8:0:0:0:0:0:1/tcp/30333\".parse().unwrap();\n\n\n\n\t\taddress.with(multiaddr::Protocol::P2p(\n\n\t\t\tpeer_id.into(),\n\n\t\t))\n\n\t};\n\n\tlet remote_key_store = KeyStore::new();\n\n\tlet remote_public_key: AuthorityId = block_on(\n\n\t\tremote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None),\n\n\t).unwrap().into();\n\n\n\n\tlet (mut dht_event_tx, dht_event_rx) = channel(1);\n\n\tlet (network, mut network_events) = {\n\n\t\tlet mut n = TestNetwork::default();\n\n\t\tlet r = n.get_event_receiver().unwrap();\n\n\t\t(Arc::new(n), r)\n\n\t};\n", "file_path": "client/authority-discovery/src/worker/tests.rs", "rank": 90, "score": 155194.72620986658 }, { "content": "#[test]\n\nfn ensure_task_manager_future_ends_when_task_manager_terminated() {\n\n\tlet mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\tlet handle = runtime.handle().clone();\n\n\tlet task_executor: TaskExecutor = (move |future, _| handle.spawn(future).map(|_| ())).into();\n\n\n\n\tlet mut task_manager = new_task_manager(task_executor);\n\n\tlet spawn_handle = task_manager.spawn_handle();\n\n\tlet drop_tester = DropTester::new();\n\n\tspawn_handle.spawn(\"task1\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle.spawn(\"task2\", run_background_task(drop_tester.new_ref()));\n\n\tassert_eq!(drop_tester, 2);\n\n\t// allow the tasks to even start\n\n\truntime.block_on(async { tokio::time::delay_for(Duration::from_secs(1)).await });\n\n\tassert_eq!(drop_tester, 2);\n\n\ttask_manager.terminate();\n\n\truntime.block_on(task_manager.future()).expect(\"future has ended without error\");\n\n\truntime.block_on(task_manager.clean_shutdown());\n\n\tassert_eq!(drop_tester, 0);\n\n}\n\n\n", "file_path": "client/service/src/task_manager/tests.rs", "rank": 91, "score": 155185.92854810352 }, { "content": "type FutureResult<T> = Box<dyn rpc_future::Future<Item = T, Error = RpcError> + Send>;\n\n\n\n/// Provides rpc methods for interacting with Babe.\n", "file_path": "client/consensus/babe/rpc/src/lib.rs", "rank": 92, "score": 154032.37572963032 }, { "content": "/// Construct iterator to iterate over map items in `module` for the map called `item`.\n\npub fn storage_iter<T: Decode + Sized>(module: &[u8], item: &[u8]) -> PrefixIterator<(Vec<u8>, T)> {\n\n\tstorage_iter_with_suffix(module, item, &[][..])\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 93, "score": 153954.5890715591 }, { "content": "/// Take a particular value in storage by the `module`, the map's `item` name and the key `hash`.\n\npub fn take_storage_value<T: Decode + Sized>(module: &[u8], item: &[u8], hash: &[u8]) -> Option<T> {\n\n\tlet mut key = vec![0u8; 32 + 
hash.len()];\n\n\tkey[0..16].copy_from_slice(&Twox128::hash(module));\n\n\tkey[16..32].copy_from_slice(&Twox128::hash(item));\n\n\tkey[32..].copy_from_slice(hash);\n\n\tframe_support::storage::unhashed::take::<T>(&key)\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 94, "score": 153953.4104550899 }, { "content": "/// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`.\n\npub fn get_storage_value<T: Decode + Sized>(module: &[u8], item: &[u8], hash: &[u8]) -> Option<T> {\n\n\tlet mut key = vec![0u8; 32 + hash.len()];\n\n\tkey[0..16].copy_from_slice(&Twox128::hash(module));\n\n\tkey[16..32].copy_from_slice(&Twox128::hash(item));\n\n\tkey[32..].copy_from_slice(hash);\n\n\tframe_support::storage::unhashed::get::<T>(&key)\n\n}\n\n\n", "file_path": "frame/support/src/storage/migration.rs", "rank": 95, "score": 153953.4104550899 }, { "content": "#[test]\n\nfn ensure_task_manager_future_ends_with_error_when_essential_task_fails() {\n\n\tlet mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\tlet handle = runtime.handle().clone();\n\n\tlet task_executor: TaskExecutor = (move |future, _| handle.spawn(future).map(|_| ())).into();\n\n\n\n\tlet mut task_manager = new_task_manager(task_executor);\n\n\tlet spawn_handle = task_manager.spawn_handle();\n\n\tlet spawn_essential_handle = task_manager.spawn_essential_handle();\n\n\tlet drop_tester = DropTester::new();\n\n\tspawn_handle.spawn(\"task1\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle.spawn(\"task2\", run_background_task(drop_tester.new_ref()));\n\n\tassert_eq!(drop_tester, 2);\n\n\t// allow the tasks to even start\n\n\truntime.block_on(async { tokio::time::delay_for(Duration::from_secs(1)).await });\n\n\tassert_eq!(drop_tester, 2);\n\n\tspawn_essential_handle.spawn(\"task3\", async { panic!(\"task failed\") });\n\n\truntime.block_on(task_manager.future()).expect_err(\"future()'s Result must be Err\");\n\n\tassert_eq!(drop_tester, 2);\n\n\truntime.block_on(task_manager.clean_shutdown());\n\n\tassert_eq!(drop_tester, 0);\n\n}\n\n\n", "file_path": "client/service/src/task_manager/tests.rs", "rank": 96, "score": 153211.36318124275 }, { "content": "#[test]\n\nfn ensure_task_manager_future_continues_when_childs_not_essential_task_fails() {\n\n\tlet mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\tlet handle = runtime.handle().clone();\n\n\tlet task_executor: TaskExecutor = (move |future, _| handle.spawn(future).map(|_| ())).into();\n\n\n\n\tlet mut task_manager = new_task_manager(task_executor.clone());\n\n\tlet child_1 = new_task_manager(task_executor.clone());\n\n\tlet spawn_handle_child_1 = child_1.spawn_handle();\n\n\tlet child_2 = new_task_manager(task_executor.clone());\n\n\tlet spawn_handle_child_2 = child_2.spawn_handle();\n\n\ttask_manager.add_child(child_1);\n\n\ttask_manager.add_child(child_2);\n\n\tlet spawn_handle = task_manager.spawn_handle();\n\n\tlet drop_tester = DropTester::new();\n\n\tspawn_handle.spawn(\"task1\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle.spawn(\"task2\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle_child_1.spawn(\"task3\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle_child_2.spawn(\"task4\", run_background_task(drop_tester.new_ref()));\n\n\tassert_eq!(drop_tester, 4);\n\n\t// allow the tasks to even start\n", "file_path": "client/service/src/task_manager/tests.rs", "rank": 97, "score": 153211.36318124275 }, { "content": "/// Future's type for jsonrpc\n\ntype 
FutureResult<T> = Box<dyn jsonrpc_core::futures::Future<Item = T, Error = Error> + Send>;\n\n/// sender passed to the authorship task to report errors or successes.\n\npub type Sender<T> = Option<oneshot::Sender<std::result::Result<T, crate::Error>>>;\n\n\n\n/// Message sent to the background authorship task, usually by RPC.\n\npub enum EngineCommand<Hash> {\n\n\t/// Tells the engine to propose a new block\n\n\t///\n\n\t/// if create_empty == true, it will create empty blocks if there are no transactions\n\n\t/// in the transaction pool.\n\n\t///\n\n\t/// if finalize == true, the block will be instantly finalized.\n\n\tSealNewBlock {\n\n\t\t/// if true, empty blocks(without extrinsics) will be created.\n\n\t\t/// otherwise, will return Error::EmptyTransactionPool.\n\n\t\tcreate_empty: bool,\n\n\t\t/// instantly finalize this block?\n\n\t\tfinalize: bool,\n\n\t\t/// specify the parent hash of the about-to-created block\n\n\t\tparent_hash: Option<Hash>,\n", "file_path": "client/consensus/manual-seal/src/rpc.rs", "rank": 98, "score": 152751.5190953978 }, { "content": "#[test]\n\nfn ensure_task_manager_future_ends_with_error_when_childs_essential_task_fails() {\n\n\tlet mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\tlet handle = runtime.handle().clone();\n\n\tlet task_executor: TaskExecutor = (move |future, _| handle.spawn(future).map(|_| ())).into();\n\n\n\n\tlet mut task_manager = new_task_manager(task_executor.clone());\n\n\tlet child_1 = new_task_manager(task_executor.clone());\n\n\tlet spawn_handle_child_1 = child_1.spawn_handle();\n\n\tlet spawn_essential_handle_child_1 = child_1.spawn_essential_handle();\n\n\tlet child_2 = new_task_manager(task_executor.clone());\n\n\tlet spawn_handle_child_2 = child_2.spawn_handle();\n\n\ttask_manager.add_child(child_1);\n\n\ttask_manager.add_child(child_2);\n\n\tlet spawn_handle = task_manager.spawn_handle();\n\n\tlet drop_tester = DropTester::new();\n\n\tspawn_handle.spawn(\"task1\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle.spawn(\"task2\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle_child_1.spawn(\"task3\", run_background_task(drop_tester.new_ref()));\n\n\tspawn_handle_child_2.spawn(\"task4\", run_background_task(drop_tester.new_ref()));\n\n\tassert_eq!(drop_tester, 4);\n\n\t// allow the tasks to even start\n\n\truntime.block_on(async { tokio::time::delay_for(Duration::from_secs(1)).await });\n\n\tassert_eq!(drop_tester, 4);\n\n\tspawn_essential_handle_child_1.spawn(\"task5\", async { panic!(\"task failed\") });\n\n\truntime.block_on(task_manager.future()).expect_err(\"future()'s Result must be Err\");\n\n\tassert_eq!(drop_tester, 4);\n\n\truntime.block_on(task_manager.clean_shutdown());\n\n\tassert_eq!(drop_tester, 0);\n\n}\n\n\n", "file_path": "client/service/src/task_manager/tests.rs", "rank": 99, "score": 151309.77445620106 } ]
Rust
tests/baking_mod/macros_baking/macro_choice.rs
julien-lange/mpst_rust_github
ca10d860f06d3bc4b6d1a9df290d2812235b456f
use either::Either; use mpstthree::binary::struct_trait::{end::End, recv::Recv, send::Send, session::Session}; use mpstthree::role::end::RoleEnd; use mpstthree::role::Role; use std::error::Error; use rand::{thread_rng, Rng}; use mpstthree::bundle_impl; bundle_impl!(MeshedChannels, A, B, C); type OfferMpst<S0, S1, S2, S3, R0, R1, N0> = Recv<Either<MeshedChannels<S0, S1, R0, N0>, MeshedChannels<S2, S3, R1, N0>>, End>; type ChooseMpst<S0, S1, S2, S3, R0, R1, N0> = Send< Either< MeshedChannels< <S0 as Session>::Dual, <S1 as Session>::Dual, <R0 as Role>::Dual, <N0 as Role>::Dual, >, MeshedChannels< <S2 as Session>::Dual, <S3 as Session>::Dual, <R1 as Role>::Dual, <N0 as Role>::Dual, >, >, End, >; type AtoCClose = End; type AtoBClose = End; type AtoCVideo<N> = Recv<N, Send<N, End>>; type AtoBVideo<N> = Send<N, Recv<N, End>>; type BtoAClose = <AtoBClose as Session>::Dual; type BtoCClose = End; type BtoAVideo<N> = <AtoBVideo<N> as Session>::Dual; type CtoBClose = <BtoCClose as Session>::Dual; type CtoAClose = <AtoCClose as Session>::Dual; type CtoAVideo<N> = <AtoCVideo<N> as Session>::Dual; type StackAEnd = RoleEnd; type StackAVideo = RoleC<RoleB<RoleB<RoleC<RoleEnd>>>>; type StackAVideoDual = <StackAVideo as Role>::Dual; type StackAFull = RoleC<RoleC<RoleAlltoC<RoleEnd, RoleEnd>>>; type StackBEnd = RoleEnd; type StackBVideo = RoleA<RoleA<RoleEnd>>; type StackBVideoDual = <StackBVideo as Role>::Dual; type StackBFull = RoleAlltoC<RoleEnd, RoleEnd>; type StackCEnd = RoleEnd; type StackCVideo = RoleA<RoleA<RoleEnd>>; type StackCChoice = RoleCtoAll<StackCVideo, StackCEnd>; type StackCFull = RoleA<RoleA<StackCChoice>>; type ChooseCtoA<N> = ChooseMpst< BtoAVideo<N>, CtoAVideo<N>, BtoAClose, CtoAClose, StackAVideoDual, StackAEnd, RoleADual<RoleEnd>, >; type ChooseCtoB<N> = ChooseMpst< AtoBVideo<N>, CtoBClose, AtoBClose, CtoBClose, StackBVideoDual, StackBEnd, RoleBDual<RoleEnd>, >; type InitC<N> = Send<N, Recv<N, ChooseCtoA<N>>>; type EndpointCFull<N> = MeshedChannels<InitC<N>, ChooseCtoB<N>, StackCFull, RoleC<RoleEnd>>; type EndpointAVideo<N> = MeshedChannels<AtoBVideo<N>, AtoCVideo<N>, StackAVideo, RoleA<RoleEnd>>; type EndpointAEnd = MeshedChannels<AtoBClose, AtoCClose, StackAEnd, RoleA<RoleEnd>>; type OfferA<N> = OfferMpst< AtoBVideo<N>, AtoCVideo<N>, AtoBClose, AtoCClose, StackAVideo, StackAEnd, RoleA<RoleEnd>, >; type InitA<N> = Recv<N, Send<N, OfferA<N>>>; type EndpointAFull<N> = MeshedChannels<End, InitA<N>, StackAFull, RoleA<RoleEnd>>; type EndpointBVideo<N> = MeshedChannels<BtoAVideo<N>, BtoCClose, StackBVideo, RoleB<RoleEnd>>; type EndpointBEnd = MeshedChannels<BtoAClose, BtoCClose, StackBEnd, RoleB<RoleEnd>>; type OfferB<N> = OfferMpst< BtoAVideo<N>, BtoCClose, BtoAClose, BtoCClose, StackBVideo, StackBEnd, RoleB<RoleEnd>, >; type EndpointBFull<N> = MeshedChannels<End, OfferB<N>, StackBFull, RoleB<RoleEnd>>; fn server(s: EndpointBFull<i32>) -> Result<(), Box<dyn Error>> { s.offer( |s: EndpointBVideo<i32>| { let (request, s) = s.recv()?; s.send(request + 1).close() }, |s: EndpointBEnd| s.close(), ) } fn authenticator(s: EndpointAFull<i32>) -> Result<(), Box<dyn Error>> { let (id, s) = s.recv()?; s.send(id + 1).offer( |s: EndpointAVideo<i32>| { let (request, s) = s.recv()?; let (video, s) = s.send(request + 1).recv()?; assert_eq!(request, id + 1); assert_eq!(video, id + 3); s.send(video + 1).close() }, |s: EndpointAEnd| s.close(), ) } fn client_video(s: EndpointCFull<i32>) -> Result<(), Box<dyn Error>> { let mut rng = thread_rng(); let id: i32 = rng.gen(); let (accept, s) = s.send(id).recv()?; 
assert_eq!(accept, id + 1); let (result, s) = s.choose_left().send(accept).recv()?; assert_eq!(result, accept + 3); s.close() } fn client_close(s: EndpointCFull<i32>) -> Result<(), Box<dyn Error>> { let mut rng = thread_rng(); let id: i32 = rng.gen(); let (accept, s) = s.send(id).recv()?; assert_eq!(accept, id + 1); s.choose_right().close() } pub fn run_usecase_right() { assert!(|| -> Result<(), Box<dyn Error>> { { let (thread_a, thread_b, thread_c) = fork_mpst(authenticator, server, client_close); assert!(thread_a.join().is_ok()); assert!(thread_b.join().is_ok()); assert!(thread_c.join().is_ok()); } Ok(()) }() .is_ok()); } pub fn run_usecase_left() { assert!(|| -> Result<(), Box<dyn Error>> { { let (thread_a, thread_b, thread_c) = fork_mpst(authenticator, server, client_video); assert!(thread_a.join().is_ok()); assert!(thread_b.join().is_ok()); assert!(thread_c.join().is_ok()); } Ok(()) }() .is_ok()); }
use either::Either; use mpstthree::binary::struct_trait::{end::End, recv::Recv, send::Send, session::Session}; use mpstthree::role::end::RoleEnd; use mpstthree::role::Role; use std::error::Error; use rand::{thread_rng, Rng}; use mpstthree::bundle_impl; bundle_impl!(MeshedChannels, A, B, C); type OfferMpst<S0, S1, S2, S3, R0, R1, N0> = Recv<Either<MeshedChannels<S0, S1, R0, N0>, MeshedChannels<S2, S3, R1, N0>>, End>; type ChooseMpst<S0, S1, S2, S3, R0, R1, N0> = Send< Either< MeshedChannels< <S0 as Session>::Dual, <S1 as Session>::Dual, <R0 as Role>::Dual, <N0 as Role>::Dual, >, MeshedChannels< <S2 as Session>::Dual, <S3 as Session>::Dual, <R1 as Role>::Dual, <N0 as Role>::Dual, >, >, End, >; type AtoCClose = End; type AtoBClose = End; type AtoCVideo<N> = Recv<N, Send<N, End>>; type AtoBVideo<N> = Send<N, Recv<N, End>>; type BtoAClose = <AtoBClose as Session>::Dual; type BtoCClose = End; type BtoAVideo<N> = <AtoBVideo<N> as Session>::Dual; type CtoBClose = <BtoCClose as Session>::Dual; type CtoAClose = <AtoCClose as Session>::Dual; type CtoAVideo<N> = <AtoCVideo<N> as Session>::Dual; type StackAEnd = RoleEnd; t
tackAEnd, RoleA<RoleEnd>>; type OfferA<N> = OfferMpst< AtoBVideo<N>, AtoCVideo<N>, AtoBClose, AtoCClose, StackAVideo, StackAEnd, RoleA<RoleEnd>, >; type InitA<N> = Recv<N, Send<N, OfferA<N>>>; type EndpointAFull<N> = MeshedChannels<End, InitA<N>, StackAFull, RoleA<RoleEnd>>; type EndpointBVideo<N> = MeshedChannels<BtoAVideo<N>, BtoCClose, StackBVideo, RoleB<RoleEnd>>; type EndpointBEnd = MeshedChannels<BtoAClose, BtoCClose, StackBEnd, RoleB<RoleEnd>>; type OfferB<N> = OfferMpst< BtoAVideo<N>, BtoCClose, BtoAClose, BtoCClose, StackBVideo, StackBEnd, RoleB<RoleEnd>, >; type EndpointBFull<N> = MeshedChannels<End, OfferB<N>, StackBFull, RoleB<RoleEnd>>; fn server(s: EndpointBFull<i32>) -> Result<(), Box<dyn Error>> { s.offer( |s: EndpointBVideo<i32>| { let (request, s) = s.recv()?; s.send(request + 1).close() }, |s: EndpointBEnd| s.close(), ) } fn authenticator(s: EndpointAFull<i32>) -> Result<(), Box<dyn Error>> { let (id, s) = s.recv()?; s.send(id + 1).offer( |s: EndpointAVideo<i32>| { let (request, s) = s.recv()?; let (video, s) = s.send(request + 1).recv()?; assert_eq!(request, id + 1); assert_eq!(video, id + 3); s.send(video + 1).close() }, |s: EndpointAEnd| s.close(), ) } fn client_video(s: EndpointCFull<i32>) -> Result<(), Box<dyn Error>> { let mut rng = thread_rng(); let id: i32 = rng.gen(); let (accept, s) = s.send(id).recv()?; assert_eq!(accept, id + 1); let (result, s) = s.choose_left().send(accept).recv()?; assert_eq!(result, accept + 3); s.close() } fn client_close(s: EndpointCFull<i32>) -> Result<(), Box<dyn Error>> { let mut rng = thread_rng(); let id: i32 = rng.gen(); let (accept, s) = s.send(id).recv()?; assert_eq!(accept, id + 1); s.choose_right().close() } pub fn run_usecase_right() { assert!(|| -> Result<(), Box<dyn Error>> { { let (thread_a, thread_b, thread_c) = fork_mpst(authenticator, server, client_close); assert!(thread_a.join().is_ok()); assert!(thread_b.join().is_ok()); assert!(thread_c.join().is_ok()); } Ok(()) }() .is_ok()); } pub fn run_usecase_left() { assert!(|| -> Result<(), Box<dyn Error>> { { let (thread_a, thread_b, thread_c) = fork_mpst(authenticator, server, client_video); assert!(thread_a.join().is_ok()); assert!(thread_b.join().is_ok()); assert!(thread_c.join().is_ok()); } Ok(()) }() .is_ok()); }
ype StackAVideo = RoleC<RoleB<RoleB<RoleC<RoleEnd>>>>; type StackAVideoDual = <StackAVideo as Role>::Dual; type StackAFull = RoleC<RoleC<RoleAlltoC<RoleEnd, RoleEnd>>>; type StackBEnd = RoleEnd; type StackBVideo = RoleA<RoleA<RoleEnd>>; type StackBVideoDual = <StackBVideo as Role>::Dual; type StackBFull = RoleAlltoC<RoleEnd, RoleEnd>; type StackCEnd = RoleEnd; type StackCVideo = RoleA<RoleA<RoleEnd>>; type StackCChoice = RoleCtoAll<StackCVideo, StackCEnd>; type StackCFull = RoleA<RoleA<StackCChoice>>; type ChooseCtoA<N> = ChooseMpst< BtoAVideo<N>, CtoAVideo<N>, BtoAClose, CtoAClose, StackAVideoDual, StackAEnd, RoleADual<RoleEnd>, >; type ChooseCtoB<N> = ChooseMpst< AtoBVideo<N>, CtoBClose, AtoBClose, CtoBClose, StackBVideoDual, StackBEnd, RoleBDual<RoleEnd>, >; type InitC<N> = Send<N, Recv<N, ChooseCtoA<N>>>; type EndpointCFull<N> = MeshedChannels<InitC<N>, ChooseCtoB<N>, StackCFull, RoleC<RoleEnd>>; type EndpointAVideo<N> = MeshedChannels<AtoBVideo<N>, AtoCVideo<N>, StackAVideo, RoleA<RoleEnd>>; type EndpointAEnd = MeshedChannels<AtoBClose, AtoCClose, S
random
[ { "content": "type ChooseMpstThree<S0, S1, S2, S3, R0, R1, N0> = Send<\n\n Either<\n\n MeshedChannels<\n\n <S0 as Session>::Dual,\n\n <S1 as Session>::Dual,\n\n <R0 as Role>::Dual,\n\n <N0 as Role>::Dual,\n\n >,\n\n MeshedChannels<\n\n <S2 as Session>::Dual,\n\n <S3 as Session>::Dual,\n\n <R1 as Role>::Dual,\n\n <N0 as Role>::Dual,\n\n >,\n\n >,\n\n End,\n\n>;\n\n\n", "file_path": "tests/baking_mod/macros_baking/macro_multi_choice.rs", "rank": 1, "score": 997217.3855924574 }, { "content": "// Those types will be code generated\n\ntype OfferMpstThree<S0, S1, S2, S3, R0, R1, N0> =\n\n Recv<Either<MeshedChannels<S0, S1, R0, N0>, MeshedChannels<S2, S3, R1, N0>>, End>;\n\n\n", "file_path": "tests/baking_mod/macros_baking/macro_multi_choice.rs", "rank": 3, "score": 903950.8374837806 }, { "content": "type ShortChooseMpstOne<S0, S1, S2, S4, R0, R1, N0> = ChooseMpst<S2, S0, S4, S1, R0, R1, N0>;\n", "file_path": "src/functionmpst/choose.rs", "rank": 4, "score": 892092.2019756674 }, { "content": "type ShortChooseMpstTwo<S0, S1, S3, S5, R0, R1, N0> =\n\n ChooseMpst<S3, <S0 as Session>::Dual, S5, <S1 as Session>::Dual, R0, R1, N0>;\n\n\n", "file_path": "src/functionmpst/choose.rs", "rank": 5, "score": 810914.8054885122 }, { "content": "type ShortMeshedChannelsAtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5> = MeshedChannels<\n\n ShortChooseMpstOne<S0, S1, S2, S4, R0, R1, RoleBDual<RoleEnd>>,\n\n ShortChooseMpstTwo<S0, S1, S3, S5, R2, R3, RoleCDual<RoleEnd>>,\n\n RoleAtoAll<R4, R5>,\n\n RoleA<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/choose.rs", "rank": 6, "score": 721310.3694468541 }, { "content": "type ShortMeshedChannelsCtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5> = MeshedChannels<\n\n ShortChooseMpstOne<S2, S4, S0, S1, R0, R1, RoleADual<RoleEnd>>,\n\n ShortChooseMpstTwo<\n\n <S3 as Session>::Dual,\n\n <S5 as Session>::Dual,\n\n <S0 as Session>::Dual,\n\n <S1 as Session>::Dual,\n\n R2,\n\n R3,\n\n RoleBDual<RoleEnd>,\n\n >,\n\n RoleCtoAll<R4, R5>,\n\n RoleC<RoleEnd>,\n\n>;\n\n\n\n#[doc(hidden)]\n\nmacro_rules! 
choose_mpst_a {\n\n (\n\n $session_1:ty,\n\n $session_2:ty,\n", "file_path": "src/functionmpst/choose.rs", "rank": 7, "score": 721310.3694468541 }, { "content": "type ShortMeshedChannelsBtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5> = MeshedChannels<\n\n ShortChooseMpstOne<S0, S1, S2, S4, R0, R1, RoleADual<RoleEnd>>,\n\n ShortChooseMpstTwo<\n\n <S3 as Session>::Dual,\n\n <S5 as Session>::Dual,\n\n <S0 as Session>::Dual,\n\n <S1 as Session>::Dual,\n\n R2,\n\n R3,\n\n RoleCDual<RoleEnd>,\n\n >,\n\n RoleBtoAll<R4, R5>,\n\n RoleB<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/choose.rs", "rank": 8, "score": 721310.3694468541 }, { "content": "type MeshedChannelsToBFromC<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n S5,\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleB<RoleEnd>>,\n\n RoleAlltoC<RoleEnd, RoleEnd>,\n\n RoleB<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/offer.rs", "rank": 9, "score": 678073.4003803196 }, { "content": "type MeshedChannelsToCFromB<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n S5,\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleC<RoleEnd>>,\n\n RoleAlltoB<RoleEnd, RoleEnd>,\n\n RoleC<RoleEnd>,\n\n>;\n\n\n", "file_path": "src/functionmpst/offer.rs", "rank": 10, "score": 678073.4003803196 }, { "content": "/// Creates and returns a tuple of three child processes for\n\n/// three [`MeshedChannels`] linked\n\n/// together.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use std::error::Error;\n\n///\n\n/// use mpstthree::binary::struct_trait::{end::End, recv::Recv, send::Send, session::Session};\n\n/// use mpstthree::functionmpst::fork::fork_mpst;\n\n/// use mpstthree::meshedchannels::MeshedChannels;\n\n///\n\n/// use mpstthree::functionmpst::close::close_mpst;\n\n///\n\n/// use mpstthree::role::a::RoleA;\n\n/// use mpstthree::role::b::RoleB;\n\n/// use mpstthree::role::c::RoleC;\n\n/// use mpstthree::role::end::RoleEnd;\n\n///\n\n/// use mpstthree::functionmpst::recv::recv_mpst_a_from_c;\n\n/// use mpstthree::functionmpst::recv::recv_mpst_b_from_a;\n\n/// use mpstthree::functionmpst::recv::recv_mpst_c_from_b;\n\n///\n\n/// use mpstthree::functionmpst::send::send_mpst_a_to_b;\n\n/// use mpstthree::functionmpst::send::send_mpst_b_to_c;\n\n/// use mpstthree::functionmpst::send::send_mpst_c_to_a;\n\n///\n\n/// type AtoB<N> = Send<N, End>;\n\n/// type AtoC<N> = Recv<N, End>;\n\n///\n\n/// type BtoA<N> = <AtoB<N> as Session>::Dual;\n\n/// type BtoC<N> = Send<N, End>;\n\n///\n\n/// type CtoA<N> = <AtoC<N> as Session>::Dual;\n\n/// type CtoB<N> = <BtoC<N> as Session>::Dual;\n\n///\n\n/// type StackA = RoleB<RoleC<RoleEnd>>;\n\n/// type StackB = RoleA<RoleC<RoleEnd>>;\n\n/// type StackC = RoleA<RoleB<RoleEnd>>;\n\n///\n\n/// type EndpointA<N> = MeshedChannels<AtoB<N>, AtoC<N>, StackA, RoleA<RoleEnd>>;\n\n/// type EndpointB<N> = MeshedChannels<BtoA<N>, BtoC<N>, StackB, RoleB<RoleEnd>>;\n\n/// type EndpointC<N> = MeshedChannels<CtoA<N>, CtoB<N>, StackC, RoleC<RoleEnd>>;\n\n///\n\n/// fn endpoint_a(s: EndpointA<i32>) -> Result<(), Box<dyn Error>> {\n\n/// let s = send_mpst_a_to_b(1, s);\n\n/// let (_x, s) = recv_mpst_a_from_c(s)?;\n\n/// close_mpst(s)\n\n/// }\n\n///\n\n/// /// Single test for B\n\n/// fn endpoint_b(s: EndpointB<i32>) -> Result<(), Box<dyn Error>> {\n\n/// let (_x, s) = recv_mpst_b_from_a(s)?;\n\n/// let s = send_mpst_b_to_c(2, s);\n\n/// close_mpst(s)\n\n/// }\n\n///\n\n/// /// Single test for C\n\n/// fn endpoint_c(s: EndpointC<i32>) -> Result<(), Box<dyn Error>> {\n\n/// let s = send_mpst_c_to_a(3, s);\n\n/// let (_x, s) = 
recv_mpst_c_from_b(s)?;\n\n/// close_mpst(s)\n\n/// }\n\n/// let (thread_a, thread_b, thread_c) = fork_mpst(endpoint_a, endpoint_b, endpoint_c);\n\n///\n\n/// thread_a.join().unwrap();\n\n/// thread_b.join().unwrap();\n\n/// thread_c.join().unwrap();\n\n/// ```\n\n///\n\n/// Creates 3 pairs of endpoints, each pair of type `S` and\n\n/// `S::Dual`. Creates 3 `Role` for each stack.\n\n/// Creates 3 `MeshedChannels`, linked together with the pairs\n\n/// of endpoints, and get the related child processes.\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\npub fn fork_mpst<S0, S1, S2, R0, R1, R2, N0, N1, N2, F0, F1, F2>(\n\n f0: F0,\n\n f1: F1,\n\n f2: F2,\n\n) -> (JoinHandle<()>, JoinHandle<()>, JoinHandle<()>)\n\nwhere\n\n S0: Session + 'static,\n\n S1: Session + 'static,\n\n S2: Session + 'static,\n\n R0: Role + 'static,\n\n R1: Role + 'static,\n\n R2: Role + 'static,\n\n N0: Role + 'static,\n\n N1: Role + 'static,\n\n N2: Role + 'static,\n\n F0: FnOnce(MeshedChannels<S0, S1, R0, N0>) -> Result<(), Box<dyn Error>>\n\n + marker::Send\n\n + 'static,\n\n F1: FnOnce(MeshedChannels<<S0 as Session>::Dual, S2, R1, N1>) -> Result<(), Box<dyn Error>>\n\n + marker::Send\n", "file_path": "src/functionmpst/fork.rs", "rank": 11, "score": 638567.193028666 }, { "content": "type ReturnType<S1, S2, R> = MeshedChannels<S1, S2, R, RoleC<RoleEnd>>;\n", "file_path": "src/meshedchannels/impl_c.rs", "rank": 12, "score": 627574.2945796833 }, { "content": "type ReturnType<S1, S2, R> = MeshedChannels<S1, S2, R, RoleB<RoleEnd>>;\n", "file_path": "src/meshedchannels/impl_b.rs", "rank": 13, "score": 627564.8296856354 }, { "content": "type MeshedChannelsToAFromC<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n S5,\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleA<RoleEnd>>,\n\n RoleAlltoC<RoleEnd, RoleEnd>,\n\n RoleA<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/offer.rs", "rank": 14, "score": 624299.3118540617 }, { "content": "type MeshedChannelsToCFromA<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleC<RoleEnd>>,\n\n S5,\n\n RoleAlltoA<RoleEnd, RoleEnd>,\n\n RoleC<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/offer.rs", "rank": 15, "score": 624299.3118540617 }, { "content": "type MeshedChannelsToBFromA<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleB<RoleEnd>>,\n\n S5,\n\n RoleAlltoA<RoleEnd, RoleEnd>,\n\n RoleB<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/offer.rs", "rank": 16, "score": 624289.5989734321 }, { "content": "type MeshedChannelsToAFromB<S1, S2, S3, S4, S5, R1, R2> = MeshedChannels<\n\n OfferMpst<S1, S2, S3, S4, R1, R2, RoleA<RoleEnd>>,\n\n S5,\n\n RoleAlltoB<RoleEnd, RoleEnd>,\n\n RoleA<RoleEnd>,\n\n>;\n", "file_path": "src/functionmpst/offer.rs", "rank": 17, "score": 624289.598973432 }, { "content": "/// Given a choice from C, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// second option for each.\n\n///\n\n/// C has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each second option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. 
It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from A to B on left branch\n\n/// * S1: dual session from A to B on right branch\n\n/// * S2: session from C to A on left branch\n\n/// * S3: session from C to B on left branch\n\n/// * S4: session from C to A on right branch\n\n/// * S5: session from C to B on right branch\n\n///\n\n/// * R0: dual stack of A on left branch\n\n/// * R1: dual stack of A on right branch\n\n/// * R2: dual stack of B on left branch\n\n/// * R3: dual stack of B on right branch\n\n/// * R4: stack of C on left branch\n\n/// * R5: stack of C on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_right_mpst_session_c_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsCtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S4, S5, R5, RoleC<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_c!(\n\n S1,\n\n S4,\n", "file_path": "src/functionmpst/choose.rs", "rank": 18, "score": 623475.7220046656 }, { "content": "/// Given a choice from B, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// first option for each.\n\n///\n\n/// B has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each first option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from A to C on left branch\n\n/// * S1: dual session from A to C on right branch\n\n/// * S2: session from B to A on left branch\n\n/// * S3: session from B to C on left branch\n\n/// * S4: session from B to A on right branch\n\n/// * S5: session from B to C on right branch\n\n///\n\n/// * R0: dual stack of A on left branch\n\n/// * R1: dual stack of A on right branch\n\n/// * R2: dual stack of C on left branch\n\n/// * R3: dual stack of C on right branch\n\n/// * R4: stack of B on left branch\n\n/// * R5: stack of B on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_left_mpst_session_b_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsBtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S2, S3, R4, RoleB<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_b!(\n\n S2,\n\n S0,\n", "file_path": "src/functionmpst/choose.rs", "rank": 19, "score": 623475.7214059853 }, { "content": "/// Given a choice from B, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// second option for each.\n\n///\n\n/// B has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. 
This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each second option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from A to C on left branch\n\n/// * S1: dual session from A to C on right branch\n\n/// * S2: session from B to A on left branch\n\n/// * S3: session from B to C on left branch\n\n/// * S4: session from B to A on right branch\n\n/// * S5: session from B to C on right branch\n\n///\n\n/// * R0: dual stack of A on left branch\n\n/// * R1: dual stack of A on right branch\n\n/// * R2: dual stack of C on left branch\n\n/// * R3: dual stack of C on right branch\n\n/// * R4: stack of B on left branch\n\n/// * R5: stack of B on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_right_mpst_session_b_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsBtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S4, S5, R5, RoleB<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_b!(\n\n S4,\n\n S1,\n", "file_path": "src/functionmpst/choose.rs", "rank": 20, "score": 623475.7214059852 }, { "content": "/// Given a choice from C, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// first option for each.\n\n///\n\n/// C has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each first option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. 
It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from A to B on left branch\n\n/// * S1: dual session from A to B on right branch\n\n/// * S2: session from C to A on left branch\n\n/// * S3: session from C to B on left branch\n\n/// * S4: session from C to A on right branch\n\n/// * S5: session from C to B on right branch\n\n///\n\n/// * R0: dual stack of A on left branch\n\n/// * R1: dual stack of A on right branch\n\n/// * R2: dual stack of B on left branch\n\n/// * R3: dual stack of B on right branch\n\n/// * R4: stack of C on left branch\n\n/// * R5: stack of C on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_left_mpst_session_c_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsCtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S2, S3, R4, RoleC<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_c!(\n\n S0,\n\n S2,\n", "file_path": "src/functionmpst/choose.rs", "rank": 21, "score": 623475.7220046658 }, { "content": "/// Given a choice from A, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// second option for each.\n\n///\n\n/// A has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each second option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from B to C on left branch\n\n/// * S1: dual session from B to C on right branch\n\n/// * S2: session from A to B on left branch\n\n/// * S3: session from A to C on left branch\n\n/// * S4: session from A to B on right branch\n\n/// * S5: session from A to C on right branch\n\n///\n\n/// * R0: dual stack of B on left branch\n\n/// * R1: dual stack of B on right branch\n\n/// * R2: dual stack of C on left branch\n\n/// * R3: dual stack of C on right branch\n\n/// * R4: stack of A on left branch\n\n/// * R5: stack of A on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_right_mpst_session_a_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsAtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S4, S5, R5, RoleA<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_a!(\n\n S4,\n", "file_path": "src/functionmpst/choose.rs", "rank": 22, "score": 623475.4807963098 }, { "content": "/// Given a choice from A, to other processes, between two\n\n/// [`MeshedChannels`], choose the\n\n/// first option for each.\n\n///\n\n/// A has to encapsulate all possible\n\n/// [`MeshedChannels`] for each other\n\n/// role. 
This function creates the 6 new binary\n\n/// [`Session`], the 3 new\n\n/// [`Role`] related to each first option\n\n/// then the related [`mpstthree::meshedchannels::\n\n/// MeshedChannels`]. It then sends those options to the\n\n/// related processes.\n\n///\n\n/// * S0: dual session from B to C on left branch\n\n/// * S1: dual session from B to C on right branch\n\n/// * S2: session from A to B on left branch\n\n/// * S3: session from A to C on left branch\n\n/// * S4: session from A to B on right branch\n\n/// * S5: session from A to C on right branch\n\n///\n\n/// * R0: dual stack of B on left branch\n\n/// * R1: dual stack of B on right branch\n\n/// * R2: dual stack of C on left branch\n\n/// * R3: dual stack of C on right branch\n\n/// * R4: stack of A on left branch\n\n/// * R5: stack of A on right branch\n\n///\n\n/// [`MeshedChannels`]: crate::meshedchannels::MeshedChannels\n\n/// [`Session`]: crate::binary::struct_trait::session::Session\n\n/// [`Role`]: crate::role::Role\n\npub fn choose_left_mpst_session_a_to_all<'a, S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>(\n\n s: ShortMeshedChannelsAtoAll<S0, S1, S2, S3, S4, S5, R0, R1, R2, R3, R4, R5>,\n\n) -> MeshedChannels<S2, S3, R4, RoleA<RoleEnd>>\n\nwhere\n\n S0: Session + 'a,\n\n S1: Session + 'a,\n\n S2: Session + 'a,\n\n S3: Session + 'a,\n\n S4: Session + 'a,\n\n S5: Session + 'a,\n\n R0: Role + 'a,\n\n R1: Role + 'a,\n\n R2: Role + 'a,\n\n R3: Role + 'a,\n\n R4: Role + 'a,\n\n R5: Role + 'a,\n\n{\n\n choose_mpst_a!(\n\n S2,\n\n S3,\n", "file_path": "src/functionmpst/choose.rs", "rank": 23, "score": 623475.4807963099 }, { "content": "#[doc(hidden)]\n\npub fn fork_mpst_solo<S0, S1, S2, R0, R1, R2, N0, N1, N2, F>(f: F) -> Result<(), Box<dyn Error>>\n\nwhere\n\n S0: Session + 'static,\n\n S1: Session + 'static,\n\n S2: Session + 'static,\n\n R0: Role + 'static,\n\n R1: Role + 'static,\n\n R2: Role + 'static,\n\n N0: Role + 'static,\n\n N1: Role + 'static,\n\n N2: Role + 'static,\n\n F: FnOnce(\n\n MeshedChannels<S0, S1, R0, N0>,\n\n MeshedChannels<<S0 as Session>::Dual, S2, R1, N1>,\n\n MeshedChannels<<S1 as Session>::Dual, <S2 as Session>::Dual, R2, N2>,\n\n ) -> Result<(), Box<dyn Error>>\n\n + marker::Send\n\n + 'static,\n\n{\n\n let (channel_ab, channel_ba) = S0::new();\n", "file_path": "src/interleaved/fork.rs", "rank": 24, "score": 597278.4401793674 }, { "content": "type ReturnType<S1, S2, R> = MeshedChannels<S1, S2, R, RoleA<RoleEnd>>;\n", "file_path": "src/meshedchannels/impl_a.rs", "rank": 25, "score": 573196.6654805413 }, { "content": "type EndpointC = MeshedChannelsThree<End, Send<i32, End>, RoleB<RoleEnd>, NameC>;\n\n\n", "file_path": "tests/cancel_mod/cancel_04.rs", "rank": 26, "score": 540833.3385311114 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, Send<i32, End>, RoleB<RoleC<RoleEnd>>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_05.rs", "rank": 27, "score": 535739.6337671615 }, { "content": "type EndpointB = MeshedChannels<Recv<i32, End>, Send<i32, End>, RoleA<RoleC<RoleEnd>>, NameB>;\n", "file_path": "tests/cancel_mod/cancel_00.rs", "rank": 28, "score": 520163.7638053002 }, { "content": "type EndpointB = MeshedChannelsThree<Recv<i32, End>, Send<i32, End>, RoleA<RoleC<RoleEnd>>, NameB>;\n", "file_path": "tests/cancel_mod/cancel_01.rs", "rank": 29, "score": 517242.24287937017 }, { "content": "type EndpointB = MeshedChannelsThree<Recv<i32, End>, Send<i32, End>, RoleA<RoleC<RoleEnd>>, NameB>;\n", "file_path": "tests/cancel_mod/cancel_16.rs", "rank": 30, "score": 
517242.24287937017 }, { "content": "type EndpointB = MeshedChannelsThree<Recv<i32, End>, Send<i32, End>, RoleA<RoleC<RoleEnd>>, NameB>;\n", "file_path": "tests/cancel_mod/cancel_15.rs", "rank": 31, "score": 517242.24287937017 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, Recv<i32, End>, RoleC<RoleB<RoleEnd>>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_02.rs", "rank": 32, "score": 504455.47516805725 }, { "content": "type PawnC = MeshedChannels<End, End, End, End, RoleEnd, NameC>;\n", "file_path": "tests/interleaved_mod/macro_multi_send_recv_meshedchannels_solo.rs", "rank": 33, "score": 502429.4606268904 }, { "content": "type PawnC = MeshedChannels<End, End, End, End, RoleEnd, NameC>;\n", "file_path": "tests/macros_multiple_mod/macro_multi_send_recv_meshedchannels.rs", "rank": 34, "score": 502429.4606268904 }, { "content": "type PawnC = MeshedChannels<End, End, End, End, RoleEnd, NameC>;\n", "file_path": "tests/baking_mod/macros_baking/macro_multi_send_recv_meshedchannels.rs", "rank": 35, "score": 499624.5669813182 }, { "content": "type EndpointC = MeshedChannelsFour<End, End, Send<i32, End>, RoleD<RoleEnd>, NameC>;\n", "file_path": "tests/cancel_mod/cancel_03.rs", "rank": 36, "score": 488507.3771022579 }, { "content": "type EndpointC = MeshedChannelsThree<Send<i32, End>, End, RoleA<RoleEnd>, NameC>;\n\n\n", "file_path": "tests/cancel_mod/cancel_02.rs", "rank": 37, "score": 487061.3868427657 }, { "content": "type ChoiceCNo = MeshedChannelsThree<Send<i32, End>, End, RoleA<RoleEnd>, NameC>;\n", "file_path": "benches/main_all/baking/travel_three.rs", "rank": 38, "score": 484657.74184609426 }, { "content": "type SendMeshedChannelsD<N> = MeshedChannels<End, Send<N, End>, End, End, NameB, NameD>;\n\n\n", "file_path": "tests/interleaved_mod/macro_multi_send_recv_meshedchannels_solo.rs", "rank": 39, "score": 484064.0599849168 }, { "content": "type SendMeshedChannelsD<N> = MeshedChannels<End, Send<N, End>, End, End, NameB, NameD>;\n\n\n", "file_path": "tests/macros_multiple_mod/macro_multi_send_recv_meshedchannels.rs", "rank": 40, "score": 484064.0599849168 }, { "content": "// Creating the binary sessions\n\ntype AtoB<N> = Send<N, End>;\n", "file_path": "tests/baking_mod/unit_meshedchannels.rs", "rank": 41, "score": 483773.153988098 }, { "content": "type SendMeshedChannelsD<N> = MeshedChannels<End, Send<N, End>, End, End, NameB, NameD>;\n\n\n", "file_path": "tests/baking_mod/macros_baking/macro_multi_send_recv_meshedchannels.rs", "rank": 42, "score": 481562.52415958507 }, { "content": "type Choose0fromCtoB = Send<Branches0BtoC, End>;\n\n\n", "file_path": "benches/main_all/baking/video_stream.rs", "rank": 43, "score": 480067.6766166302 }, { "content": "// Creating the binary sessions\n\ntype AtoB<N> = Send<N, End>;\n", "file_path": "tests/macros_simple_mod/unit_meshedchannels.rs", "rank": 44, "score": 479573.4807847577 }, { "content": "// Creating the binary sessions\n\ntype AtoB<N> = Send<N, End>;\n", "file_path": "tests/macros_multiple_mod/unit_meshedchannels.rs", "rank": 45, "score": 479573.4807847577 }, { "content": "type EndpointCDone<N> = MeshedChannelsThree<End, Send<N, End>, RoleS<RoleEnd>, NameC>;\n\n\n", "file_path": "examples/o_auth_2.rs", "rank": 46, "score": 479551.07670107775 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsFour<Send<i32, End>, End, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_03.rs", "rank": 47, "score": 477774.45224983495 }, { "content": "type EndpointCDone<N> = MeshedChannelsThree<End, 
Send<N, End>, RoleS<RoleEnd>, NameC>;\n\n\n", "file_path": "tests/http/o_auth.rs", "rank": 48, "score": 477195.5682363941 }, { "content": "type EndpointAFail = MeshedChannelsThree<Send<Fail, End>, End, RoleC<RoleEnd>, NameA>;\n", "file_path": "examples/o_auth.rs", "rank": 49, "score": 476978.2911224168 }, { "content": "// Types\n\ntype EndpointA = MeshedChannels<Send<i32, End>, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_00.rs", "rank": 50, "score": 476975.5460576504 }, { "content": "type Choose0fromBtoC = Send<Branches0CtoB, End>;\n", "file_path": "tests/checking_mod/basics/checking_recursion.rs", "rank": 51, "score": 476098.27923851693 }, { "content": "type EndpointAFail = MeshedChannelsThree<Send<Fail, End>, End, RoleC<RoleEnd>, NameA>;\n", "file_path": "examples/o_auth_checking.rs", "rank": 52, "score": 474320.50038967247 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_01.rs", "rank": 53, "score": 474317.8777148475 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_15.rs", "rank": 54, "score": 474317.8777148475 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_04.rs", "rank": 55, "score": 474317.8777148475 }, { "content": "// Types\n\ntype EndpointA = MeshedChannelsThree<Send<i32, End>, End, RoleB<RoleEnd>, NameA>;\n", "file_path": "tests/cancel_mod/cancel_16.rs", "rank": 56, "score": 474317.8777148475 }, { "content": "type EndpointCDone<N> = MeshedChannelsThree<End, Send<N, End>, RoleS<RoleEnd>, NameC>;\n\n\n", "file_path": "tests/http/o_auth_fail_too_false.rs", "rank": 57, "score": 472622.8853058373 }, { "content": "type EndpointCDone<N> = MeshedChannelsThree<End, Send<N, End>, RoleS<RoleEnd>, NameC>;\n\n\n", "file_path": "tests/http/o_auth_fail_too_true.rs", "rank": 58, "score": 472622.8853058373 }, { "content": "type Choose0fromCtoB = Send<Branches0BtoC, End>;\n\n\n", "file_path": "tests/baking_mod/simple_baking/c_usecase_recursive.rs", "rank": 59, "score": 472232.8827379293 }, { "content": "type Choose0fromBtoC = Send<Branches0CtoB, End>;\n", "file_path": "tests/basics_mod/simple_basics/b_usecase_recursive.rs", "rank": 60, "score": 472232.88273792923 }, { "content": "type Choose0fromBtoC = Send<Branches0CtoB, End>;\n", "file_path": "tests/baking_mod/simple_baking/b_usecase_recursive.rs", "rank": 61, "score": 472232.88273792923 }, { "content": "type Choose0fromCtoB = Send<Branches0BtoC, End>;\n\n\n", "file_path": "tests/basics_mod/simple_basics/c_usecase_recursive.rs", "rank": 62, "score": 472232.8827379293 }, { "content": "type EndpointAFail = MeshedChannelsThree<Send<Fail, End>, End, RoleC<RoleEnd>, NameA>;\n", "file_path": "benches/main_all/basic/o_auth.rs", "rank": 63, "score": 471723.44590757304 }, { "content": "type EndpointAFail = MeshedChannelsThree<Send<Fail, End>, End, RoleC<RoleEnd>, NameA>;\n", "file_path": "benches/main_all/baking/o_auth.rs", "rank": 64, "score": 471723.44590757304 }, { "content": "type Choose0fromCtoB<N> = Send<Branches0BtoC<N>, End>;\n\n\n", "file_path": "examples/video_stream.rs", "rank": 65, "score": 469877.27184036956 }, { "content": "type EndpointADone<N> = MeshedChannelsThree<Send<N, End>, End, RoleC<RoleEnd>, NameA>;\n\n\n", "file_path": "examples/o_auth_2.rs", "rank": 66, "score": 469213.83524465596 }, { 
"content": "type EndpointADone<N> = MeshedChannelsThree<Send<N, End>, End, RoleC<RoleEnd>, NameA>;\n\n\n", "file_path": "tests/http/o_auth.rs", "rank": 67, "score": 466616.78076255653 }, { "content": "type Choose0fromCtoB<N> = Send<Branches0BtoC<N>, End>;\n\n\n", "file_path": "benches/main_all/basic/video_stream.rs", "rank": 68, "score": 462042.4779616686 }, { "content": "type EndpointADone<N> = MeshedChannelsThree<Send<N, End>, End, RoleC<RoleEnd>, NameA>;\n\n\n", "file_path": "tests/http/o_auth_fail_too_false.rs", "rank": 69, "score": 461596.3444437933 }, { "content": "type EndpointADone<N> = MeshedChannelsThree<Send<N, End>, End, RoleC<RoleEnd>, NameA>;\n\n\n", "file_path": "tests/http/o_auth_fail_too_true.rs", "rank": 70, "score": 461596.3444437933 }, { "content": "type Choose0fromCtoB<N> = Send<Branches0BtoC<N>, End>;\n\n\n", "file_path": "tests/macros_multiple_mod/macro_recursive.rs", "rank": 71, "score": 458277.0478524583 }, { "content": "type Choose0fromDtoC = Send<(End, Branching0fromDtoC), End>;\n\n\n", "file_path": "tests/cancel_mod/cancel_08.rs", "rank": 72, "score": 456278.3196422248 }, { "content": "type Choose0fromDtoC = Send<(End, Branching0fromDtoC), End>;\n\n\n", "file_path": "tests/cancel_mod/cancel_12.rs", "rank": 73, "score": 456278.3196422248 }, { "content": "type Choose0fromDtoC = Send<(End, Branching0fromDtoC), End>;\n\n\n", "file_path": "tests/cancel_mod/cancel_13.rs", "rank": 74, "score": 456278.3196422248 }, { "content": "type Choose0fromDtoB = Send<(End, Branching0fromDtoB), End>;\n", "file_path": "tests/cancel_mod/cancel_08.rs", "rank": 75, "score": 456265.9600706644 }, { "content": "type Choose0fromDtoB = Send<(End, Branching0fromDtoB), End>;\n", "file_path": "tests/cancel_mod/cancel_12.rs", "rank": 76, "score": 456265.9600706644 }, { "content": "type Choose0fromDtoB = Send<(End, Branching0fromDtoB), End>;\n", "file_path": "tests/cancel_mod/cancel_13.rs", "rank": 77, "score": 456265.9600706644 }, { "content": "type EndpointS0 = MeshedChannelsTwo<Send<(), Offer0fromCtoS>, RoleC<RoleC<RoleEnd>>, NameS>;\n\n\n", "file_path": "examples/smtp.rs", "rank": 78, "score": 454767.3163457031 }, { "content": "type Choose0fromCtoB<N> = Send<Branches0BtoC<N>, End>;\n\n\n", "file_path": "tests/baking_mod/macros_baking/macro_recursive.rs", "rank": 79, "score": 454607.7556632458 }, { "content": "type Choose0fromHtoC = Send<(End, Branching0fromHtoC), End>;\n", "file_path": "example/ring/ring_eight_broadcast_cancel.rs", "rank": 80, "score": 450741.8735854485 }, { "content": "type Choose0fromJtoC = Send<(End, Branching0fromJtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_ten.rs", "rank": 81, "score": 450741.8735854485 }, { "content": "type Choose0fromJtoC = Send<(End, Branching0fromJtoC), End>;\n", "file_path": "example/ring/ring_ten_broadcast_cancel.rs", "rank": 82, "score": 450741.8735854485 }, { "content": "type Choose0fromKtoC = Send<(End, Branching0fromKtoC), End>;\n", "file_path": "example/ring/ring_eleven_broadcast_cancel.rs", "rank": 83, "score": 450741.8735854485 }, { "content": "type Choose0fromGtoC = Send<(End, Branching0fromGtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_seven.rs", "rank": 84, "score": 450741.8735854485 }, { "content": "type Choose0fromDtoC = Send<(End, Branching0fromDtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_four.rs", "rank": 85, "score": 450741.8735854485 }, { "content": "type Choose0fromFtoC = Send<(End, Branching0fromFtoC), End>;\n", "file_path": 
"benches/ring_all/cancel_broadcast/ring_six.rs", "rank": 86, "score": 450741.8735854485 }, { "content": "type Choose0fromItoC = Send<(End, Branching0fromItoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_nine.rs", "rank": 87, "score": 450741.8735854485 }, { "content": "type Choose0fromHtoC = Send<(End, Branching0fromHtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_eight.rs", "rank": 88, "score": 450741.8735854485 }, { "content": "type Choose0fromDtoC = Send<(End, Branching0fromDtoC), End>;\n", "file_path": "example/ring/ring_four_broadcast_cancel.rs", "rank": 89, "score": 450741.8735854485 }, { "content": "type Choose0fromFtoC = Send<(End, Branching0fromFtoC), End>;\n", "file_path": "example/ring/ring_six_broadcast_cancel.rs", "rank": 90, "score": 450741.8735854485 }, { "content": "type Choose0fromItoC = Send<(End, Branching0fromItoC), End>;\n", "file_path": "example/ring/ring_nine_broadcast_cancel.rs", "rank": 91, "score": 450741.8735854485 }, { "content": "type Choose0fromKtoC = Send<(End, Branching0fromKtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_eleven.rs", "rank": 92, "score": 450741.8735854485 }, { "content": "type Choose0fromGtoC = Send<(End, Branching0fromGtoC), End>;\n", "file_path": "example/ring/ring_seven_broadcast_cancel.rs", "rank": 93, "score": 450741.8735854485 }, { "content": "type Choose0fromEtoC = Send<(End, Branching0fromEtoC), End>;\n", "file_path": "example/ring/ring_five_broadcast_cancel.rs", "rank": 94, "score": 450741.8735854485 }, { "content": "type Choose0fromEtoC = Send<(End, Branching0fromEtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_five.rs", "rank": 95, "score": 450741.8735854485 }, { "content": "type Choose0fromTtoC = Send<(End, Branching0fromTtoC), End>;\n", "file_path": "example/ring/ring_twenty_broadcast_cancel.rs", "rank": 96, "score": 450741.8735854485 }, { "content": "type Choose0fromTtoC = Send<(End, Branching0fromTtoC), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_twenty.rs", "rank": 97, "score": 450741.8735854485 }, { "content": "type Choose0fromTtoB = Send<(End, Branching0fromTtoB), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_twenty.rs", "rank": 98, "score": 450729.7166706814 }, { "content": "type Choose0fromEtoB = Send<(End, Branching0fromEtoB), End>;\n", "file_path": "benches/ring_all/cancel_broadcast/ring_five.rs", "rank": 99, "score": 450729.7166706814 } ]
Rust
src/managers/memory.rs
aajtodd/riam
2b72f7d2eb5bac184631c2926f41629a44de6fd5
use crate::{Policy, PolicyManager, Result, RiamError}; use std::collections::HashMap; use std::collections::HashSet; use uuid::Uuid; pub struct MemoryManager { by_principal: HashMap<String, HashSet<Uuid>>, by_id: HashMap<Uuid, Policy>, } impl MemoryManager { pub fn new() -> Self { MemoryManager { by_principal: HashMap::new(), by_id: HashMap::new(), } } } impl PolicyManager for MemoryManager { fn create(&mut self, mut policy: Policy) -> Result<Uuid> { if !policy.is_valid() { return Err(RiamError::InvalidPolicy); } let id = Uuid::new_v4(); policy.id = Some(id); self.by_id.insert(id, policy); Ok(id) } fn update(&mut self, policy: &Policy) -> Result<()> { match policy.id { Some(id) => match self.by_id.get_mut(&id) { Some(p) => { *p = policy.clone(); } None => return Err(RiamError::UnknownPolicy), }, None => return Err(RiamError::InvalidPolicy), } Ok(()) } fn get(&self, id: &Uuid) -> Result<&Policy> { if let Some(p) = self.by_id.get(&id) { return Ok(p); } Err(RiamError::UnknownPolicy) } fn delete(&mut self, id: &Uuid) -> Result<()> { if let Some(_) = self.by_id.remove(&id) { self.by_principal.retain(|_principal, pset| { pset.remove(&id); return pset.len() > 0; }) } else { return Err(RiamError::UnknownPolicy); } Ok(()) } fn list(&self) -> Result<Vec<Policy>> { Ok(Vec::new()) } fn get_policies_for_principal(&self, principal: &str) -> Result<Option<Vec<Policy>>> { if let Some(policy_ids) = self.by_principal.get(principal) { let mut policies: Vec<Policy> = Vec::with_capacity(policy_ids.len()); for id in policy_ids { let p = self.by_id.get(id).unwrap(); policies.push(p.clone()); } return Ok(Some(policies)); } Ok(None) } fn attach(&mut self, principal: &str, id: &Uuid) -> Result<()> { self.by_principal .entry(principal.to_owned()) .or_insert(HashSet::new()) .insert(*id); Ok(()) } fn detach(&mut self, principal: &str, id: &Uuid) -> Result<()> { if let Some(pset) = self.by_principal.get_mut(principal) { pset.remove(id); if pset.len() == 0 { self.by_principal.remove(principal); } } Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_memory_manager_attach_detach() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { "sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let policy: Policy = serde_json::from_str(jsp).unwrap(); let id = mgr.create(policy).unwrap(); let principal = "user:test-user"; mgr.attach(principal, &id).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 1); mgr.detach(principal, &id).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap(); assert_eq!(actual.is_none(), true); } #[test] fn test_memory_manager_update() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { "sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let policy: Policy = serde_json::from_str(jsp).unwrap(); let id = mgr.create(policy).unwrap(); let mut policy = mgr.get(&id).unwrap().clone(); assert_eq!(policy.name, Some("Account policy".to_owned())); policy.name = Some("Modified Name".to_owned()); mgr.update(&policy).unwrap(); let policy = mgr.get(&id).unwrap().clone(); assert_eq!(policy.name, Some("Modified Name".to_owned())); } #[test] fn test_memory_manager_delete() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { 
"sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let p1: Policy = serde_json::from_str(jsp).unwrap(); let id1 = mgr.create(p1).unwrap(); let jsp = r#" { "name": "Blog policy", "statements": [ { "sid": "Deny blog access", "effect": "deny", "actions": ["blog:list"], "resources": ["resource:blog:*"] } ] } "#; let p2: Policy = serde_json::from_str(jsp).unwrap(); let id2 = mgr.create(p2).unwrap(); let principal = "users:test-user"; mgr.attach(principal, &id1).unwrap(); mgr.attach(principal, &id2).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 2); mgr.delete(&id1).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 1); } }
use crate::{Policy, PolicyManager, Result, RiamError}; use std::collections::HashMap; use std::collections::HashSet; use uuid::Uuid; pub struct MemoryManager { by_principal: HashMap<String, HashSet<Uuid>>, by_id: HashMap<Uuid, Policy>, } impl MemoryManager { pub fn new() -> Self { MemoryManager { by_principal: HashMap::new(), by_id: HashMap::new(), } } } impl PolicyManager for MemoryManager { fn create(&mut self, mut policy: Policy) -> Result<Uuid> { if !policy.is_valid() { return Err(RiamError::InvalidPolicy); } let id = Uuid::new_v4(); policy.id = Some(id); self.by_id.insert(id, policy); Ok(id) } fn update(&mut self, policy: &Policy) -> Result<()> { match policy.id { Some(id) => match self.by_id.get_mut(&id) { Some(p) => { *p = policy.clone(); } None => return Err(RiamError::UnknownPolicy), }, None => return Err(RiamError::InvalidPolicy), } Ok(()) } fn get(&self, id: &Uuid) -> Result<&Policy> { if let Some(p) = self.by_id.get(&id) { return Ok(p); } Err(RiamError::UnknownPolicy) } fn delete(&mut self, id: &Uuid) -> Result<()> { if let Some(_) = self.by_id.rem
fn list(&self) -> Result<Vec<Policy>> { Ok(Vec::new()) } fn get_policies_for_principal(&self, principal: &str) -> Result<Option<Vec<Policy>>> { if let Some(policy_ids) = self.by_principal.get(principal) { let mut policies: Vec<Policy> = Vec::with_capacity(policy_ids.len()); for id in policy_ids { let p = self.by_id.get(id).unwrap(); policies.push(p.clone()); } return Ok(Some(policies)); } Ok(None) } fn attach(&mut self, principal: &str, id: &Uuid) -> Result<()> { self.by_principal .entry(principal.to_owned()) .or_insert(HashSet::new()) .insert(*id); Ok(()) } fn detach(&mut self, principal: &str, id: &Uuid) -> Result<()> { if let Some(pset) = self.by_principal.get_mut(principal) { pset.remove(id); if pset.len() == 0 { self.by_principal.remove(principal); } } Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_memory_manager_attach_detach() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { "sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let policy: Policy = serde_json::from_str(jsp).unwrap(); let id = mgr.create(policy).unwrap(); let principal = "user:test-user"; mgr.attach(principal, &id).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 1); mgr.detach(principal, &id).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap(); assert_eq!(actual.is_none(), true); } #[test] fn test_memory_manager_update() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { "sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let policy: Policy = serde_json::from_str(jsp).unwrap(); let id = mgr.create(policy).unwrap(); let mut policy = mgr.get(&id).unwrap().clone(); assert_eq!(policy.name, Some("Account policy".to_owned())); policy.name = Some("Modified Name".to_owned()); mgr.update(&policy).unwrap(); let policy = mgr.get(&id).unwrap().clone(); assert_eq!(policy.name, Some("Modified Name".to_owned())); } #[test] fn test_memory_manager_delete() { let mut mgr = MemoryManager::new(); let jsp = r#" { "name": "Account policy", "statements": [ { "sid": "Grant account access", "effect": "allow", "actions": ["account:list", "account:get"], "resources": ["resource:account:1235"] } ] } "#; let p1: Policy = serde_json::from_str(jsp).unwrap(); let id1 = mgr.create(p1).unwrap(); let jsp = r#" { "name": "Blog policy", "statements": [ { "sid": "Deny blog access", "effect": "deny", "actions": ["blog:list"], "resources": ["resource:blog:*"] } ] } "#; let p2: Policy = serde_json::from_str(jsp).unwrap(); let id2 = mgr.create(p2).unwrap(); let principal = "users:test-user"; mgr.attach(principal, &id1).unwrap(); mgr.attach(principal, &id2).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 2); mgr.delete(&id1).unwrap(); let actual = mgr.get_policies_for_principal(principal).unwrap().unwrap(); assert_eq!(actual.len(), 1); } }
ove(&id) { self.by_principal.retain(|_principal, pset| { pset.remove(&id); return pset.len() > 0; }) } else { return Err(RiamError::UnknownPolicy); } Ok(()) }
function_block-function_prefixed
[ { "content": "// custom serialize for Vec<String> for policy statements. If the vec length is\n\n// 1 the output will be flattened to just that single string. Otherwise it will\n\n// serialize normally to a sequence\n\n// e.g.\n\n// vec![\"actions:list\"] -> \"actions:list\"\n\n// vec![\"actions:list\", \"actions:get\"] -> [\"actions:list\", \"actions:get\"]\n\nfn se_scalar_or_seq_string<S>(x: &Vec<String>, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if x.len() == 1 {\n\n return s.serialize_str(&x[0]);\n\n }\n\n\n\n let mut seq = s.serialize_seq(Some(x.len()))?;\n\n for e in x {\n\n seq.serialize_element(e)?;\n\n }\n\n seq.end()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_test::{assert_tokens, Token};\n\n\n", "file_path": "src/policy.rs", "rank": 0, "score": 58599.68195410349 }, { "content": "/// Manage creation, storage/retrieval, and deletion of policies.\n\npub trait PolicyManager {\n\n /// Create and store a new policy. The policy ID is returned\n\n fn create(&mut self, policy: Policy) -> Result<Uuid>;\n\n\n\n /// Update an existing policy\n\n fn update(&mut self, policy: &Policy) -> Result<()>;\n\n\n\n /// Delete a policy\n\n ///\n\n /// Note: This will detach the policy from all principals it is currently used by\n\n fn delete(&mut self, id: &Uuid) -> Result<()>;\n\n\n\n /// Get a policy by id\n\n fn get(&self, id: &Uuid) -> Result<&Policy>;\n\n\n\n /// List all policies\n\n fn list(&self) -> Result<Vec<Policy>>;\n\n\n\n /// Attach a policy to a principal\n\n fn attach(&mut self, principal: &str, id: &Uuid) -> Result<()>;\n", "file_path": "src/engine.rs", "rank": 1, "score": 46251.278407415986 }, { "content": "#[test]\n\nfn get_policies_by_principal() {\n\n let jsp = r#\"\n\n [\n\n {\n\n \"name\": \"Account policy\",\n\n \"statements\": [\n\n {\n\n \"sid\": \"Grant account access\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"account:list\", \"account:get\"],\n\n \"resources\": [\"resource:account:1235\"]\n\n }\n\n ]\n\n },\n\n {\n\n \"name\": \"Blog policy\",\n\n \"statements\": [\n\n {\n\n \"effect\": \"allow\",\n\n \"actions\": [\"blog:list\", \"blog:get\"],\n", "file_path": "tests/memory_manager.rs", "rank": 2, "score": 44712.21998101847 }, { "content": "// custom deserialize for policy statements which deserializes scalar strings and sequence\n\n// of strings both to Vec<String>.\n\n// If the input is a scalar string it serializes it to vec of length 1, otherwise if it is\n\n// a normal sequence/array it will use the builtin sequence deserializer.\n\n// e.g.\n\n// \"actions:list\" -> vec![\"actions:list\"]\n\n// [\"actions:list\", \"actions:get\"] -> vec![\"actions:list\", \"actions:get\"]\n\nfn de_scalar_or_seq_string<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrVec(PhantomData<Vec<String>>);\n\n\n\n impl<'de> de::Visitor<'de> for StringOrVec {\n\n type Value = Vec<String>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or list of strings\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(vec![value.to_owned()])\n\n }\n\n\n", "file_path": "src/policy.rs", "rank": 3, "score": 43526.846093141685 }, { "content": " serialize_with = \"se_scalar_or_seq_string\",\n\n deserialize_with = \"de_scalar_or_seq_string\"\n\n )]\n\n pub resources: Vec<String>,\n\n}\n\n\n\n/// Policy represents an access control policy which is used to 
either grant or deny a\n\n/// principal (users/groups/roles/etc) actions on specific resources.\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Policy {\n\n /// The unique ID assigned to the policy\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub id: Option<Uuid>,\n\n\n\n /// The policy name (e.g. \"FullAdminAccess\")\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub name: Option<String>,\n\n\n\n /// The body of the policy\n\n pub statements: Vec<Statement>,\n", "file_path": "src/policy.rs", "rank": 4, "score": 20999.923173487397 }, { "content": " }\n\n\n\n #[test]\n\n fn test_policy_is_valid() {\n\n let mut policy = Policy {\n\n name: None,\n\n id: None,\n\n statements: Vec::new(),\n\n };\n\n\n\n assert_eq!(false, policy.is_valid());\n\n\n\n let st1 = Statement {\n\n sid: None,\n\n effect: Effect::Allow,\n\n actions: vec_of_strings![\"blog:list\"],\n\n resources: vec_of_strings![\"resources:blog:123\", \"resources:blog:*\"],\n\n };\n\n\n\n // invalid statement\n", "file_path": "src/policy.rs", "rank": 5, "score": 20998.74280284525 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\n\n\nuse serde::de::{self, Deserializer};\n\nuse serde::ser::{SerializeSeq, Serializer};\n\n\n\n/// Effect indicates whether a policy statement allows or denies access\n\n#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)]\n\npub enum Effect {\n\n /// Allow access\n\n #[serde(rename = \"allow\")]\n\n Allow,\n\n\n\n /// Deny access\n\n #[serde(rename = \"deny\")]\n\n Deny,\n\n}\n", "file_path": "src/policy.rs", "rank": 6, "score": 20996.51174348063 }, { "content": " Token::Str(\"actions:list\"),\n\n Token::Str(\"resources\"),\n\n Token::Str(\"resources:123\"),\n\n Token::StructEnd,\n\n ],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_policy_serialization() {\n\n let policy = Policy {\n\n name: Some(\"my policy\".into()),\n\n id: None,\n\n statements: vec![Statement {\n\n sid: Some(\"my statement\".into()),\n\n effect: Effect::Allow,\n\n actions: vec_of_strings![\"blog:list\", \"blog:get\"],\n\n resources: vec_of_strings![\"resources:blog:123\", \"resources:blog:*\"],\n\n }],\n\n };\n", "file_path": "src/policy.rs", "rank": 7, "score": 20995.857074549516 }, { "content": "\n\n/// Statement contains information about a single permission\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Statement {\n\n /// An optional statement id. This is used to differentiate statements e.g. \"Grant read access to resource:xyz\"\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub sid: Option<String>,\n\n\n\n /// Allow or Deny the actions\n\n pub effect: Effect,\n\n\n\n /// One or more actions that apply to the resources\n\n #[serde(\n\n serialize_with = \"se_scalar_or_seq_string\",\n\n deserialize_with = \"de_scalar_or_seq_string\"\n\n )]\n\n pub actions: Vec<String>,\n\n\n\n /// The resources the statement applies to\n\n #[serde(\n", "file_path": "src/policy.rs", "rank": 8, "score": 20995.50430299158 }, { "content": " macro_rules! 
vec_of_strings {\n\n ($($x:expr),*) => (vec![$($x.to_string()),*]);\n\n }\n\n\n\n #[test]\n\n fn test_statement_serialization_no_sid() {\n\n // sid should be left off serialized json when not set\n\n let statement = Statement {\n\n sid: None,\n\n effect: Effect::Deny,\n\n actions: Vec::new(),\n\n resources: Vec::new(),\n\n };\n\n\n\n assert_tokens(\n\n &statement,\n\n &[\n\n Token::Struct {\n\n name: \"Statement\",\n\n len: 3,\n", "file_path": "src/policy.rs", "rank": 9, "score": 20994.773708897996 }, { "content": " fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error>\n\n where\n\n S: de::SeqAccess<'de>,\n\n {\n\n Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(StringOrVec(PhantomData))\n\n}\n\n\n", "file_path": "src/policy.rs", "rank": 10, "score": 20994.645415371582 }, { "content": " let st2 = Statement {\n\n sid: None,\n\n effect: Effect::Deny,\n\n actions: vec_of_strings![\"account:list\"],\n\n resources: Vec::new(),\n\n };\n\n\n\n policy.statements.push(st1);\n\n policy.statements.push(st2);\n\n assert_eq!(false, policy.is_valid());\n\n\n\n policy.statements[1]\n\n .resources\n\n .push(\"resource:account\".into());\n\n assert_eq!(true, policy.is_valid());\n\n }\n\n}\n", "file_path": "src/policy.rs", "rank": 11, "score": 20994.620483509607 }, { "content": "}\n\n\n\nimpl Policy {\n\n /// Check if the policy is (structurally) valid\n\n pub fn is_valid(&self) -> bool {\n\n // TODO - validate resource names and action names follow whatever grammar we define for them\n\n return !(self.statements.is_empty()\n\n || self\n\n .statements\n\n .iter()\n\n .any(|x| x.actions.is_empty() || x.resources.is_empty()));\n\n }\n\n}\n\n\n\n// custom deserialize for policy statements which deserializes scalar strings and sequence\n\n// of strings both to Vec<String>.\n\n// If the input is a scalar string it serializes it to vec of length 1, otherwise if it is\n\n// a normal sequence/array it will use the builtin sequence deserializer.\n\n// e.g.\n\n// \"actions:list\" -> vec![\"actions:list\"]\n\n// [\"actions:list\", \"actions:get\"] -> vec![\"actions:list\", \"actions:get\"]\n", "file_path": "src/policy.rs", "rank": 12, "score": 20994.60126354151 }, { "content": "\n\n assert_tokens(\n\n &policy,\n\n &[\n\n Token::Struct {\n\n name: \"Policy\",\n\n len: 2,\n\n },\n\n Token::Str(\"name\"),\n\n Token::Some,\n\n Token::Str(\"my policy\"),\n\n Token::Str(\"statements\"),\n\n Token::Seq { len: Some(1) },\n\n Token::Struct {\n\n name: \"Statement\",\n\n len: 4,\n\n },\n\n Token::Str(\"sid\"),\n\n Token::Some,\n\n Token::Str(\"my statement\"),\n", "file_path": "src/policy.rs", "rank": 13, "score": 20993.590183734617 }, { "content": " let statement = Statement {\n\n sid: None,\n\n effect: Effect::Deny,\n\n actions: vec_of_strings![\"actions:list\"],\n\n resources: vec_of_strings![\"resources:123\"],\n\n };\n\n\n\n assert_tokens(\n\n &statement,\n\n &[\n\n Token::Struct {\n\n name: \"Statement\",\n\n len: 3,\n\n },\n\n Token::Str(\"effect\"),\n\n Token::UnitVariant {\n\n name: \"Effect\",\n\n variant: \"deny\",\n\n },\n\n Token::Str(\"actions\"),\n", "file_path": "src/policy.rs", "rank": 14, "score": 20992.469318437135 }, { "content": " Token::Str(\"effect\"),\n\n Token::UnitVariant {\n\n name: \"Effect\",\n\n variant: \"allow\",\n\n },\n\n Token::Str(\"actions\"),\n\n Token::Seq { len: Some(2) },\n\n Token::Str(\"blog:list\"),\n\n Token::Str(\"blog:get\"),\n\n Token::SeqEnd,\n\n Token::Str(\"resources\"),\n\n Token::Seq { len: 
Some(2) },\n\n Token::Str(\"resources:blog:123\"),\n\n Token::Str(\"resources:blog:*\"),\n\n Token::SeqEnd,\n\n Token::StructEnd,\n\n Token::SeqEnd,\n\n Token::StructEnd,\n\n ],\n\n );\n", "file_path": "src/policy.rs", "rank": 15, "score": 20991.131015587558 }, { "content": " },\n\n Token::Str(\"effect\"),\n\n Token::UnitVariant {\n\n name: \"Effect\",\n\n variant: \"deny\",\n\n },\n\n Token::Str(\"actions\"),\n\n Token::Seq { len: Some(0) },\n\n Token::SeqEnd,\n\n Token::Str(\"resources\"),\n\n Token::Seq { len: Some(0) },\n\n Token::SeqEnd,\n\n Token::StructEnd,\n\n ],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_statement_serialization_scalars() {\n\n // test serialization/deserialization with scalar fields\n", "file_path": "src/policy.rs", "rank": 16, "score": 20990.647015320024 }, { "content": " /// Create a new engine with the given policy manager\n\n pub fn new(manager: T) -> Self {\n\n Engine { manager: manager }\n\n }\n\n\n\n /// Check if an action is allowed or not\n\n pub fn is_allowed(&mut self, req: &AuthRequest) -> Result<bool> {\n\n let policies = self.manager.get_policies_for_principal(&req.principal)?;\n\n\n\n if policies.is_none() {\n\n // no policies for the given principal\n\n return Ok(false);\n\n }\n\n let policies = policies.unwrap();\n\n\n\n let mut allowed = false;\n\n\n\n // we have to iterate over all the policies since policy statements may be contradictory\n\n // (e.g. one allows, another explicitly denies). Explicit denies take precedence over\n\n // the\n", "file_path": "src/engine.rs", "rank": 23, "score": 14.014501352007 }, { "content": "\n\n /// Detach a policy from a principal\n\n fn detach(&mut self, principal: &str, id: &Uuid) -> Result<()>;\n\n\n\n /// Get all policies for a given principal\n\n fn get_policies_for_principal(&self, principal: &str) -> Result<Option<Vec<Policy>>>;\n\n}\n\n\n\n/// Engine implements the logic to check if a specific request (action)\n\n/// by a principal is allowed or not on a particular resource.\n\n///\n\n/// An action is allowed if and only if there is an explicit \"allow\" statement that can be applied. Any explicit \"deny\" statements will override an \"allow\".\n\n/// If no statement matches then a request is implicitly denied by default.\n\n///\n\npub struct Engine<T: PolicyManager> {\n\n /// The underlying policy manager/storage mechanism\n\n pub manager: T,\n\n}\n\n\n\nimpl<T: PolicyManager> Engine<T> {\n", "file_path": "src/engine.rs", "rank": 24, "score": 12.274147928363199 }, { "content": "use crate::wildcard;\n\nuse crate::{AuthRequest, Effect, Policy, Result};\n\nuse uuid::Uuid;\n\n\n\n/// Manage creation, storage/retrieval, and deletion of policies.\n", "file_path": "src/engine.rs", "rank": 25, "score": 10.03538930543046 }, { "content": "#![cfg_attr(all(test, feature = \"nightly\"), feature(test))]\n\n#![deny(missing_docs)]\n\n//! # riam\n\n//! riam is a decision/policy engine inspired by AWS IAM policies.\n\n//!\n\n//! See the [REPO] README for additional information.\n\n//!\n\n//! 
[REPO]: https://github.com/aajtodd/riam\n\n\n\n#[cfg(all(test, feature = \"nightly\"))]\n\nextern crate test;\n\n\n\nmod engine;\n\nmod error;\n\npub mod managers;\n\nmod policy;\n\nmod request;\n\nmod wildcard;\n\n\n\npub use engine::{Engine, PolicyManager};\n\npub use error::{Result, RiamError};\n\npub use policy::{Effect, Policy, Statement};\n\npub use request::AuthRequest;\n", "file_path": "src/lib.rs", "rank": 26, "score": 8.52747788431667 }, { "content": " /// The policy does not exist\n\n #[fail(display = \"Invalid policy\")]\n\n UnknownPolicy,\n\n}\n\n\n\nimpl From<io::Error> for RiamError {\n\n fn from(err: io::Error) -> RiamError {\n\n RiamError::Io(err)\n\n }\n\n}\n\n\n\nimpl From<serde_json::Error> for RiamError {\n\n fn from(err: serde_json::Error) -> RiamError {\n\n RiamError::Serde(err)\n\n }\n\n}\n\n\n\n/// Result type for riam\n\npub type Result<T> = std::result::Result<T, RiamError>;\n", "file_path": "src/error.rs", "rank": 27, "score": 8.471711411284197 }, { "content": " use super::*;\n\n use crate::managers::MemoryManager;\n\n\n\n #[cfg(feature = \"nightly\")]\n\n use test::Bencher;\n\n\n\n #[test]\n\n fn test_engine_is_allowed() {\n\n let mut engine = Engine::new(MemoryManager::new());\n\n\n\n let jsp = r#\"\n\n {\n\n \"name\": \"Account policy\",\n\n \"statements\": [\n\n {\n\n \"sid\": \"Grant account list access\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"account:list\"],\n\n \"resources\": [\"resource:account:*\"]\n\n },\n", "file_path": "src/engine.rs", "rank": 29, "score": 7.706119335477717 }, { "content": " let mut mgr = MemoryManager::new();\n\n for p in policies.into_iter() {\n\n let principal = if p.name.as_ref().unwrap().contains(\"Blog\") {\n\n \"users:test-user-1\"\n\n } else {\n\n \"users:test-user-2\"\n\n };\n\n\n\n let id = mgr.create(p).unwrap();\n\n mgr.attach(&principal, &id).unwrap();\n\n }\n\n\n\n let actual = mgr\n\n .get_policies_for_principal(\"users:test-user-2\")\n\n .unwrap()\n\n .unwrap();\n\n\n\n assert_eq!(actual.len(), 2);\n\n}\n", "file_path": "tests/memory_manager.rs", "rank": 30, "score": 7.1955762728932005 }, { "content": "//! Concrete policy managers\n\n\n\nmod memory;\n\npub use memory::MemoryManager;\n", "file_path": "src/managers/mod.rs", "rank": 31, "score": 6.754560084205581 }, { "content": "/// Test whether the incoming pattern string 's' matches the given pattern string 'pattern'.\n\n///\n\n/// A wildcard '*' character may appear anywhere in the `pattern`. A wildcard character matches\n\n/// any character any number of times until the next character in the pattern is seen. If the\n\n/// wildcard is the last char in the pattern then the remaining input string must be a match.\n\n///\n\n/// e.g. `abc*xyz` matches `abcdefghijkxyz`\n\n///\n\n/// If no wildcard is present in the pattern the inputs must be an exact match.\n\n///\n\npub(crate) fn matches(pattern: &str, s: &str) -> bool {\n\n let mut piter = pattern.chars();\n\n let mut siter = s.chars();\n\n\n\n let mut pc = piter.next();\n\n let mut sc = siter.next();\n\n\n\n while pc.is_some() && sc.is_some() {\n\n let pcurr = pc.unwrap();\n\n let scurr = sc.unwrap();\n", "file_path": "src/wildcard.rs", "rank": 32, "score": 6.500892839726159 }, { "content": "## Multiple Statements and Multiple Policies\n\n\n\nIf you want to define more than one permission for a principal, you can use multiple statements in a single policy. You can also attach multiple policies. 
If you try to define multiple permissions in a single statement, your policy might not grant the access that you expect. As a best practice, break up policies by resource type(s).\n\n\n\nIt's a good idea to create functional groupings of permissions in policies. For example, maybe you have a forum website, you might create one policy for user management, one for managing posts, and another for moderator access. Regardless of the combination of multiple statements and multiple policies, riam [evaluates](#policy-evaluation) your policies the same way. \n\n\n\n\n\n## Policy Evaluation\n\n\n\nriam decides if a (authorization) request is allowed or denied (for a specific principal) using the following logic:\n\n\n\n* By default, all requests are implicitly denied. \n\n* An explicit allow overrides the default deny.\n\n* An explicit deny in **any** policy overrides any allow(s).\n\n\n\nIf a policy includes multiple statements, riam applies a logical `OR` across the statements when evaluating them. If multiple policies apply to a request, riam applies a logical `OR` across all of those policies when evaluating them. \n\n\n\n\n\n## Wildcards\n\n\n\nA wildcard character `*` is allowed in any policy statement action or resource. Wildcards are greedy and will match any character up to the next character in the pattern. If a pattern ends with a wildcard then the result will be a match (assuming the prefix was already a match). \n\n\n\nActions and Resources are matched character for character for equality and _are_ case sensitive. \n\n\n\nExamples\n\n\n\n| Pattern | Input | Matches | \n\n|--------------|-----------------|---------| \n\n| abc*xyz | abcdefghgkxyz | true |\n\n| a* | abcdefghgkxyz | true |\n\n| a*c | abd | false |\n\n| a*C | abc | false |\n\n\n\n\n\n## Conditions\n\n\n\nTODO - placeholder for conditions\n\n\n\n\n\n# Guidelines\n\n\n\n- TODO guidelines on naming principals, actions, and resources\n\n- TODO guidelines on securing HTTP API if/when available\n", "file_path": "README.md", "rank": 34, "score": 5.934945074032868 }, { "content": "use failure::Fail;\n\nuse std::io;\n\n\n\n/// Error type for riam\n\n#[derive(Fail, Debug)]\n\npub enum RiamError {\n\n /// IO error\n\n #[fail(display = \"IO error: {}\", _0)]\n\n Io(#[cause] io::Error),\n\n\n\n /// Serialization or deserialization error\n\n #[fail(display = \"serde_json error: {}\", _0)]\n\n Serde(#[cause] serde_json::Error),\n\n\n\n /// Invalid policy\n\n /// The policy is not well formed.\n\n #[fail(display = \"Invalid policy\")]\n\n InvalidPolicy,\n\n\n\n /// NonExistant policy\n", "file_path": "src/error.rs", "rank": 35, "score": 5.803160425124317 }, { "content": " assert_eq!(expected, actual, \"req: {:?}\", req);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"nightly\")]\n\n #[bench]\n\n fn bench_is_allowed(b: &mut Bencher) {\n\n let mut engine = Engine::new(MemoryManager::new());\n\n let jsp = r#\"\n\n {\n\n \"name\": \"Account policy\",\n\n \"statements\": [\n\n {\n\n \"sid\": \"Grant account list access\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"account:list\"],\n\n \"resources\": [\"resource:account:*\"]\n\n },\n\n {\n\n \"sid\": \"Deny root account access\",\n", "file_path": "src/engine.rs", "rank": 36, "score": 5.685253959217457 }, { "content": "#![warn(rust_2018_idioms)]\n\n\n\nuse riam::managers::MemoryManager;\n\nuse riam::{Policy, PolicyManager};\n\n\n\n#[test]\n", "file_path": "tests/memory_manager.rs", "rank": 38, "score": 4.998736241857127 }, { "content": " ];\n\n\n\n for x in cases {\n\n let (pattern, input, expected) = 
x;\n\n let actual = matches(pattern, input);\n\n assert_eq!(expected, actual, \"pattern: {}, input: {}\", pattern, input);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"nightly\")]\n\n #[bench]\n\n fn bench_match_wildcard(b: &mut Bencher) {\n\n let pattern = \"actions:*:list:123\";\n\n let input = \"actions:accounts:list:123\";\n\n b.iter(|| matches(pattern, input));\n\n }\n\n\n\n #[cfg(feature = \"nightly\")]\n\n #[bench]\n\n fn bench_match_exact(b: &mut Bencher) {\n\n let pattern = \"actions:accounts:list:123\";\n\n let input = \"actions:accounts:list:123\";\n\n b.iter(|| matches(pattern, input));\n\n }\n\n}\n", "file_path": "src/wildcard.rs", "rank": 39, "score": 4.927422396306922 }, { "content": " for p in policies.iter() {\n\n // check the policy statements\n\n for stmt in p.statements.iter() {\n\n // check if any of the actions match\n\n if !stmt\n\n .actions\n\n .iter()\n\n .any(|action| wildcard::matches(action, &req.action))\n\n {\n\n continue;\n\n }\n\n\n\n // check if any of the resources match\n\n if !stmt\n\n .resources\n\n .iter()\n\n .any(|resource| wildcard::matches(resource, &req.resource))\n\n {\n\n continue;\n\n }\n", "file_path": "src/engine.rs", "rank": 40, "score": 4.863601504140826 }, { "content": "\n\n // the current statement is a candidate, check the intended effect\n\n match stmt.effect {\n\n Effect::Allow => {\n\n allowed = true;\n\n }\n\n Effect::Deny => {\n\n // explicit deny\n\n return Ok(false);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(allowed)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/engine.rs", "rank": 41, "score": 4.739604758456748 }, { "content": "/// AuthRequest represents an attempted action on a resource. It describes who, what, and how\n\n/// the resource in question is to be accessed and is used to make authorization decisions.\n\n#[derive(Debug)]\n\npub struct AuthRequest {\n\n /// The subject (user/group/etc) attempting the action\n\n pub principal: String,\n\n\n\n /// The name of the action being taken\n\n pub action: String,\n\n\n\n /// The resources being acted upon\n\n pub resource: String,\n\n}\n", "file_path": "src/request.rs", "rank": 42, "score": 4.562163060880071 }, { "content": "A JSON policy document includes these elements:\n\n\n\n* **name**: (Optional) The name given to your policy\n\n* **statements**: One or more statements (permissions)\n\n\n\nA statement describes a single permission (a group of one or more actions allowed or denied on one or more resources). \n\n\n\n* **sid**: (Optional) Include an optional statement ID to differentiate between your statements\n\n* **effect**: Use `allow` or `deny` to indicate whether the policy allows or denies access.\n\n* **actions**: Include a list of actions that the policy allows or denies. This may be a single scalar string `view` or a list of actions `[\"view\", \"edit\"]`\n\n* **resources**: A list of resources to which the actions apply. 
Like actions this can be scalar string or a list of strings.\n\n\n\n\n", "file_path": "README.md", "rank": 44, "score": 4.303338129001974 }, { "content": " \"effect\": \"deny\",\n\n \"actions\": [\"account:list\"],\n\n \"resources\": [\"resource:account:123\"]\n\n },\n\n {\n\n \"sid\": \"Grant all read access on specific account\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"account:describe:*\"],\n\n \"resources\": [\"resource:account:789\"]\n\n }\n\n ]\n\n }\n\n \"#;\n\n\n\n let policy: Policy = serde_json::from_str(jsp).unwrap();\n\n let id = engine.manager.create(policy).unwrap();\n\n let principal = \"user:test-user\";\n\n engine.manager.attach(principal, &id).unwrap();\n\n\n\n let (principal, action, resource) = (\n", "file_path": "src/engine.rs", "rank": 45, "score": 4.061348003584419 }, { "content": " {\n\n \"sid\": \"Deny root account access\",\n\n \"effect\": \"deny\",\n\n \"actions\": [\"account:list\"],\n\n \"resources\": [\"resource:account:123\"]\n\n },\n\n {\n\n \"sid\": \"Grant all read access on specific account\",\n\n \"effect\": \"allow\",\n\n \"actions\": \"account:describe:*\",\n\n \"resources\": \"resource:account:789\"\n\n }\n\n ]\n\n }\n\n \"#;\n\n\n\n let policy: Policy = serde_json::from_str(jsp).unwrap();\n\n let id = engine.manager.create(policy).unwrap();\n\n let principal = \"user:test-user\";\n\n engine.manager.attach(principal, &id).unwrap();\n", "file_path": "src/engine.rs", "rank": 46, "score": 4.010868466671529 }, { "content": "\n\n // Case 1: '*' is seen in pattern\n\n // look at next char in pattern if any\n\n // - if any chars left, grab that char and consume chars until that char is seen, if we exhaust 's' then no match\n\n // - if none, short circuit. pattern ends in a '*' and matches the remaining string whatever it is\n\n if pcurr == '*' {\n\n pc = piter.next();\n\n // peek ahead\n\n if let Some(nc) = pc {\n\n if nc == '*' {\n\n // special case '**', consume first '*' in pattern and cont\n\n continue;\n\n }\n\n\n\n // consume chars in sc until that char is found or it is exhausted\n\n let pnext = pc.unwrap();\n\n while sc.is_some() && sc != Some(pnext) {\n\n sc = siter.next();\n\n }\n\n } else {\n", "file_path": "src/wildcard.rs", "rank": 47, "score": 3.551227205096475 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[cfg(feature = \"nightly\")]\n\n use test::Bencher;\n\n\n\n #[test]\n\n fn test_matches() {\n\n let cases = vec![\n\n (\"resources:blog:123\", \"resources:blog:123\", true),\n\n (\"re*123\", \"resources:blog:123\", true),\n\n (\"resources:blog:*\", \"resources:blog:123\", true),\n\n (\"resources:*\", \"resources:blog:123\", true),\n\n (\"resources:blog:123\", \"resources:blog:789\", false),\n\n (\"accounts:123\", \"resources:blog:789\", false),\n\n (\"actions:*:123\", \"actions:accounts:list:123\", false),\n\n (\"actions:*:list:*\", \"actions:accounts:list:123\", true),\n\n (\"actions:*:*:123\", \"actions:accounts:list:123\", true),\n\n (\"actions:**123\", \"actions:accounts:list:123\", true),\n", "file_path": "src/wildcard.rs", "rank": 48, "score": 3.2480633214150556 }, { "content": "# Concepts\n\n\n\nPolicies in `riam` are very similar to [AWS IAM polcies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html). If you are unfamiliar with them I would look at the provided documentation link to get an idea of what they are and how they work. 
IAM (like) policies are an alternative to Role Based Access Control ([RBAC](https://en.wikipedia.org/wiki/Role-based_access_control)) or Access Control Lists ([ACL[(https://en.wikipedia.org/wiki/Access-control_list)). \n\n\n\nNotable differences from AWS IAM policies:\n\n - riam policies only offer \"identity\" based policies in AWS terms. In riam's case an identity is abstract though and can represent whatever you want (user, group, machine/service, etc)\n\n - `Actions` and `Resources` are also abstract and not predefined. You model your own actions and resources that fit your domain. See the guidelines on [naming](#guidelines)\n\n\n\n**Important:** riam is not an identity provider. It doesn't do authentication and knows nothing about authenticated users. In terms of users (principals) it only stores which policies are attached to a particular principal. It's sole purpose is to evaluate authorization decisions *after authentication has already taken place*.\n\n\n\nPolicies are JSON documents that grant or deny access to carry out actions on one or more resources.\n\n\n\n```json\n\n{\n\n \"name\": \"Blog policy\",\n\n \"statements\": [\n\n {\n\n \"sid\": \"Grant access to specific post\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"blog:edit\", \"blog:delete\"],\n\n \"resources\": [\"resource:blog:123\"]\n\n },\n\n {\n\n \"sid\": \"Grant access to view all blogs\",\n\n \"effect\": \"allow\",\n\n \"actions\": \"blog:view\",\n\n \"resources\": \"resource:blog:*\"\n\n }\n\n ]\n\n}\n\n```\n\n\n\nThis policy allows \"edit\" and \"delete\" actions on a specific resource (\"resource:blog:123\") and allows \"view\" action on all blog resources (\"resource:blog:*\") via the use of a [wildcard](#wildcards).\n\n\n", "file_path": "README.md", "rank": 49, "score": 2.6008642565798303 }, { "content": " \"resources\": [\"resource:blog:*\"]\n\n }\n\n ]\n\n },\n\n {\n\n \"name\": \"S3 policy\",\n\n \"statements\": [\n\n {\n\n \"sid\": \"Grant S3 read\",\n\n \"effect\": \"allow\",\n\n \"actions\": [\"s3:list\"],\n\n \"resources\": [\"resource:s3:bucket:*\"]\n\n }\n\n ]\n\n }\n\n ]\n\n \"#;\n\n\n\n let policies: Vec<Policy> = serde_json::from_str(jsp).unwrap();\n\n\n", "file_path": "tests/memory_manager.rs", "rank": 50, "score": 2.1761400762449155 }, { "content": "# Riam\n\n\n\nIAM inspired policy engine written for Rust.\n\n\n\n[![Crates.io][crates-badge]][crates-url]\n\n[![Documentation](https://docs.rs/riam/badge.svg)](https://docs.rs/riam/)\n\n[![Build Status][azure-badge]][azure-url]\n\n[![MIT licensed][mit-badge]][mit-url]\n\n\n\n[crates-badge]: https://img.shields.io/crates/v/riam?label=riam\n\n[crates-url]: https://crates.io/crates/riam\n\n[mit-badge]: https://img.shields.io/badge/license-MIT-blue.svg\n\n[mit-url]: LICENSE\n\n[azure-badge]: https://dev.azure.com/aajtodd0847/riam/_apis/build/status/aajtodd.riam?branchName=master\n\n[azure-url]: https://dev.azure.com/aajtodd0847/riam/_build/latest?definitionId=1&branchName=master\n\n\n\n# Overview\n\n\n\nRiam is an access control library that provides a means of evaluating authorization decisions. In other words it answers the question of\n\n\n\n> Is some entity allowed to carry out an action on a given resource?\n\n\n\nYou manage access to resources by creating policies and attaching them to a principal (users, group, server, etc). A policy is an object that, when associated with a principle, defines their permissions. 
When a principal entity (user/service/etc) attempts an action you would formulate this as an authorization request to this library to evaluate if that principal should be allowed to carry out that action. \n\n\n\nNOTE: this library is a rather low level primitive, unlike in AWS where policies are evaluated for you automatically when requests are made. \n\n\n\nIn the future a higher level service will be built on top of this library to provide an authorization endpoint for evaluating decisions. Additionally an authorization proxy that can be placed in front of an HTTP API would allow for authorization decisions to be made automatically by mappting routes to permissions (e.g. from a JWT).\n\n\n\n\n", "file_path": "README.md", "rank": 52, "score": 1.8936134340841684 }, { "content": " // pattern is exhausted and ends in a '*', short circuit because it matches anything remaining in 's'\n\n return true;\n\n }\n\n } else if pcurr != scurr {\n\n // Case 2: both pattern and incoming 's' chars are regular chars, compare 1-1 for equality\n\n return false;\n\n }\n\n\n\n pc = piter.next();\n\n sc = siter.next();\n\n }\n\n\n\n // one or more iterators is exhausted\n\n if pc.is_some() || sc.is_some() {\n\n return false;\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "src/wildcard.rs", "rank": 53, "score": 1.8584715985106715 }, { "content": "\n\n #[rustfmt::skip]\n\n let cases = vec![\n\n // principal, action, resource, expected\n\n ( \"user:test-user\", \"account:list\", \"resource:account:567\", true,), // statement 1\n\n ( \"user:test-user\", \"account:list\", \"resource:account:789\", true,), // statement 1\n\n ( \"user:test-user-2\", \"account:list\", \"resource:account:789\", false,), // non-existent principal\n\n ( \"user:test-user\", \"account:list\", \"resource:account:123\", false,), // statement 2 (explicit deny w/allowed match on other statements)\n\n ( \"user:test-user\", \"account:describe:limits\", \"resource:account:123\", false,), // no matching statements\n\n ( \"user:test-user\", \"account:describe:limits\", \"resource:account:789\", true,), // statement 3\n\n ];\n\n for x in cases {\n\n let (principal, action, resource, expected) = x;\n\n let req = AuthRequest {\n\n principal: principal.to_string(),\n\n action: action.to_string(),\n\n resource: resource.to_string(),\n\n };\n\n\n\n let actual = engine.is_allowed(&req).unwrap();\n", "file_path": "src/engine.rs", "rank": 55, "score": 1.682975381347224 } ]
Rust
src/hir/constructor2enum.rs
KeenS/webml
60f4d899d623d5872c325412054bd0d77e37c4aa
use crate::config::Config; use crate::hir::util::Transform; use crate::hir::*; use crate::pass::Pass; use std::collections::{HashMap, HashSet}; pub struct ConstructorToEnumPass { enum_likes: HashSet<Symbol>, symbol_table: SymbolTable, } fn rewrite_ty(enum_likes: &HashSet<Symbol>, ty: HTy) -> HTy { use HTy::*; match ty { Datatype(name) if enum_likes.contains(&name) => HTy::Int, Fun(arg, ret) => Fun( Box::new(rewrite_ty(enum_likes, *arg)), Box::new(rewrite_ty(enum_likes, *ret)), ), Tuple(tuple) => Tuple( tuple .into_iter() .map(|t| rewrite_ty(enum_likes, t)) .collect(), ), ty => ty, } } impl ConstructorToEnumPass { fn new(symbol_table: SymbolTable) -> Self { let (enum_likes, types) = symbol_table .types .into_iter() .partition::<HashMap<Symbol, TypeInfo>, _>(|(_, type_info)| { type_info.constructors.iter().all(|(_, arg)| arg.is_none()) }); let enum_likes = enum_likes.into_iter().map(|(name, _)| name).collect(); let types = types .into_iter() .map(|(name, mut type_info)| { type_info.constructors = type_info .constructors .into_iter() .map(|(descriminant, argty)| match argty { Some(ty) => (descriminant, Some(rewrite_ty(&enum_likes, ty))), argty @ None => (descriminant, argty), }) .collect(); (name, type_info) }) .collect(); let symbol_table = SymbolTable { types }; let mut this = Self { enum_likes, symbol_table, }; this.rewrite_table(); this } fn rewrite_table(&mut self) {} fn is_enum_like(&self, name: &Symbol) -> bool { self.enum_likes.contains(name) } fn rewrite_ty(&self, ty: HTy) -> HTy { rewrite_ty(&self.enum_likes, ty) } } impl Transform for ConstructorToEnumPass { fn transform_val(&mut self, mut val: Val) -> Val { val.ty = self.rewrite_ty(val.ty); val.expr = self.transform_expr(val.expr); val } fn transform_binds(&mut self, ty: HTy, bind: Box<Val>, ret: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::Let { ty, bind: Box::new(self.transform_val(*bind)), ret: Box::new(self.transform_expr(*ret)), } } fn transform_fun( &mut self, param: (HTy, Symbol), body_ty: HTy, body: Box<Expr>, captures: Vec<(HTy, Symbol)>, ) -> Expr { let (param_ty, param) = param; let param_ty = self.rewrite_ty(param_ty); let param = (param_ty, param); let body_ty = self.rewrite_ty(body_ty); let captures = captures .into_iter() .map(|(ty, name)| (self.rewrite_ty(ty), name)) .collect(); Expr::Fun { param, body_ty, captures, body: Box::new(self.transform_expr(*body)), } } fn transform_closure( &mut self, envs: Vec<(HTy, Symbol)>, param_ty: HTy, body_ty: HTy, fname: Symbol, ) -> Expr { let envs = envs .into_iter() .map(|(ty, name)| (self.rewrite_ty(ty), name)) .collect(); let param_ty = self.rewrite_ty(param_ty); let body_ty = self.rewrite_ty(body_ty); Expr::Closure { envs, param_ty, body_ty, fname, } } fn transform_builtin_call(&mut self, ty: HTy, fun: BIF, args: Vec<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::BuiltinCall { ty, fun, args: args .into_iter() .map(|arg| self.transform_expr(arg)) .collect(), } } fn transform_extern_call( &mut self, ty: HTy, module: String, fun: String, args: Vec<Expr>, ) -> Expr { let ty = self.rewrite_ty(ty); Expr::ExternCall { ty, module, fun, args: args .into_iter() .map(|arg| self.transform_expr(arg)) .collect(), } } fn transform_app(&mut self, ty: HTy, fun: Box<Expr>, arg: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::App { ty, fun: Box::new(self.transform_expr(*fun)), arg: Box::new(self.transform_expr(*arg)), } } fn transform_case(&mut self, ty: HTy, cond: Box<Expr>, arms: Vec<(Pattern, Expr)>) -> Expr { let ty = self.rewrite_ty(ty); let mut arms = arms; 
arms = arms .into_iter() .map(|(pat, expr)| { use Pattern::*; let pat = match pat { Constant { value, ty } => { let ty = self.rewrite_ty(ty); Constant { value, ty } } Char { value, ty } => { let ty = self.rewrite_ty(ty); Char { value, ty } } Tuple { tys, tuple } => { let tys = tys.into_iter().map(|ty| self.rewrite_ty(ty)).collect(); Tuple { tys, tuple } } Constructor { descriminant, ty, arg, } => match ty { HTy::Datatype(name) if self.is_enum_like(&name) => Constant { ty: HTy::Int, value: descriminant as i64, }, ty => Constructor { descriminant, ty, arg, }, }, Var { name, ty } => Var { name, ty: self.rewrite_ty(ty), }, }; (pat, expr) }) .collect(); Expr::Case { ty, expr: Box::new(self.transform_expr(*cond)), arms: arms .into_iter() .map(|(pat, expr)| (pat, self.transform_expr(expr))) .collect(), } } fn transform_tuple(&mut self, tys: Vec<HTy>, tuple: Vec<Expr>) -> Expr { let tys = tys.into_iter().map(|ty| self.rewrite_ty(ty)).collect(); Expr::Tuple { tys, tuple: tuple.into_iter().map(|e| self.transform_expr(e)).collect(), } } fn transform_proj(&mut self, ty: HTy, index: u32, tuple: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::Proj { ty, index, tuple: Box::new(self.transform_expr(*tuple)), } } fn transform_constructor( &mut self, ty: HTy, arg: Option<Box<Expr>>, descriminant: u32, ) -> Expr { let name = match &ty { HTy::Datatype(name) => name, _ => unreachable!(), }; if self.is_enum_like(name) { Expr::Lit { ty: HTy::Int, value: Literal::Int(descriminant as i64), } } else { Expr::Constructor { ty, arg, descriminant, } } } fn transform_sym(&mut self, ty: HTy, name: Symbol) -> Expr { let ty = self.rewrite_ty(ty); Expr::Sym { ty, name } } fn transform_lit(&mut self, ty: HTy, value: Literal) -> Expr { let ty = self.rewrite_ty(ty); Expr::Lit { ty, value } } } pub struct ConstructorToEnum {} impl ConstructorToEnum { pub fn new() -> Self { Self {} } } impl<E> Pass<Context, E> for ConstructorToEnum { type Target = Context; fn trans( &mut self, Context(symbol_table, hir): Context, _: &Config, ) -> ::std::result::Result<Self::Target, E> { let mut pass = ConstructorToEnumPass::new(symbol_table); let hir = pass.transform_hir(hir); let symbol_table = pass.symbol_table; Ok(Context(symbol_table, hir)) } }
use crate::config::Config; use crate::hir::util::Transform; use crate::hir::*; use crate::pass::Pass; use std::collections::{HashMap, HashSet}; pub struct ConstructorToEnumPass { enum_likes: HashSet<Symbol>, symbol_table: SymbolTable, } fn rewrite_ty(enum_likes: &HashSet<Symbol>, ty: HTy) -> HTy { use HTy::*; match ty { Datatype(name) if enum_likes.contains(&name) => HTy::Int, Fun(arg, ret) => Fun( Box::new(rewrite_ty(enum_likes, *arg)), Box::new(rewrite_ty(enum_likes, *ret)), ), Tuple(tuple) => Tuple( tuple .into_iter() .map(|t| rewrite_ty(enum_likes, t)) .collect(), ), ty => ty, } } impl ConstructorToEnumPass { fn new(symbol_table: SymbolTable) -> Self { let (enum_likes, types) = symbol_table .types .into_iter() .partition::<HashMap<Symbol, TypeInfo>, _>(|(_, type_info)| { type_info.constructors.iter().all(|(_, arg)| arg.is_none()) }); let enum_likes = enum_likes.into_iter().map(|(name, _)| name).collect(); let types = types .into_iter() .map(|(name, mut type_info)| { type_info.constructors = type_info .constructors .into_iter() .map(|(descriminant, argty)| match argty { Some(ty) => (descriminant, Some(rewrite_ty(&enum_likes, ty))), argty @ None => (descriminant, argty), }) .collect(); (name, type_info) }) .collect(); let symbol_table = SymbolTable { types }; let mut this = Self { enum_likes, symbol_table, }; this.rewrite_table(); this } fn rewrite_table(&mut self) {} fn is_enum_like(&self, name: &Symbol) -> bool { self.enum_likes.contains(name) } fn rewrite_ty(&self, ty: HTy) -> HTy { rewrite_ty(&self.enum_likes, ty) } } impl Transform for ConstructorToEnumPass { fn transform_val(&mut self, mut val: Val) -> Val { val.ty = self.rewrite_ty(val.ty); val.expr = self.transform_expr(val.expr); val } fn transform_binds(&mut self, ty: HTy, bind: Box<Val>, ret: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::Let { ty, bind: Box::new(self.transform_val(*bind)), ret: Box::new(self.transform_expr(*ret)), } } fn transform_fun( &mut self, param: (HTy, Symbol), body_ty: HTy, body: Box<Expr>, captures: Vec<(HTy, Symbol)>, ) -> Expr { let (param_ty, param) = param; let param_ty = self.rewrite_ty(param_ty); let param = (param_ty, param); let body_ty = self.rewrite_ty(body_ty); let captures = captures .into_iter() .map(|(ty, name)| (self.rewrite_ty(ty), name)) .collect(); Expr::Fun { param, body_ty, captures, body: Box::new(self.transform_expr(*body)), } } fn transform_closure( &mut self, envs: Vec<(HTy, Symbol)>, param_ty: HTy, body_ty: HTy, fname: Symbol, ) -> Expr { let envs = envs .into_iter() .map(|(ty, name)| (self.rewrite_ty(ty), name)) .collect(); let param_ty = self.rewrite_ty(param_ty); let body_ty = self.rewrite_ty(body_ty); Expr::Closure { envs, param_ty, body_ty, fname, } } fn transform_builtin_call(&mut self, ty: HTy, fun: BIF, args: Vec<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::BuiltinCall { ty, fun, args: args .into_iter() .map(|arg| self.transform_expr(arg)) .collect(), } } fn transform_extern_call( &mut self, ty: HTy, module: String, fun: String, args: Vec<Expr>, ) -> Expr { let ty = self.rewrite_ty(ty); Expr::ExternCall { ty, module, fun, args: args .into_iter() .map(|arg| self.transform_expr(arg)) .collect(), } } fn transform_app(&mut self, ty: HTy, fun: Box<Expr>, arg: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::App { ty, fun: Box::new(self.transform_expr(*fun)), arg: Box::new(self.transform_expr(*arg)), } } fn transform_case(&mut self, ty: HTy, cond: Box<Expr>, arms: Vec<(Pattern, Expr)>) -> Expr { let ty = self.rewrite_ty(ty); let mut arms = arms; 
arms = arms .into_iter() .map(|(pat, expr)| { use Pattern::*; let pat = match pat { Constant { value, ty } => { let ty = self.rewrite_ty(ty); Constant { value, ty } } Char { value, ty } => { let ty = self.rewrite_ty(ty); Char { value, ty } } Tuple { tys, tuple } => { let tys = tys.into_iter().map(|ty| self.rewrite_ty(ty)).collect(); Tuple { tys, tuple } } Constructor { descriminant, ty, arg, } =>
, Var { name, ty } => Var { name, ty: self.rewrite_ty(ty), }, }; (pat, expr) }) .collect(); Expr::Case { ty, expr: Box::new(self.transform_expr(*cond)), arms: arms .into_iter() .map(|(pat, expr)| (pat, self.transform_expr(expr))) .collect(), } } fn transform_tuple(&mut self, tys: Vec<HTy>, tuple: Vec<Expr>) -> Expr { let tys = tys.into_iter().map(|ty| self.rewrite_ty(ty)).collect(); Expr::Tuple { tys, tuple: tuple.into_iter().map(|e| self.transform_expr(e)).collect(), } } fn transform_proj(&mut self, ty: HTy, index: u32, tuple: Box<Expr>) -> Expr { let ty = self.rewrite_ty(ty); Expr::Proj { ty, index, tuple: Box::new(self.transform_expr(*tuple)), } } fn transform_constructor( &mut self, ty: HTy, arg: Option<Box<Expr>>, descriminant: u32, ) -> Expr { let name = match &ty { HTy::Datatype(name) => name, _ => unreachable!(), }; if self.is_enum_like(name) { Expr::Lit { ty: HTy::Int, value: Literal::Int(descriminant as i64), } } else { Expr::Constructor { ty, arg, descriminant, } } } fn transform_sym(&mut self, ty: HTy, name: Symbol) -> Expr { let ty = self.rewrite_ty(ty); Expr::Sym { ty, name } } fn transform_lit(&mut self, ty: HTy, value: Literal) -> Expr { let ty = self.rewrite_ty(ty); Expr::Lit { ty, value } } } pub struct ConstructorToEnum {} impl ConstructorToEnum { pub fn new() -> Self { Self {} } } impl<E> Pass<Context, E> for ConstructorToEnum { type Target = Context; fn trans( &mut self, Context(symbol_table, hir): Context, _: &Config, ) -> ::std::result::Result<Self::Target, E> { let mut pass = ConstructorToEnumPass::new(symbol_table); let hir = pass.transform_hir(hir); let symbol_table = pass.symbol_table; Ok(Context(symbol_table, hir)) } }
match ty { HTy::Datatype(name) if self.is_enum_like(&name) => Constant { ty: HTy::Int, value: descriminant as i64, }, ty => Constructor { descriminant, ty, arg, }, }
if_condition
[ { "content": "fn take_binds(expr: Expr) -> (Expr, Vec<Val>) {\n\n use crate::hir::Expr::*;\n\n match expr {\n\n Let { bind, ret, .. } => {\n\n let (expr, mut binds) = take_binds(*ret);\n\n binds.insert(0, *bind);\n\n (expr, binds)\n\n }\n\n BuiltinCall { args, ty, fun } => {\n\n let (args, bindss): (_, Vec<_>) = args.into_iter().map(take_binds).unzip();\n\n let expr = BuiltinCall { fun, args, ty };\n\n (expr, bindss.into_iter().flat_map(Vec::into_iter).collect())\n\n }\n\n ExternCall {\n\n args,\n\n ty,\n\n module,\n\n fun,\n\n } => {\n\n let (args, bindss): (_, Vec<_>) = args.into_iter().map(take_binds).unzip();\n", "file_path": "src/hir/flat_let.rs", "rank": 0, "score": 211401.72633793007 }, { "content": "fn walk_dir(name: impl AsRef<Path>, mut callback: impl for<'a> FnMut(PathBuf)) {\n\n use walkdir::WalkDir;\n\n\n\n for entry in WalkDir::new(name.as_ref())\n\n .into_iter()\n\n .filter(|e| e.as_ref().map(|e| e.file_type().is_file()).unwrap_or(false))\n\n {\n\n let path = entry.unwrap().into_path();\n\n callback(path)\n\n }\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 1, "score": 174514.13233169512 }, { "content": "pub trait Transform<Ty> {\n\n fn transform_ast(&mut self, ast: Core<Ty>) -> Core<Ty> {\n\n AST(ast\n\n .0\n\n .into_iter()\n\n .map(|decl| self.transform_statement(decl))\n\n .collect())\n\n }\n\n\n\n fn transform_statement(&mut self, decl: CoreDeclaration<Ty>) -> CoreDeclaration<Ty> {\n\n use Declaration::*;\n\n match decl {\n\n Datatype { name, constructors } => self.transform_datatype(name, constructors),\n\n Val { rec, pattern, expr } => self.transform_val(rec, pattern, expr),\n\n D(d) => match d {},\n\n }\n\n }\n\n\n\n fn transform_datatype(\n\n &mut self,\n", "file_path": "src/ast/util.rs", "rank": 2, "score": 166001.72655514954 }, { "content": "#[derive(Debug)]\n\nstruct TyEnv {\n\n env: HashMap<Symbol, NodeId>,\n\n symbol_table: SymbolTable,\n\n pool: TypePool,\n\n}\n\n\n", "file_path": "src/ast/typing.rs", "rank": 3, "score": 165919.15688844354 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen)]\n\npub fn compile_string(input: String) -> Result<Vec<u8>, JsValue> {\n\n let mut prelude = include_str!(\"../ml_src/prelude.sml\").to_string();\n\n prelude.push_str(&input);\n\n\n\n let config = Config::default();\n\n compile_str(&prelude, &config).map_err(|e| format!(\"Compile failed: {}\", e).into())\n\n}\n", "file_path": "src/lib.rs", "rank": 4, "score": 154708.52757730283 }, { "content": "fn conv_ty(pool: &UnificationPool<Typing>, ty: Typing) -> Type {\n\n use Typing::*;\n\n match ty {\n\n Variable(id) => Type::Variable(id),\n\n Char => Type::Char,\n\n Int => Type::Int,\n\n Real => Type::Real,\n\n Fun(param, body) => Type::Fun(\n\n Box::new(resolve(pool, param)),\n\n Box::new(resolve(pool, body)),\n\n ),\n\n Tuple(tys) => Type::Tuple(tys.into_iter().map(|ty| resolve(pool, ty)).collect()),\n\n Datatype(type_id) => Type::Datatype(type_id),\n\n OverloadedNum => Type::Int,\n\n OverloadedNumText => Type::Int,\n\n }\n\n}\n\n\n", "file_path": "src/ast/typing.rs", "rank": 5, "score": 153347.60319047267 }, { "content": "fn force_symbol(e: hir::Expr) -> Symbol {\n\n match e {\n\n hir::Expr::Sym { name, .. 
} => name,\n\n e => panic!(\"not a symbol, {:?}\", e),\n\n }\n\n}\n\n\n\nimpl<E> Pass<hir::Context, E> for HIR2MIR {\n\n type Target = Context;\n\n\n\n fn trans(\n\n &mut self,\n\n hir::Context(symbol_table, hir): hir::Context,\n\n _: &Config,\n\n ) -> ::std::result::Result<Self::Target, E> {\n\n let mut pass = self.generate_pass(symbol_table);\n\n let mir = pass.trans_hir(hir);\n\n let symbol_table = pass.generate_symbol_table();\n\n Ok(Context(symbol_table, mir))\n\n }\n\n}\n", "file_path": "src/mir/hir2mir.rs", "rank": 6, "score": 151577.8711583268 }, { "content": "fn conv_ty(ty: ast::Type) -> HTy {\n\n use crate::ast::Type::*;\n\n match ty {\n\n Char => HTy::Char,\n\n Int => HTy::Int,\n\n Real => HTy::Real,\n\n Tuple(tys) => HTy::Tuple(tys.into_iter().map(|ty| conv_ty(ty)).collect()),\n\n Fun(arg, ret) => HTy::fun(conv_ty(*arg), conv_ty(*ret)),\n\n Datatype(name) => HTy::Datatype(name),\n\n Variable(_) => panic!(\"polymorphism is not supported yet\"),\n\n }\n\n}\n\n\n\nimpl AST2HIRPass {\n\n fn new(symbol_table: ast::SymbolTable, id: Id) -> Self {\n\n Self { symbol_table, id }\n\n }\n\n fn symbol_table(&self) -> &ast::SymbolTable {\n\n &self.symbol_table\n\n }\n", "file_path": "src/hir/ast2hir.rs", "rank": 8, "score": 150713.32629863685 }, { "content": "fn read_and_append_to_string(path: impl AsRef<Path>, buf: &mut String) -> io::Result<usize> {\n\n let file = fs::File::open(path)?;\n\n let mut input = io::BufReader::new(file);\n\n input.read_to_string(buf)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 150419.01389869006 }, { "content": "pub fn nspaces(n: usize) -> String {\n\n let mut s = String::new();\n\n for _ in 0..n {\n\n s.push(' ');\n\n }\n\n s\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! inter_iter {\n\n ($v:expr, $inter: expr, |$e: pat| => $body: expr) => {\n\n loop {\n\n let mut itr = $v.into_iter();\n\n let $e = match itr.next() {\n\n Some(e) => e,\n\n None => break,\n\n };\n\n $body;\n\n while let Some($e) = itr.next() {\n\n $inter;\n\n $body;\n\n }\n\n break;\n\n }\n\n };\n\n}\n", "file_path": "src/util.rs", "rank": 10, "score": 147849.98483208747 }, { "content": "fn read_and_append_to_string(path: impl AsRef<Path>, buf: &mut String) -> io::Result<usize> {\n\n let file = fs::File::open(path)?;\n\n let mut input = io::BufReader::new(file);\n\n input.read_to_string(buf)\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 11, "score": 147443.62776258626 }, { "content": "pub fn compile_string(input: String, config: &Config) -> Result<Vec<u8>, TypeError> {\n\n use crate::pass::PrintablePass;\n\n use wasm::Dump;\n\n\n\n let id = id::Id::new();\n\n\n\n let mut passes = compile_pass![\n\n parse: parser::Parser::new(),\n\n desugar: ast::Desugar::new(id.clone()),\n\n rename: ast::Rename::new(id.clone()),\n\n var_to_constructor: ast::VarToConstructor::new(id.clone()),\n\n typing: ast::Typer::new(),\n\n case_simplify: ast::CaseSimplify::new(id.clone()),\n\n ast_to_hir: hir::AST2HIR::new(id.clone()),\n\n constructor_to_enum: hir::ConstructorToEnum::new(),\n\n simplify: hir::Simplify::new(id.clone()),\n\n flattening_expression: hir::FlatExpr::new(id.clone()),\n\n flattening_let: hir::FlatLet::new(),\n\n unnest_functions: hir::UnnestFunc::new(id.clone()),\n\n closure_conversion: hir::ForceClosure::new(),\n", "file_path": "src/lib.rs", "rank": 12, "score": 142962.80046347814 }, { "content": "fn conv_type_info(type_info: ast::TypeInfo) -> TypeInfo {\n\n TypeInfo {\n\n constructors: type_info\n\n .constructors\n\n .into_iter()\n\n .enumerate()\n\n .map(|(des, (_, 
arg))| (des as u32, arg.map(|ty| conv_ty(ty))))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/hir/ast2hir.rs", "rank": 13, "score": 129812.68775464801 }, { "content": "fn lty_to_valuetype(t: &lir::LTy) -> ValueType {\n\n lty_to_valuetype_opt(t).unwrap_or(ValueType::I32)\n\n}\n\n\n", "file_path": "src/backend/wasm.rs", "rank": 14, "score": 123927.45068275726 }, { "content": "pub trait Transform {\n\n fn transform_hir(&mut self, mut hir: HIR) -> HIR {\n\n hir.0 = hir\n\n .0\n\n .into_iter()\n\n .map(|val| self.transform_val(val))\n\n .collect();\n\n hir\n\n }\n\n\n\n fn transform_val(&mut self, mut val: Val) -> Val {\n\n val.expr = self.transform_expr(val.expr);\n\n val\n\n }\n\n\n\n fn transform_expr(&mut self, expr: Expr) -> Expr {\n\n use crate::hir::Expr::*;\n\n match expr {\n\n Let { ty, bind, ret } => self.transform_binds(ty, bind, ret),\n\n Fun {\n", "file_path": "src/hir/util.rs", "rank": 15, "score": 118678.86711771972 }, { "content": "fn add_rt_module(linker: &mut Linker) {\n\n let module_data =\n\n include_bytes!(\"../../webml-rt/target/wasm32-unknown-unknown/release/webml_rt.wasm\");\n\n let module =\n\n Module::from_binary(linker.store(), module_data).expect(\"failed to compile webml_rt\");\n\n let instance = linker\n\n .instantiate(&module)\n\n .expect(\"failed to instanciate webml_rt\");\n\n linker\n\n .instance(\"webml-rt\", &instance)\n\n .expect(\"failed to import webml-rt\");\n\n}\n\n\n", "file_path": "webml-interp/src/lib.rs", "rank": 16, "score": 117223.97305942362 }, { "content": "fn add_ffi_module(linker: &mut Linker) {\n\n linker\n\n .func(\"js-ffi\", \"print\", |x: i32| println!(\"{}\", x))\n\n .expect(\"failed to add ffi functions\");\n\n}\n", "file_path": "webml-interp/src/lib.rs", "rank": 17, "score": 117223.97305942362 }, { "content": "fn add_rt_module(linker: &mut Linker) {\n\n let module_data =\n\n include_bytes!(\"../../webml-rt/target/wasm32-unknown-unknown/release/webml_rt.wasm\");\n\n let module =\n\n Module::from_binary(linker.store(), module_data).expect(\"failed to compile webml_rt\");\n\n let instance = linker\n\n .instantiate(&module)\n\n .expect(\"failed to instanciate webml_rt\");\n\n linker\n\n .instance(\"webml-rt\", &instance)\n\n .expect(\"failed to import webml-rt\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 18, "score": 117223.97305942362 }, { "content": "fn add_ffi_module(linker: &mut Linker) {\n\n linker\n\n .func(\"js-ffi\", \"print\", |x: i32| {\n\n OUTPUT\n\n .with(|out| writeln!(out.borrow_mut(), \"{}\", x))\n\n .expect(\"failed to write\");\n\n })\n\n .expect(\"failed to add ffi functions\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 19, "score": 117223.97305942362 }, { "content": "#[test]\n\nfn parse_funarg_pattern() {\n\n let input = r#\"fun xor (SOME _) (SOME _) = NONE | xor NONE (SOME x) = SOME x | xor (SOME x) NONE = SOME x | xor NONE NONE = NONE\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"xor\"),\n\n clauses: vec![\n\n (\n\n vec![\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Constructor {\n\n name: Symbol::new(\"SOME\"),\n\n arg: Some(Box::new(Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Wildcard {}\n\n }))\n\n }\n\n },\n", "file_path": "tests/tests/parser.rs", "rank": 20, "score": 117182.72301267562 }, { "content": "#[test]\n\nfn parse_fun_pattern() {\n\n let input = r#\"fun f (x, y) = x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n 
AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"f\"),\n\n clauses: vec![(\n\n vec![Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Tuple {\n\n tuple: vec![\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n Pattern {\n\n ty: Empty {},\n", "file_path": "tests/tests/parser.rs", "rank": 21, "score": 117145.2500327373 }, { "content": "fn lty_to_valuetype_opt(t: &lir::LTy) -> Option<ValueType> {\n\n use crate::lir::LTy::*;\n\n match *t {\n\n Unit => None,\n\n I32 => Some(ValueType::I32),\n\n U32 => Some(ValueType::I32),\n\n I64 => Some(ValueType::I64),\n\n U64 => Some(ValueType::I64),\n\n F32 => Some(ValueType::F32),\n\n F64 => Some(ValueType::F64),\n\n FPtr => Some(ValueType::I32),\n\n Ptr => Some(ValueType::I32),\n\n }\n\n}\n\n\n", "file_path": "src/backend/wasm.rs", "rank": 22, "score": 115112.72859694425 }, { "content": "fn with_compile_result(path: impl AsRef<Path>, callback: impl FnOnce(Result<Vec<u8>, TypeError>)) {\n\n let path = path.as_ref();\n\n let mut input = include_str!(\"../../ml_src/prelude.sml\").to_string();\n\n let config = Config::default();\n\n read_and_append_to_string(&path, &mut input).expect(\"failed to load file\");\n\n let result = compile_string(input, &config);\n\n println!(\"{}\", path.to_str().unwrap());\n\n callback(result)\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 23, "score": 114668.75598574211 }, { "content": "#[test]\n\nfn test_pattern_in_funarg() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/pattern_in_funarg.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 24, "score": 112964.50101795208 }, { "content": "#[test]\n\nfn test_tuple_pattern() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/tuple_pattern.sml\"));\n\n\n\n tester.test_output(&module, \"2\\n\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 25, "score": 112952.33031137072 }, { "content": "fn try_unify<'b>(pool: &'b mut UnificationPool<Typing>, t1: Typing, t2: Typing) -> Result<Typing> {\n\n use Typing::*;\n\n match (t1, t2) {\n\n (t1, t2) if t1 == t2 => Ok(t1),\n\n (Int, OverloadedNum) | (OverloadedNum, Int) => Ok(Int),\n\n (Int, OverloadedNumText) | (OverloadedNumText, Int) => Ok(Int),\n\n (Char, OverloadedNumText) | (OverloadedNumText, Char) => Ok(Char),\n\n (Real, OverloadedNum) | (OverloadedNum, Real) => Ok(Real),\n\n (Real, OverloadedNumText) | (OverloadedNumText, Real) => Ok(Real),\n\n (OverloadedNumText, OverloadedNum) | (OverloadedNum, OverloadedNumText) => {\n\n Ok(OverloadedNumText)\n\n }\n\n (Variable(_), ty) | (ty, Variable(_)) => Ok(ty),\n\n (Fun(p1, b1), Fun(p2, b2)) => {\n\n let p = pool.try_unify_with(p1, p2, try_unify)?;\n\n let b = pool.try_unify_with(b1, b2, try_unify)?;\n\n Ok(Fun(p, b))\n\n }\n\n (Tuple(tu1), Tuple(tu2)) => {\n\n if tu1.len() != tu2.len() {\n", "file_path": "src/ast/typing.rs", "rank": 26, "score": 111726.63397591436 }, { "content": "fn fun_type(f: &lir::Function) -> FuncType {\n\n let &lir::Function {\n\n ref nparams,\n\n ref regs,\n\n ref ret_ty,\n\n ..\n\n } = f;\n\n let mut tys = regs\n\n .iter()\n\n .map(|reg| lty_to_valuetype(reg))\n\n .collect::<Vec<_>>();\n\n let _ = tys.split_off(*nparams as usize);\n\n FuncType {\n\n params: tys,\n\n ret: match ret_ty {\n\n ty => lty_to_valuetype_opt(&ty),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/backend/wasm.rs", "rank": 27, "score": 
110511.23585281718 }, { "content": "pub trait Traverse<Ty> {\n\n fn traverse_ast(&mut self, ast: &mut Core<Ty>) {\n\n for decl in ast.0.iter_mut() {\n\n self.traverse_statement(decl)\n\n }\n\n }\n\n\n\n fn traverse_statement(&mut self, decl: &mut CoreDeclaration<Ty>) {\n\n use Declaration::*;\n\n match decl {\n\n Datatype { name, constructors } => self.traverse_datatype(name, constructors),\n\n Val { rec, pattern, expr } => self.traverse_val(rec, pattern, expr),\n\n D(_) => (),\n\n }\n\n }\n\n\n\n fn traverse_datatype(\n\n &mut self,\n\n _name: &mut Symbol,\n\n _constructors: &mut Vec<(Symbol, Option<Type>)>,\n", "file_path": "src/ast/util.rs", "rank": 28, "score": 110472.92931807615 }, { "content": "#[test]\n\nfn test_nested_pattern_in_val() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/nested_pattern_in_val.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 29, "score": 109080.56342332505 }, { "content": "#[test]\n\nfn parse_case_val_pattern_wildcard() {\n\n let input = r#\"val _ = 1\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Wildcard {}\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Literal {\n\n value: Literal::Int(1),\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 30, "score": 109080.56342332505 }, { "content": "#[derive(Debug)]\n\nstruct TypePool {\n\n cache: HashMap<Typing, NodeId>,\n\n pool: UnificationPool<Typing>,\n\n id: Id,\n\n}\n\n\n", "file_path": "src/ast/typing.rs", "rank": 31, "score": 107550.44110241915 }, { "content": "struct Scope<'a>(&'a mut Rename);\n\n\n\nimpl<'a> Deref for Scope<'a> {\n\n type Target = Rename;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<'a> DerefMut for Scope<'a> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl<'a> Drop for Scope<'a> {\n\n fn drop(&mut self) {\n\n self.pos -= 1;\n\n }\n\n}\n", "file_path": "src/ast/rename.rs", "rank": 32, "score": 102890.09176888326 }, { "content": "pub fn linker() -> Linker {\n\n let store = Store::default();\n\n let mut linker = Linker::new(&store);\n\n add_ffi_module(&mut linker);\n\n add_rt_module(&mut linker);\n\n linker\n\n}\n\n\n\npub struct WebmlInterp {\n\n linker: Linker,\n\n}\n\n\n\nimpl WebmlInterp {\n\n pub fn new() -> Self {\n\n Self { linker: linker() }\n\n }\n\n\n\n fn is_wasm(prog: &[u8]) -> bool {\n\n 4 <= prog.len() && &prog[0..4] == b\"\\0asm\"\n\n }\n", "file_path": "webml-interp/src/lib.rs", "rank": 33, "score": 99543.78507938629 }, { "content": "struct Scope<'a>(&'a mut UnnestFunc);\n\n\n\nimpl<'a> Deref for Scope<'a> {\n\n type Target = UnnestFunc;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<'a> DerefMut for Scope<'a> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl<'a> Drop for Scope<'a> {\n\n fn drop(&mut self) {\n\n self.pos -= 1;\n\n }\n\n}\n", "file_path": "src/hir/unnest_func.rs", "rank": 34, "score": 97465.78782317556 }, { "content": "pub fn test_runtime() -> Linker {\n\n let store = Store::default();\n\n let mut linker = Linker::new(&store);\n\n add_ffi_module(&mut linker);\n\n add_rt_module(&mut linker);\n\n linker\n\n}\n\n\n\npub struct TestRuntime {\n\n linker: Linker,\n\n}\n\n\n\nimpl TestRuntime {\n\n pub fn new() -> Self {\n\n TestRuntime 
{\n\n linker: test_runtime(),\n\n }\n\n }\n\n\n\n pub fn output(&self) -> Vec<u8> {\n", "file_path": "webml-test/src/lib.rs", "rank": 35, "score": 96972.53212805408 }, { "content": "// bif -> fn x => _builtincall \"bif\"(x)\n\nstruct WrapBIF {\n\n bif_table: HashMap<String, BIF>,\n\n id: Id,\n\n}\n\nimpl WrapBIF {\n\n fn new(id: Id) -> Self {\n\n Self {\n\n bif_table: BUILTIN_FUNCTIONS\n\n .iter()\n\n .map(|(s, bif)| (s.to_string(), *bif))\n\n .collect(),\n\n id,\n\n }\n\n }\n\n\n\n fn gensym(&mut self, name: impl Into<String>) -> Symbol {\n\n let id = self.id.next();\n\n Symbol(name.into(), id)\n\n }\n\n}\n", "file_path": "src/ast/rename.rs", "rank": 36, "score": 96869.46686638403 }, { "content": "fn resolve(pool: &UnificationPool<Typing>, id: NodeId) -> Type {\n\n conv_ty(pool, pool.value_of(id).clone())\n\n}\n\n\n", "file_path": "src/ast/typing.rs", "rank": 37, "score": 95797.4812206706 }, { "content": "struct VarToConstructorPass {\n\n symbol_table: SymbolTable,\n\n id: Id,\n\n}\n\n\n\nimpl VarToConstructorPass {\n\n fn new(symbol_table: SymbolTable, id: Id) -> Self {\n\n Self { symbol_table, id }\n\n }\n\n\n\n fn into_inner(self) -> (SymbolTable, Id) {\n\n (self.symbol_table, self.id)\n\n }\n\n\n\n fn symbol_table(&self) -> &SymbolTable {\n\n &self.symbol_table\n\n }\n\n\n\n fn is_constructor(&self, name: &Symbol) -> bool {\n\n self.symbol_table()\n", "file_path": "src/ast/var2constructor.rs", "rank": 38, "score": 93757.08573813688 }, { "content": "#[test]\n\nfn parse_char() {\n\n let input = r##\"val x = #\"a\"\"##;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Literal {\n\n value: Literal::Char('a' as u32),\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 39, "score": 87167.09591953631 }, { "content": "fn assert_compile_fail(path: impl AsRef<Path>) {\n\n let path = path.as_ref();\n\n with_compile_result(path, |res| match res {\n\n Ok(_) => panic!(\"succeded to compile {}, which should fail\", path.display(),),\n\n Err(_) => (),\n\n })\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 40, "score": 86470.3452328475 }, { "content": "fn assert_compile_pass(path: impl AsRef<Path>) {\n\n let path = path.as_ref();\n\n with_compile_result(path, |res| match res {\n\n Ok(_) => (),\n\n Err(e) => panic!(\"failed to compile {}: {}\", path.display(), e),\n\n })\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 41, "score": 86470.3452328475 }, { "content": "fn conv_symbol_table(symbol_table: ast::SymbolTable) -> SymbolTable {\n\n SymbolTable {\n\n types: symbol_table\n\n .types\n\n .into_iter()\n\n .map(|(k, v)| (k, conv_type_info(v)))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/hir/ast2hir.rs", "rank": 42, "score": 84431.53775644513 }, { "content": "#[test]\n\nfn parse_case_constructor() {\n\n let input = r#\"val x = case NONE of SOME x => false | NONE => true\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Case {\n\n cond: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: Symbol::new(\"NONE\")\n", "file_path": "tests/tests/parser.rs", "rank": 
43, "score": 84101.26991351388 }, { "content": "#[test]\n\nfn test_char() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/char.sml\"));\n\n\n\n tester.test_output(&module, \"0\\n1\\n\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 44, "score": 84101.26991351388 }, { "content": "#[test]\n\nfn parse_bool_false() {\n\n let input = r#\"val x = false\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Constructor {\n\n arg: None,\n\n name: Symbol::new(\"false\")\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 45, "score": 84094.83951745427 }, { "content": "#[test]\n\nfn parse_case_bool() {\n\n let input = r#\"val x = case true of true => false | false => true\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Case {\n\n cond: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Constructor {\n\n arg: None,\n", "file_path": "tests/tests/parser.rs", "rank": 46, "score": 84094.83951745427 }, { "content": "#[test]\n\nfn parse_bool_true() {\n\n let input = r#\"val x = true\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Constructor {\n\n arg: None,\n\n name: Symbol::new(\"true\")\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 47, "score": 84094.83951745427 }, { "content": "#[test]\n\nfn parse_datatype_tuple() {\n\n let input = r#\"datatype hoge = Hoge of int * real\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Datatype {\n\n name: Symbol::new(\"hoge\"),\n\n constructors: vec![(\n\n Symbol::new(\"Hoge\"),\n\n Some(Type::Tuple(vec![Type::Int, Type::Real]))\n\n ),]\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 48, "score": 84088.55552924905 }, { "content": "#[test]\n\nfn parse_apply_tuple() {\n\n let input = r#\"val x = f(x, y)\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::App {\n\n fun: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: Symbol::new(\"f\")\n", "file_path": "tests/tests/parser.rs", "rank": 49, "score": 84088.55552924905 }, { "content": "#[test]\n\nfn parse_case_tuple() {\n\n let input = r#\"val x = case (1, 2, 3) of (x, y, z) => z\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Case {\n\n cond: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Tuple {\n\n tuple: vec![\n", "file_path": 
"tests/tests/parser.rs", "rank": 50, "score": 84088.55552924905 }, { "content": "#[test]\n\nfn parse_fun_binary() {\n\n let input = r#\"fun f x y = x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"f\"),\n\n clauses: vec![(\n\n vec![\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"y\"),\n\n }\n", "file_path": "tests/tests/parser.rs", "rank": 51, "score": 84063.79693357559 }, { "content": "#[test]\n\nfn parse_fun_unary() {\n\n let input = r#\"fun f x = x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"f\"),\n\n clauses: vec![(\n\n vec![Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n }],\n\n Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: Symbol::new(\"x\"),\n\n }\n\n }\n\n )]\n\n }),])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 52, "score": 84063.79693357559 }, { "content": "#[test]\n\nfn parse_fun_op() {\n\n let input = r#\"fun op+(x, y) = x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"+\"),\n\n clauses: vec![(\n\n vec![Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Tuple {\n\n tuple: vec![\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n Pattern {\n\n ty: Empty {},\n", "file_path": "tests/tests/parser.rs", "rank": 53, "score": 84063.79693357559 }, { "content": "#[test]\n\nfn parse_fun_multiclause() {\n\n let input = r#\"fun f Nil _ = Nil | f _ Nil = Nil\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::D(DerivedDeclaration::Fun {\n\n name: Symbol::new(\"f\"),\n\n clauses: vec![\n\n (\n\n vec![\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"Nil\"),\n\n }\n\n },\n\n Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Wildcard {}\n\n }\n", "file_path": "tests/tests/parser.rs", "rank": 54, "score": 84063.79693357559 }, { "content": "#[test]\n\nfn parse_pattern_unit() {\n\n let input = r#\"val () = ()\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Tuple { tuple: vec![] }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Tuple { tuple: vec![] }\n\n }\n\n }])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 55, "score": 84057.6300537056 }, { "content": "#[test]\n\nfn test_expr_infix_and_app() {\n\n let input = \"true\";\n\n let ret = Parser::new().expr_infix_and_app()(input).unwrap();\n\n assert_eq!(\n\n ret,\n\n (\n\n \"\",\n\n Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Constructor {\n\n arg: None,\n\n name: Symbol::new(\"true\")\n\n }\n\n }\n\n )\n\n )\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 56, "score": 84014.37514426743 }, { "content": "#[test]\n\nfn test_expr_infix_and_app2() {\n\n let input = \"f arg\";\n\n let ret = Parser::new().expr_infix_and_app()(input).unwrap();\n\n assert_eq!(\n\n ret,\n\n (\n\n \"\",\n\n Expr {\n\n ty: Empty {},\n\n inner: ExprKind::App {\n\n fun: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: 
Symbol::new(\"f\"),\n\n }\n\n }\n\n .boxed(),\n\n arg: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n", "file_path": "src/parser.rs", "rank": 57, "score": 84014.37514426743 }, { "content": "pub fn compile(input: &str) -> Vec<u8> {\n\n use webml::{compile_string, Config};\n\n let mut prelude = include_str!(\"../../ml_src/prelude.sml\").to_string();\n\n prelude.push_str(input);\n\n compile_string(prelude, &Config::default()).expect(\"failed to compile\")\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 58, "score": 83882.76923992293 }, { "content": "#[test]\n\nfn parse_multistatement_val_datatype() {\n\n let input = r#\"val version = 1 datatype order = GREATER | EQUAL | LESS\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![\n\n Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"version\")\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Literal {\n\n value: Literal::Int(1)\n\n }\n\n }\n", "file_path": "tests/tests/parser.rs", "rank": 59, "score": 81285.46775885695 }, { "content": "#[test]\n\nfn test_datatype_pattern() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/datatype_pattern.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 60, "score": 81255.86468246032 }, { "content": "#[test]\n\nfn test_multi_clause_fun() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/multi_clause_fun.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 61, "score": 78689.54623483308 }, { "content": "#[test]\n\nfn parse_fun_multiclause_different_fnname() {\n\n let input = r#\"fun f Nil _ = Nil | g _ Nil = Nil\"#;\n\n let ast = parse(input);\n\n assert!(ast.is_err())\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 62, "score": 78689.54623483308 }, { "content": "#[test]\n\nfn test_nested_datatype_pattern() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/nested_datatype_pattern.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 63, "score": 78683.88512948356 }, { "content": "#[panic_handler]\n\nfn panic(_: &PanicInfo) -> ! 
{\n\n // currently no way to handle panic\n\n loop {}\n\n}\n", "file_path": "webml-rt/src/lib.rs", "rank": 64, "score": 68308.35423536207 }, { "content": "struct LIR2WASMPass {\n\n md: ModuleBuilder,\n\n init_fun: FunctionSpaceIndex,\n\n alloc_fun: FunctionSpaceIndex,\n\n extern_functions: HashMap<(String, String), FunctionSpaceIndex>,\n\n function_table: HashMap<Symbol, u32>,\n\n function_type_table: HashMap<FuncType, TypeIndex>,\n\n dynamic_function_table: HashMap<Symbol, u32>,\n\n dynamic_function_elements: Vec<FunctionSpaceIndex>,\n\n}\n\n\n\nimpl LIR2WASMPass {\n\n fn new(\n\n mut md: ModuleBuilder,\n\n extern_functions: HashMap<(String, String), FunctionSpaceIndex>,\n\n mut function_type_table: HashMap<FuncType, TypeIndex>,\n\n ) -> Self {\n\n let init_fun_ty = funtype!(());\n\n let alloc_fun_ty = funtype!((i32) -> i32);\n\n let init_fun_ty_index = md.add_type(init_fun_ty.clone());\n", "file_path": "src/backend/wasm.rs", "rank": 65, "score": 62178.00976125967 }, { "content": "struct AST2HIRPass {\n\n symbol_table: ast::SymbolTable,\n\n id: Id,\n\n}\n\n\n\nimpl AST2HIR {\n\n pub fn new(id: Id) -> Self {\n\n Self { id }\n\n }\n\n\n\n fn generate_pass(&mut self, symbol_table: ast::SymbolTable) -> AST2HIRPass {\n\n AST2HIRPass::new(symbol_table, self.id.clone())\n\n }\n\n}\n\n\n", "file_path": "src/hir/ast2hir.rs", "rank": 66, "score": 62178.00976125967 }, { "content": "struct HIR2MIRPass {\n\n label: u64,\n\n id: Id,\n\n closure_wrapper: HashMap<Symbol, (Symbol, EbbTy, EbbTy)>,\n\n symbol_table: hir::SymbolTable,\n\n}\n\n\n\nimpl HIR2MIRPass {\n\n pub fn new(id: Id, symbol_table: hir::SymbolTable) -> Self {\n\n HIR2MIRPass {\n\n id,\n\n label: 0,\n\n closure_wrapper: HashMap::new(),\n\n symbol_table,\n\n }\n\n }\n\n\n\n fn genlabel(&mut self, name: &str) -> Symbol {\n\n let name = name.to_string();\n\n let label = self.label;\n", "file_path": "src/mir/hir2mir.rs", "rank": 67, "score": 62178.00976125967 }, { "content": "#[repr(C)]\n\nstruct Page {\n\n next: *mut Page,\n\n size: usize,\n\n top: usize,\n\n data: *mut u8,\n\n}\n\n\n\nconst MEMORY: u32 = 0;\n\nconst WASM_PAGE_SIZE: usize = 64 * 1024;\n\n// GC page size including meta data\n\nconst GC_PAGE_SIZE: usize = 1 * WASM_PAGE_SIZE;\n\nstatic mut GC: *mut Page = 0 as *mut _;\n\nstatic mut HEAD: *mut Page = 0 as *mut _;\n\n\n\nunsafe fn new_page() -> *mut Page {\n\n let ret = memory_grow(MEMORY, 1);\n\n // if we failed to allocate a page then panic\n\n if ret == usize::max_value() {\n\n // TODO: collect garbage\n\n panic!(\"memory exhausted\")\n", "file_path": "webml-rt/src/lib.rs", "rank": 68, "score": 62178.00976125967 }, { "content": "#[test]\n\nfn test_fn() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/fn.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 69, "score": 61953.667565095166 }, { "content": "#[test]\n\nfn parse_fn_unary() {\n\n let input = r#\"val f = fn x => x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"f\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Fn {\n\n param: Symbol::new(\"x\"),\n\n body: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: Symbol::new(\"x\"),\n\n }\n\n }\n\n .boxed(),\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 70, "score": 
61953.667565095166 }, { "content": "pub trait PP {\n\n fn pp<W: io::Write>(&self, w: &mut W, indent: usize) -> io::Result<()>;\n\n fn nspaces(n: usize) -> String {\n\n let mut s = String::new();\n\n for _ in 0..n {\n\n s.push(' ');\n\n }\n\n s\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 71, "score": 60796.805510430524 }, { "content": "pub trait Traverse {\n\n fn traverse_hir(&mut self, hir: &mut HIR) {\n\n for val in hir.0.iter_mut() {\n\n self.traverse_val(val)\n\n }\n\n }\n\n\n\n fn traverse_val(&mut self, val: &mut Val) {\n\n self.traverse_expr(&mut val.expr)\n\n }\n\n\n\n fn traverse_expr(&mut self, expr: &mut Expr) {\n\n use crate::hir::Expr::*;\n\n match expr {\n\n Let { ty, bind, ret } => self.traverse_binds(ty, bind, ret),\n\n Fun {\n\n param,\n\n body_ty,\n\n body,\n\n captures,\n", "file_path": "src/hir/util.rs", "rank": 72, "score": 59099.1132966851 }, { "content": "struct Reg<'a> {\n\n t: &'a mut ForceClosure,\n\n bound_name: Option<Symbol>,\n\n}\n\n\n\nimpl<'a> Reg<'a> {\n\n fn new(t: &'a mut ForceClosure, bound_name: Option<Symbol>) -> Self {\n\n Reg { t, bound_name }\n\n }\n\n\n\n fn with_bound_name<F: FnOnce(&mut Self)>(&mut self, bound_name: Option<Symbol>, f: F) {\n\n let prev = self.bound_name.take();\n\n self.bound_name = bound_name;\n\n f(self);\n\n self.bound_name = prev;\n\n }\n\n}\n\n\n\nimpl<'a> Traverse for Reg<'a> {\n\n fn traverse_val(&mut self, val: &mut Val) {\n", "file_path": "src/hir/force_closure.rs", "rank": 73, "score": 59099.1132966851 }, { "content": "struct Trav<'a> {\n\n t: &'a mut ForceClosure,\n\n bound: bool,\n\n}\n\n\n\nimpl<'a> Trav<'a> {\n\n fn new(t: &'a mut ForceClosure, bound: bool) -> Self {\n\n Trav { t, bound }\n\n }\n\n\n\n fn to(&mut self, bound: bool) -> &mut Self {\n\n self.bound = bound;\n\n self\n\n }\n\n\n\n fn bound(&self) -> bool {\n\n self.bound\n\n }\n\n\n\n fn with_bound<F: FnOnce(&mut Self)>(&mut self, bound: bool, f: F) {\n", "file_path": "src/hir/force_closure.rs", "rank": 74, "score": 59099.1132966851 }, { "content": "fn main() {\n\n env_logger::init();\n\n let matches = app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"PRINT_IR\")\n\n .long(\"print-ir\")\n\n .help(\"print the output of IR\")\n\n .value_name(\"IR\")\n\n .takes_value(true)\n\n .multiple(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"file to compile\")\n\n .required(true),\n\n )\n\n .get_matches();\n\n\n\n let filename = matches\n\n .value_of(\"INPUT\")\n", "file_path": "src/main.rs", "rank": 75, "score": 55996.37242192132 }, { "content": "#[test]\n\nfn parse_if() {\n\n let input = r#\"val x = if true then false else true\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::D(DerivedExprKind::If {\n\n cond: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Constructor {\n\n arg: None,\n", "file_path": "tests/tests/parser.rs", "rank": 76, "score": 54158.8016718015 }, { "content": "pub trait Pass<T, E> {\n\n type Target;\n\n fn trans(&mut self, t: T, config: &Config) -> Result<Self::Target, E>;\n\n}\n\n\n\nimpl<In, Out, Err, F> Pass<In, Err> for F\n\nwhere\n\n F: Fn(In) -> Result<Out, Err>,\n\n{\n\n type Target = Out;\n\n fn trans(&mut self, t: In, _: &Config) -> Result<Self::Target, Err> {\n\n let out = self(t)?;\n\n Ok(out)\n\n }\n\n}\n\n\n\npub struct DebugPass<T>(pub T);\n\n\n\nimpl<T, In, Out, Err> 
Pass<In, Err> for DebugPass<T>\n\nwhere\n", "file_path": "src/pass.rs", "rank": 77, "score": 53081.7233304993 }, { "content": "#[test]\n\nfn parse_apply() {\n\n let input = r#\"val x = f x\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::App {\n\n fun: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Symbol {\n\n name: Symbol::new(\"f\")\n", "file_path": "tests/tests/parser.rs", "rank": 78, "score": 52493.079846708686 }, { "content": "fn main() {\n\n let path = args().nth(1).expect(\"Usage: FILE\");\n\n let mut interp = WebmlInterp::new();\n\n interp.run_file(path)\n\n}\n", "file_path": "webml-interp/src/main.rs", "rank": 79, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_builtincall() {\n\n let input = r#\"val ret = _builtincall \"add\" (x, y)\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"ret\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::BuiltinCall {\n\n fun: BIF::Add,\n\n args: vec![\n\n Expr {\n\n ty: Empty {},\n", "file_path": "tests/tests/parser.rs", "rank": 80, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_externcall() {\n\n let input = r#\"val ret = _externcall (\"module\" . \"add\" : (int, int) -> int) (x, y)\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"ret\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::ExternCall {\n\n module: \"module\".into(),\n\n fun: \"add\".into(),\n\n args: vec![\n\n Expr {\n", "file_path": "tests/tests/parser.rs", "rank": 81, "score": 52493.079846708686 }, { "content": "fn visit(\n\n ret: &mut Vec<EBB>,\n\n dones: &mut HashSet<Symbol>,\n\n cur: EBB,\n\n mut blocks: Vec<EBB>,\n\n) -> Vec<EBB> {\n\n if !dones.contains(&cur.name) {\n\n dones.insert(cur.name.clone());\n\n for (next, forward) in cur.next_ebbs().into_iter().rev() {\n\n if forward {\n\n if let Some(idx) = blocks.iter().position(|ebb| &ebb.name == next) {\n\n let b = blocks.swap_remove(idx);\n\n blocks = visit(ret, dones, b, blocks);\n\n }\n\n }\n\n }\n\n ret.push(cur)\n\n }\n\n blocks\n\n}\n", "file_path": "src/mir/block_arrange.rs", "rank": 82, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_binop() {\n\n let input = r#\"infix 6 + val x = 1 + 2\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![\n\n Declaration::D(DerivedDeclaration::Infix {\n\n priority: Some(6),\n\n names: vec![Symbol::new(\"+\")],\n\n }),\n\n Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n", "file_path": "tests/tests/parser.rs", "rank": 83, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_float() {\n\n let input = r#\"val x = 1.0\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr 
{\n\n ty: Empty {},\n\n inner: ExprKind::Literal {\n\n value: Literal::Real(1.0),\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 84, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn pares_comment() {\n\n let input = r#\"(* comment (* is *) nestable *)\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(ast, AST(vec![]));\n\n}\n", "file_path": "tests/tests/parser.rs", "rank": 85, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn test_if() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/if.sml\"));\n\n\n\n tester.test_output(&module, \"1\\n\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 86, "score": 52493.079846708686 }, { "content": "fn display_binop(\n\n f: &mut fmt::Formatter,\n\n space: &str,\n\n name: &str,\n\n var: &Symbol,\n\n ty: &EbbTy,\n\n l: &Symbol,\n\n r: &Symbol,\n\n) -> fmt::Result {\n\n write!(f, \"{}{}: {} := {} {} {}\", space, var, ty, l, name, r)?;\n\n Ok(())\n\n}\n\n\n\nimpl fmt::Display for Op {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use crate::mir::Op::*;\n\n let indent = f.width().unwrap_or(0);\n\n let space = nspaces(indent);\n\n match self {\n\n Lit { var, ty, value } => {\n", "file_path": "src/mir/pp.rs", "rank": 87, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_int() {\n\n let input = r#\"val x = 1\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Literal {\n\n value: Literal::Int(1),\n\n }\n\n },\n\n },])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 88, "score": 52493.079846708686 }, { "content": "#[test]\n\nfn parse_unit() {\n\n let input = r#\"val x = ()\"#;\n\n let ast = parse(input).unwrap();\n\n assert_eq!(\n\n ast,\n\n AST(vec![Declaration::Val {\n\n rec: false,\n\n pattern: Pattern {\n\n ty: Empty {},\n\n inner: PatternKind::Variable {\n\n name: Symbol::new(\"x\"),\n\n }\n\n },\n\n expr: Expr {\n\n ty: Empty {},\n\n inner: ExprKind::Tuple { tuple: vec![] }\n\n }\n\n }])\n\n )\n\n}\n\n\n", "file_path": "tests/tests/parser.rs", "rank": 89, "score": 52493.079846708686 }, { "content": "fn map_window3<I>(\n\n iter: I,\n\n mut f: impl FnMut(I::Item, I::Item, I::Item) -> (I::Item, Option<(I::Item, I::Item)>),\n\n) -> Vec<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n{\n\n let mut ret = vec![];\n\n let mut iter = iter.into_iter();\n\n\n\n let mut e1 = match iter.next() {\n\n Some(e) => e,\n\n None => return ret,\n\n };\n\n\n\n let mut e2 = match iter.next() {\n\n Some(e) => e,\n\n None => {\n\n ret.push(e1);\n\n return ret;\n", "file_path": "src/parser.rs", "rank": 90, "score": 52088.72640397665 }, { "content": "fn map_window2<I>(\n\n iter: I,\n\n mut f: impl FnMut(I::Item, I::Item) -> (I::Item, Option<I::Item>),\n\n) -> Vec<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n{\n\n let mut ret = vec![];\n\n let mut iter = iter.into_iter();\n\n let mut e = match iter.next() {\n\n Some(e) => e,\n\n None => return ret,\n\n };\n\n\n\n while let Some(e2) = iter.next() {\n\n let (e1, e2) = f(e, e2);\n\n match e2 {\n\n Some(e2) => {\n\n ret.push(e1);\n\n e = e2;\n\n }\n\n None => e = e1,\n\n }\n\n }\n\n ret.push(e);\n\n ret\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 91, "score": 52088.72640397665 }, { "content": "#[test]\n\nfn test_fibonacci() {\n\n let mut 
tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/fibonacci.sml\"));\n\n\n\n tester.test_output(&module, \"1\\n1\\n2\\n3\\n5\\n8\\n\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 92, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_closures() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/closures.sml\"));\n\n\n\n tester.test_output(&module, \"3\\n\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 93, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn examples_compile_pass() {\n\n walk_dir(\"ml_example\", assert_compile_pass)\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 94, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_datatype() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/datatype.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 95, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_branches() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/branches.sml\"));\n\n\n\n tester.test_output(&module, \"1\\n\");\n\n}\n\n\n", "file_path": "webml-test/src/lib.rs", "rank": 96, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_compile_pass() {\n\n walk_dir(\"tests/compile_pass\", assert_compile_pass)\n\n}\n\n\n", "file_path": "tests/tests/compile.rs", "rank": 97, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_infix() {\n\n let mut tester = TestRuntime::new();\n\n let module = compile(include_str!(\"../../ml_example/infix.sml\"));\n\n\n\n tester.test_output(&module, \"\");\n\n}\n", "file_path": "webml-test/src/lib.rs", "rank": 98, "score": 50976.17695454388 }, { "content": "#[test]\n\nfn test_compile_fail() {\n\n walk_dir(\"tests/compile_fail\", assert_compile_fail)\n\n}\n", "file_path": "tests/tests/compile.rs", "rank": 99, "score": 50976.17695454388 } ]
Rust
tests/delete_component.rs
colelawrence/shipyard
bd535706a4ec53f5162e4aae6b8c9480e5f8bc75
use core::any::type_name; use shipyard::error; use shipyard::internal::iterators; use shipyard::prelude::*; #[test] fn no_pack() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); usizes.delete(entity1); assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); } #[test] fn tight() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); (&mut usizes, &mut u32s).tight_pack(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); Delete::<(usize,)>::delete((&mut usizes, &mut u32s), entity1); assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); let iter = (&usizes, &u32s).iter(); if let iterators::Iter2::Tight(mut iter) = iter { assert_eq!(iter.next(), Some((&2, &3))); assert_eq!(iter.next(), None); } else { panic!("not packed"); } } #[test] fn loose() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); (&mut usizes, &mut u32s).loose_pack(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); Delete::<(usize,)>::delete((&mut usizes, &mut u32s), entity1); assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); let mut iter = (&usizes, &u32s).iter(); assert_eq!(iter.next(), Some((&2, &3))); assert_eq!(iter.next(), None); } #[test] fn tight_loose() { let world = World::new(); let (mut entities, mut usizes, mut u64s, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>(); (&mut usizes, &mut u64s).tight_pack(); LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s)); let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2)); let entity2 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (3, 4, 5)); entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (6, 7, 8)); Delete::<(u32,)>::delete((&mut u32s, &mut usizes, &mut u64s), entity1); let mut iter = (&usizes, &u64s).iter(); assert_eq!(iter.next(), Some((&0, &1))); assert_eq!(iter.next(), Some((&3, &4))); assert_eq!(iter.next(), Some((&6, &7))); assert_eq!(iter.next(), None); let iter = (&usizes, &u64s, &u32s).iter(); if let iterators::Iter3::Loose(mut iter) = iter { assert_eq!(iter.next(), Some((&6, &7, &8))); assert_eq!(iter.next(), Some((&3, &4, &5))); assert_eq!(iter.next(), None); } let component = Remove::<(usize,)>::remove((&mut usizes, &mut u32s, &mut u64s), entity2); assert_eq!(component, (Some(3),)); let mut iter = (&usizes, &u64s).iter(); assert_eq!(iter.next(), Some((&0, &1))); assert_eq!(iter.next(), Some((&6, &7))); assert_eq!(iter.next(), 
None); let mut iter = (&usizes, &u64s, &u32s).iter(); assert_eq!(iter.next(), Some((&6, &7, &8))); assert_eq!(iter.next(), None); } #[test] fn update() { let world = World::new(); let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>(); usizes.update_pack(); let entity1 = entities.add_entity(&mut usizes, 0); let entity2 = entities.add_entity(&mut usizes, 2); usizes.delete(entity1); assert_eq!( usizes.get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(usizes.len(), 1); assert_eq!(usizes.inserted().len(), 1); assert_eq!(usizes.modified().len(), 0); assert_eq!(usizes.deleted().len(), 1); assert_eq!(usizes.take_deleted(), vec![(entity1, 0)]); } #[test] fn strip() { let world = World::new(); let (entity1, entity2) = world.run::<(EntitiesMut, &mut usize, &mut u32), _, _>( |(mut entities, mut usizes, mut u32s)| { ( entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)), entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)), ) }, ); world.run::<AllStorages, _, _>(|mut all_storages| { all_storages.strip(entity1); }); world.run::<(&mut usize, &mut u32), _, _>(|(mut usizes, mut u32s)| { assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!( (&mut u32s).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<u32>(), }) ); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); }); world.run::<AllStorages, _, _>(|mut all_storages| { assert!(all_storages.delete(entity1)); }); }
use core::any::type_name; use shipyard::error; use shipyard::internal::iterators; use shipyard::prelude::*; #[test] fn no_pack() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); usizes.delete(entity1); assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); } #[test] fn tight() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); (&mut usizes, &mut u32s).tight_pack(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); Delete::<(usize,)>::delete((&mut usizes, &mut u32s), entity1);
#[test] fn loose() { let world = World::new(); let (mut entities, mut usizes, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u32)>(); (&mut usizes, &mut u32s).loose_pack(); let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)); let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)); Delete::<(usize,)>::delete((&mut usizes, &mut u32s), entity1); assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); let mut iter = (&usizes, &u32s).iter(); assert_eq!(iter.next(), Some((&2, &3))); assert_eq!(iter.next(), None); } #[test] fn tight_loose() { let world = World::new(); let (mut entities, mut usizes, mut u64s, mut u32s) = world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>(); (&mut usizes, &mut u64s).tight_pack(); LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s)); let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2)); let entity2 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (3, 4, 5)); entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (6, 7, 8)); Delete::<(u32,)>::delete((&mut u32s, &mut usizes, &mut u64s), entity1); let mut iter = (&usizes, &u64s).iter(); assert_eq!(iter.next(), Some((&0, &1))); assert_eq!(iter.next(), Some((&3, &4))); assert_eq!(iter.next(), Some((&6, &7))); assert_eq!(iter.next(), None); let iter = (&usizes, &u64s, &u32s).iter(); if let iterators::Iter3::Loose(mut iter) = iter { assert_eq!(iter.next(), Some((&6, &7, &8))); assert_eq!(iter.next(), Some((&3, &4, &5))); assert_eq!(iter.next(), None); } let component = Remove::<(usize,)>::remove((&mut usizes, &mut u32s, &mut u64s), entity2); assert_eq!(component, (Some(3),)); let mut iter = (&usizes, &u64s).iter(); assert_eq!(iter.next(), Some((&0, &1))); assert_eq!(iter.next(), Some((&6, &7))); assert_eq!(iter.next(), None); let mut iter = (&usizes, &u64s, &u32s).iter(); assert_eq!(iter.next(), Some((&6, &7, &8))); assert_eq!(iter.next(), None); } #[test] fn update() { let world = World::new(); let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>(); usizes.update_pack(); let entity1 = entities.add_entity(&mut usizes, 0); let entity2 = entities.add_entity(&mut usizes, 2); usizes.delete(entity1); assert_eq!( usizes.get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(usizes.len(), 1); assert_eq!(usizes.inserted().len(), 1); assert_eq!(usizes.modified().len(), 0); assert_eq!(usizes.deleted().len(), 1); assert_eq!(usizes.take_deleted(), vec![(entity1, 0)]); } #[test] fn strip() { let world = World::new(); let (entity1, entity2) = world.run::<(EntitiesMut, &mut usize, &mut u32), _, _>( |(mut entities, mut usizes, mut u32s)| { ( entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32)), entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32)), ) }, ); world.run::<AllStorages, _, _>(|mut all_storages| { all_storages.strip(entity1); }); world.run::<(&mut usize, &mut u32), _, _>(|(mut usizes, mut u32s)| { assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!( (&mut u32s).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<u32>(), }) ); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), 
Ok(&3)); }); world.run::<AllStorages, _, _>(|mut all_storages| { assert!(all_storages.delete(entity1)); }); }
assert_eq!( (&mut usizes).get(entity1), Err(error::MissingComponent { id: entity1, name: type_name::<usize>(), }) ); assert_eq!((&mut u32s).get(entity1), Ok(&mut 1)); assert_eq!(usizes.get(entity2), Ok(&2)); assert_eq!(u32s.get(entity2), Ok(&3)); let iter = (&usizes, &u32s).iter(); if let iterators::Iter2::Tight(mut iter) = iter { assert_eq!(iter.next(), Some((&2, &3))); assert_eq!(iter.next(), None); } else { panic!("not packed"); } }
function_block-function_prefix_line
[ { "content": "#[system(Test3)]\n\nfn run(_: &mut Entities, _: &mut u32) {}\n\n\n", "file_path": "tests/derive/good.rs", "rank": 0, "score": 274503.01059633156 }, { "content": "#[system(Test1)]\n\nfn run(_: &usize, _: &mut i32, _: &Entities, _: Unique<&u32>, _: Entities) {}\n\n\n", "file_path": "tests/derive/good.rs", "rank": 1, "score": 269259.5767997026 }, { "content": "#[system(Test2)]\n\nfn run(_: EntitiesMut, _: &mut u32) {}\n\n\n", "file_path": "tests/derive/good.rs", "rank": 2, "score": 268706.32310617797 }, { "content": "#[system(Lifetime)]\n\nfn run(_: &'a usize, _: &'b mut i32, _: &'c Entities, _: Unique<&'d u32>, _: Entities) {}\n\n\n", "file_path": "tests/derive/good.rs", "rank": 3, "score": 262344.3792166379 }, { "content": "#[system(Double)]\n\nfn run(_: &u32, _: &mut u32) {}\n\n\n", "file_path": "tests/derive/double_borrow.rs", "rank": 4, "score": 240048.23880777846 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0, 1));\n\n assert_eq!((&usizes, &u32s).get(entity1).unwrap(), (&0, &1));\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 5, "score": 233936.52323839796 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n drop((entities, usizes, u32s));\n\n\n\n let mut all_storages = world.borrow::<AllStorages>();\n\n assert!(all_storages.delete(entity1));\n\n assert!(!all_storages.delete(entity1));\n\n drop(all_storages);\n\n\n\n let (usizes, u32s) = world.borrow::<(&usize, &u32)>();\n\n\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n", "file_path": "tests/delete_entity.rs", "rank": 6, "score": 233936.52323839796 }, { "content": "#[system(DoubleUnique)]\n\nfn run(_: &u32, _: Unique<&mut u32>) {}\n\n\n", "file_path": "tests/derive/double_borrow.rs", "rank": 7, "score": 231922.51593431592 }, { "content": "#[system(UniqueDouble)]\n\nfn run(_: Unique<&u32>, _: &mut u32) {}\n\n\n", "file_path": "tests/derive/double_borrow.rs", "rank": 8, "score": 231922.51593431592 }, { "content": "#[system(UniqueUnique)]\n\nfn run(_: Unique<&u32>, _: Unique<&mut u32>) {}\n\n\n", "file_path": "tests/derive/double_borrow.rs", "rank": 9, "score": 224419.47849058878 }, { "content": "#[system(RefMut)]\n\nfn run(ref mut usizes: &mut usize) {\n\n let _result: Result<&mut usize, _> = usizes.get(EntityId::dead());\n\n}\n\n\n", "file_path": "tests/book/syntactic_peculiarities.rs", "rank": 10, "score": 219130.57665732253 }, { "content": "#[system(Test4)]\n\nfn run(_: &Entities, _: &mut Entities) {}\n\n\n", "file_path": "tests/derive/entities.rs", "rank": 11, "score": 214550.64815409566 }, { "content": "#[system(Test2)]\n\nfn run(_: Entities, _: &mut Entities) {}\n\n\n", "file_path": "tests/derive/entities.rs", "rank": 12, "score": 214550.64815409566 }, { "content": "#[system(DestroyEmpty)]\n\nfn run(_: &mut Entities, _: &mut Empty) {}\n\n\n", "file_path": "tests/book/systems.rs", "rank": 13, "score": 212414.73854082118 }, { "content": "#[system(Modules)]\n\nfn run(_: &mut mod1::Test, _: &mut mod2::Test) 
{}\n\n\n", "file_path": "tests/derive/good.rs", "rank": 14, "score": 210193.38364568737 }, { "content": "#[test]\n\nfn entity_id_serde() {\n\n let world = World::default();\n\n\n\n //create and check a couple entities\n\n let (entity_id0, _) = world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n let entity_id0 = entities.add_entity(&mut u32s, 0);\n\n check_roundtrip(entity_id0, \"[0,0]\");\n\n\n\n let entity_id1 = entities.add_entity(&mut u32s, 1);\n\n check_roundtrip(entity_id1, \"[1,0]\");\n\n\n\n (entity_id0, entity_id1)\n\n });\n\n\n\n //delete the first entity\n\n world.run::<AllStorages, _, _>(|mut all_storages| {\n\n assert!(all_storages.delete(entity_id0));\n\n });\n\n\n\n //add 2 more\n\n world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n let entity_id2 = entities.add_entity(&mut u32s, 2);\n\n //version was bumped\n\n check_roundtrip(entity_id2, \"[0,1]\");\n\n\n\n let entity_id3 = entities.add_entity(&mut u32s, 1);\n\n check_roundtrip(entity_id3, \"[2,0]\");\n\n });\n\n}\n\n\n", "file_path": "tests/serde/entity_id.rs", "rank": 15, "score": 204992.38240901136 }, { "content": "#[system(Ref)]\n\nfn run(ref usizes: &usize, ref u32s: &u32) {\n\n (usizes, u32s).get(EntityId::dead()).unwrap();\n\n (usizes, u32s).get(EntityId::dead()).unwrap();\n\n (usizes, u32s).get(EntityId::dead()).unwrap();\n\n (usizes, u32s).get(EntityId::dead()).unwrap();\n\n}\n", "file_path": "tests/book/syntactic_peculiarities.rs", "rank": 16, "score": 203751.33225555572 }, { "content": "#[test]\n\nfn test() {\n\n let _world = World::default();\n\n let _world = World::new();\n\n}\n", "file_path": "tests/book/world.rs", "rank": 17, "score": 199997.7171545101 }, { "content": "#[system(Test8)]\n\nfn run(_: AllStorages, _: &mut Entities) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 18, "score": 198280.77024753764 }, { "content": "#[test]\n\nfn not_all_tight() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s, mut u16s, mut f32s) =\n\n world.borrow::<(EntitiesMut, &mut u32, &mut u16, &mut f32)>();\n\n\n\n (&mut u32s, &mut u16s).tight_pack();\n\n\n\n entities.add_entity((&mut u32s, &mut u16s, &mut f32s), (0, 0, 0.));\n\n entities.add_entity((&mut f32s, &mut u16s, &mut u32s), (0., 0, 0));\n\n\n\n assert!((&mut u32s, &mut u16s).iter().count() > 0);\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 19, "score": 197637.88827641975 }, { "content": "#[test]\n\nfn test() {\n\n let entity_id = EntityId::dead();\n\n let world = World::new();\n\n\n\n world.borrow::<AllStorages>().delete(entity_id);\n\n}\n", "file_path": "tests/book/delete_entities.rs", "rank": 20, "score": 196287.104980521 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n drop((entities, usizes, u32s));\n\n\n\n let mut all_storages = world.borrow::<AllStorages>();\n\n all_storages.clear();\n\n drop(all_storages);\n\n\n\n let (usizes, u32s) = world.borrow::<(&usize, &u32)>();\n\n\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n", "file_path": "tests/clear.rs", "rank": 21, "score": 194466.8428263943 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n let (mut entities, 
mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n let component = Remove::<(usize,)>::remove((&mut usizes, &mut u32s), entity1);\n\n assert_eq!(component, (Some(0usize),));\n\n assert_eq!(\n\n (&mut usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n\n name: type_name::<usize>(),\n\n })\n\n );\n\n assert_eq!((&mut u32s).get(entity1), Ok(&mut 1));\n\n assert_eq!(usizes.get(entity2), Ok(&2));\n\n assert_eq!(u32s.get(entity2), Ok(&3));\n\n let iter = (&usizes, &u32s).iter();\n\n if let iterators::Iter2::Tight(mut iter) = iter {\n\n assert_eq!(iter.next(), Some((&2, &3)));\n\n assert_eq!(iter.next(), None);\n\n } else {\n\n panic!(\"not packed\");\n\n }\n\n}\n\n\n", "file_path": "tests/remove.rs", "rank": 22, "score": 194466.8428263943 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n (&mut u32s, &mut i16s).tight_pack();\n\n let entity0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let entity1 = entities.add_entity(&mut u32s, 1);\n\n let entity2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let entity3 = entities.add_entity(&mut i16s, 13);\n\n let entity4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n assert_eq!(u32s.get(entity0), Ok(&0));\n\n assert_eq!(u32s.get(entity1), Ok(&1));\n\n assert_eq!(u32s.get(entity2), Ok(&2));\n\n assert!(u32s.get(entity3).is_err());\n\n assert_eq!(u32s.get(entity4), Ok(&4));\n\n\n\n assert_eq!(i16s.get(entity0), Ok(&10));\n\n assert!(i16s.get(entity1).is_err());\n\n assert_eq!(i16s.get(entity2), Ok(&12));\n\n assert_eq!(i16s.get(entity3), Ok(&13));\n\n assert_eq!(i16s.get(entity4), Ok(&14));\n\n\n\n assert_eq!((&u32s, &i16s).get(entity0), Ok((&0, &10)));\n\n assert!((&u32s, &i16s).get(entity1).is_err());\n\n assert_eq!((&u32s, &i16s).get(entity2), Ok((&2, &12)));\n\n assert!((&u32s, &i16s).get(entity3).is_err());\n\n assert_eq!((&u32s, &i16s).get(entity4), Ok((&4, &14)));\n\n}\n\n\n", "file_path": "tests/get.rs", "rank": 23, "score": 194466.8428263943 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n\n\n let (key0, key1, key2) =\n\n world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n (\n\n entities.add_entity(&mut u32s, 0),\n\n entities.add_entity(&mut u32s, 1),\n\n entities.add_entity(&mut u32s, 2),\n\n )\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&u32, _, _>(|u32s| {\n\n u32s.iter().with_id().for_each(|(id, &x)| vec.push((id, x)));\n\n });\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s)\n\n .iter()\n\n .with_id()\n", "file_path": "tests/iteration/tight/single.rs", "rank": 24, "score": 192538.4694921466 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = 
u32s.iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &0));\n\n assert_eq!(iter.next().unwrap(), (key2, &2));\n\n assert_eq!(iter.next().unwrap(), (key4, &4));\n\n assert_eq!(iter.next().unwrap(), (key1, &1));\n\n assert!(iter.next().is_none());\n\n let mut iter = (&mut u32s).iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &mut 0));\n\n assert_eq!(iter.next().unwrap(), (key2, &mut 2));\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 25, "score": 192538.4694921466 }, { "content": "#[test]\n\nfn tight_loose() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2));\n\n assert_eq!((&usizes, &u64s, &u32s).get(entity1).unwrap(), (&0, &1, &2));\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 26, "score": 192274.14430238924 }, { "content": "#[system(Test1)]\n\nfn run(_: Entities, _: EntitiesMut) {}\n\n\n", "file_path": "tests/derive/entities.rs", "rank": 27, "score": 191659.0162480241 }, { "content": "#[system(Test3)]\n\nfn run(_: &Entities, _: EntitiesMut) {}\n\n\n", "file_path": "tests/derive/entities.rs", "rank": 28, "score": 191659.0162480241 }, { "content": "fn check_roundtrip(entity_id: EntityId, expected: &str) {\n\n assert_eq!(expected, serde_json::to_string(&entity_id).unwrap());\n\n let new_entity_id: EntityId = serde_json::from_str(expected).unwrap();\n\n assert_eq!(entity_id, new_entity_id);\n\n}\n", "file_path": "tests/serde/entity_id.rs", "rank": 29, "score": 191006.70948658325 }, { "content": "#[test]\n\nfn no_pack() {\n\n let world = World::default();\n\n world.run::<EntitiesMut, _, _>(|mut entities| {\n\n entities.add_entity((), ());\n\n });\n\n world.run::<(EntitiesMut, &mut usize, &mut u32), _, _>(\n\n |(mut entities, mut usizes, mut u32s)| {\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0, 1));\n\n assert_eq!((&usizes, &u32s).get(entity1).unwrap(), (&0, &1));\n\n },\n\n );\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 30, "score": 190279.4258413322 }, { "content": "#[test]\n\nfn no_pack() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n drop((entities, usizes, u32s));\n\n\n\n let mut all_storages = world.borrow::<AllStorages>();\n\n assert!(all_storages.delete(entity1));\n\n assert!(!all_storages.delete(entity1));\n\n drop(all_storages);\n\n\n\n let (usizes, u32s) = world.borrow::<(&usize, &u32)>();\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n\n name: type_name::<usize>(),\n", "file_path": "tests/delete_entity.rs", "rank": 31, "score": 190279.4258413322 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n let entity1 = entities.add_entity((), ());\n\n entities.add_component((&mut usizes, &mut u32s), (0, 1), entity1);\n\n entities.add_component((&mut usizes, &mut u32s), (3usize,), entity1);\n\n assert_eq!((&usizes, 
&u32s).get(entity1).unwrap(), (&3, &1));\n\n let mut iter = (&usizes, &u32s).iter();\n\n assert_eq!(iter.next(), Some((&3, &1)));\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "tests/add_component.rs", "rank": 32, "score": 190205.41681257385 }, { "content": "#[test]\n\nfn tight() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n (&mut usizes, &mut u32s).tight_pack();\n\n\n\n let _entity0 = entities.add_entity(&mut usizes, 0);\n\n let _entity1 = entities.add_entity((&mut usizes, &mut u32s), (1, 11));\n\n}\n\n\n", "file_path": "tests/book/packs.rs", "rank": 33, "score": 190205.41681257382 }, { "content": "#[test]\n\nfn delete_tight_loose() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (3, 4, 5));\n\n\n\n drop((entities, usizes, u64s, u32s));\n\n world.run::<AllStorages, _, _>(|mut all_storages| {\n\n assert!(all_storages.delete(entity1));\n\n assert!(!all_storages.delete(entity1));\n\n });\n\n\n\n world.run::<(&usize, &u64, &u32), _, _>(|(usizes, u64s, u32s)| {\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n", "file_path": "tests/delete_entity.rs", "rank": 35, "score": 187236.06588153064 }, { "content": "#[test]\n\nfn enumerate_map_filter_with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s\n\n .iter()\n\n .enumerate()\n\n .map(|(i, x)| (i * 3, x))\n\n .filter(|(_, x)| **x % 2 == 0)\n\n .with_id();\n\n\n\n assert_eq!(iter.next().unwrap(), (key0, (0, &0)));\n\n assert_eq!(iter.next().unwrap(), (key2, (3, &2)));\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 36, "score": 178266.68780070692 }, { "content": "#[test]\n\nfn enumerate_filter_map_with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, x)| **x % 2 == 0)\n\n .map(|(i, x)| (i * 3, x))\n\n .with_id();\n\n\n\n assert_eq!(iter.next().unwrap(), (key0, (0, &0)));\n\n assert_eq!(iter.next().unwrap(), (key2, (3, &2)));\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 37, "score": 178266.68780070692 }, { "content": "#[test]\n\nfn enumerate_map_filter_with_id() {\n\n let world = World::new();\n\n\n\n let (key0, _, key2) = 
world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n let result = (\n\n entities.add_entity(&mut u32s, 10),\n\n entities.add_entity(&mut u32s, 11),\n\n entities.add_entity(&mut u32s, 12),\n\n );\n\n result\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s)\n\n .iter()\n\n .enumerate()\n\n .map(|(i, x)| (i * 3, x))\n\n .filter(|&(i, _)| i % 2 == 0)\n\n .with_id()\n\n .for_each(|(id, (i, &mut x))| vec.push((i, id, x)));\n\n });\n\n\n\n assert_eq!(vec, vec![(0, key0, 10), (6, key2, 12)]);\n\n}\n\n\n", "file_path": "tests/iteration/tight/single.rs", "rank": 38, "score": 178266.68780070692 }, { "content": "#[test]\n\nfn enumerate_filter_map_with_id() {\n\n let world = World::new();\n\n\n\n let (key0, _, key2) = world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n let result = (\n\n entities.add_entity(&mut u32s, 10),\n\n entities.add_entity(&mut u32s, 11),\n\n entities.add_entity(&mut u32s, 12),\n\n );\n\n result\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s)\n\n .iter()\n\n .enumerate()\n\n .filter(|&(i, _)| i % 2 == 0)\n\n .map(|(i, x)| (i * 3, x))\n\n .with_id()\n\n .for_each(|(id, (i, &mut x))| vec.push((i, id, x)));\n\n });\n\n\n\n assert_eq!(vec, vec![(0, key0, 10), (6, key2, 12)]);\n\n}\n\n\n", "file_path": "tests/iteration/tight/single.rs", "rank": 39, "score": 178266.68780070692 }, { "content": "#[system(Test6)]\n\nfn run(_: AllStorages, _: EntitiesMut) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 40, "score": 175389.13834146608 }, { "content": "#[system(Test4)]\n\nfn run( _: &mut i32, _: &mut AllStorages) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 41, "score": 173690.02685487052 }, { "content": "#[test]\n\nfn entity_id() {\n\n let mut entity_id = EntityId::new(0);\n\n assert_eq!(entity_id.index(), 0);\n\n assert_eq!(entity_id.version(), 0);\n\n entity_id.set_index(701);\n\n assert_eq!(entity_id.index(), 701);\n\n assert_eq!(entity_id.version(), 0);\n\n entity_id.bump_version().unwrap();\n\n entity_id.bump_version().unwrap();\n\n entity_id.bump_version().unwrap();\n\n assert_eq!(entity_id.index(), 701);\n\n assert_eq!(entity_id.version(), 3);\n\n entity_id.set_index(554);\n\n assert_eq!(entity_id.index(), 554);\n\n assert_eq!(entity_id.version(), 3);\n\n}\n", "file_path": "src/storage/entity/entity_id/mod.rs", "rank": 42, "score": 169702.444079198 }, { "content": "#[system(Test1)]\n\nfn run(_: AllStorages, _: &mut i32) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 43, "score": 158415.08963799232 }, { "content": "#[system(Test2)]\n\nfn run(_: &mut AllStorages, _: &i32) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 44, "score": 158415.08963799232 }, { "content": "#[system(Test3)]\n\nfn run( _: &mut i32, _: AllStorages) {}\n\n\n", "file_path": "tests/derive/all_storages.rs", "rank": 45, "score": 158415.08963799232 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\nfn expand_system(name: syn::Ident, mut run: syn::ItemFn) -> Result<TokenStream> {\n\n if run.sig.ident != \"run\" {\n\n return Err(Error::new(\n\n Span::call_site(),\n\n \"Systems have only one method: run\",\n\n ));\n\n }\n\n if !run.sig.generics.params.is_empty() {\n\n return Err(Error::new_spanned(\n\n run.sig.generics,\n\n \"run should not take generic arguments\",\n\n ));\n\n }\n\n if run.sig.generics.where_clause.is_some() {\n\n return Err(Error::new_spanned(\n\n 
run.sig.generics.where_clause,\n\n \"run should not take a where clause\",\n\n ));\n\n }\n\n\n", "file_path": "shipyard_proc/src/lib.rs", "rank": 46, "score": 156579.92225095304 }, { "content": "#[test]\n\nfn test() {\n\n let world = World::new();\n\n world.run_system::<CreateEmpty>();\n\n world.add_workload::<(CreateEmpty, DestroyEmpty), _>(\"Empty Cycle\");\n\n world.run_workload(\"Empty Cycle\");\n\n world.run_default();\n\n world.run::<(EntitiesMut, &mut Empty), _, _>(|_| {});\n\n}\n", "file_path": "tests/book/systems.rs", "rank": 47, "score": 156255.89167559106 }, { "content": "#[system(Test)]\n\nfn test() {}\n\n\n", "file_path": "tests/derive/not_run.rs", "rank": 48, "score": 156255.81143790236 }, { "content": "#[system(Test)]\n\nfn run(_: Not<Entities>) {}\n\n\n", "file_path": "tests/derive/not_entities.rs", "rank": 49, "score": 154631.6392156721 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n\n\n let positions = world.borrow::<&Position>();\n\n\n\n (&positions).iter().with_id().for_each(|(id, pos)| {\n\n println!(\"Entity {:?} is at {:?}\", id, pos);\n\n });\n\n}\n\n\n", "file_path": "tests/book/iterators.rs", "rank": 50, "score": 154217.3131943062 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).loose_pack();\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s.iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &0));\n\n assert_eq!(iter.next().unwrap(), (key2, &2));\n\n assert_eq!(iter.next().unwrap(), (key4, &4));\n\n assert_eq!(iter.next().unwrap(), (key1, &1));\n\n assert!(iter.next().is_none());\n\n let mut iter = (&mut u32s).iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &mut 0));\n\n assert_eq!(iter.next().unwrap(), (key2, &mut 2));\n", "file_path": "tests/iteration/loose.rs", "rank": 51, "score": 154217.3131943062 }, { "content": "#[test]\n\nfn not_all_loose() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s, mut u16s, mut f32s) =\n\n world.borrow::<(EntitiesMut, &mut u32, &mut u16, &mut f32)>();\n\n\n\n (&mut u32s, &mut u16s).loose_pack();\n\n\n\n entities.add_entity((&mut u32s, &mut u16s, &mut f32s), (0, 0, 0.));\n\n entities.add_entity((&mut f32s, &mut u16s, &mut u32s), (0., 0, 0));\n\n\n\n assert!((&mut u32s, &mut u16s).iter().count() > 0);\n\n}\n", "file_path": "tests/add_entity.rs", "rank": 52, "score": 153944.4449051815 }, { "content": "#[test]\n\nfn update() {\n\n let world = World::new();\n\n let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>();\n\n usizes.update_pack();\n\n let entity = entities.add_entity(&mut usizes, 0);\n\n assert_eq!(usizes.inserted().len(), 1);\n\n assert_eq!(usizes[entity], 0);\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 53, "score": 153944.44490518147 }, { "content": "#[test]\n\nfn update() {\n\n let world = World::new();\n\n let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>();\n\n\n\n usizes.update_pack();\n\n let entity1 = entities.add_entity(&mut usizes, 0);\n\n let entity2 = entities.add_entity(&mut usizes, 2);\n\n drop((entities, usizes));\n\n\n\n let mut all_storages = 
world.borrow::<AllStorages>();\n\n assert!(all_storages.delete(entity1));\n\n assert!(!all_storages.delete(entity1));\n\n drop(all_storages);\n\n\n\n let mut usizes = world.borrow::<&mut usize>();\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n\n name: type_name::<usize>(),\n\n })\n\n );\n\n assert_eq!(usizes.get(entity2), Ok(&2));\n\n assert_eq!(usizes.deleted().len(), 1);\n\n assert_eq!(usizes.take_deleted(), vec![(entity1, 0)]);\n\n}\n", "file_path": "tests/delete_entity.rs", "rank": 54, "score": 153944.44490518147 }, { "content": "#[test]\n\nfn loose() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).loose_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0, 1));\n\n assert_eq!((&usizes, &u32s).get(entity1).unwrap(), (&0, &1));\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 55, "score": 153944.4449051815 }, { "content": "fn main() {}\n", "file_path": "tests/derive/entities.rs", "rank": 56, "score": 153939.27319211754 }, { "content": "fn main() {}\n", "file_path": "tests/derive/not_entities.rs", "rank": 57, "score": 153939.27319211754 }, { "content": "#[test]\n\nfn tight_sort() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n entities.add_entity((&mut usizes, &mut u32s), (10usize, 3u32));\n\n entities.add_entity((&mut usizes, &mut u32s), (5usize, 9u32));\n\n entities.add_entity((&mut usizes, &mut u32s), (1usize, 5u32));\n\n entities.add_entity((&mut usizes, &mut u32s), (3usize, 54u32));\n\n\n\n (&mut usizes, &mut u32s)\n\n .sort()\n\n .unstable(|(&x1, &y1), (&x2, &y2)| (x1 + y1 as usize).cmp(&(x2 + y2 as usize)));\n\n\n\n let mut prev = 0;\n\n (&mut usizes, &mut u32s)\n\n .iter()\n\n .for_each(|(&mut x, &mut y)| {\n\n assert!(prev <= x + y as usize);\n\n prev = x + y as usize;\n\n });\n\n}\n\n\n", "file_path": "tests/sort.rs", "rank": 58, "score": 153906.7818505956 }, { "content": "#[test]\n\nfn tight_loose() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (3, 4, 5));\n\n entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (6, 7, 8));\n\n let component = Remove::<(u32,)>::remove((&mut u32s, &mut usizes, &mut u64s), entity1);\n\n assert_eq!(component, (Some(2),));\n\n let mut iter = (&usizes, &u64s).iter();\n\n assert_eq!(iter.next(), Some((&0, &1)));\n\n assert_eq!(iter.next(), Some((&3, &4)));\n\n assert_eq!(iter.next(), Some((&6, &7)));\n\n assert_eq!(iter.next(), None);\n\n let iter = (&usizes, &u64s, &u32s).iter();\n\n if let iterators::Iter3::Loose(mut iter) = iter {\n\n assert_eq!(iter.next(), Some((&6, &7, &8)));\n", "file_path": "tests/remove.rs", "rank": 59, "score": 153906.78185059558 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n\n\n let (key0, key1, key2, key3, key4) = world.run::<(EntitiesMut, &mut u32, &mut i16), _, _>(\n\n |(mut entities, mut u32s, mut i16s)| {\n\n (\n\n entities.add_entity((&mut u32s, &mut i16s), 
(0, 10)),\n\n entities.add_entity(&mut u32s, 1),\n\n entities.add_entity((&mut u32s, &mut i16s), (2, 12)),\n\n entities.add_entity(&mut i16s, 13),\n\n entities.add_entity((&mut u32s, &mut i16s), (4, 14)),\n\n )\n\n },\n\n );\n\n\n\n world.run::<&u32, _, _>(|u32s| {\n\n let mut iter = u32s.iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &0));\n\n assert_eq!(iter.next().unwrap(), (key1, &1));\n\n assert_eq!(iter.next().unwrap(), (key2, &2));\n", "file_path": "tests/iteration/non_packed.rs", "rank": 60, "score": 150213.002698981 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n u32s.update_pack();\n\n i16s.update_pack();\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n u32s.clear_inserted();\n\n i16s.clear_inserted();\n\n\n\n drop((u32s, i16s));\n\n world.run::<&u32, _, _>(|u32s| {\n\n let mut iter = (&u32s).iter().with_id();\n\n assert_eq!(iter.next().unwrap(), (key0, &0));\n\n assert_eq!(iter.next().unwrap(), (key1, &1));\n\n assert_eq!(iter.next().unwrap(), (key2, &2));\n", "file_path": "tests/iteration/update/multiple.rs", "rank": 61, "score": 150213.00269898103 }, { "content": "#[test]\n\nfn with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s) = world.borrow::<(EntitiesMut, &mut u32)>();\n\n\n\n u32s.update_pack();\n\n let key0 = entities.add_entity(&mut u32s, 0);\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity(&mut u32s, 2);\n\n u32s.clear_inserted();\n\n\n\n drop(u32s);\n\n let mut vec = Vec::new();\n\n world.run::<&u32, _, _>(|u32s| {\n\n u32s.iter().with_id().for_each(|(id, &x)| vec.push((id, x)));\n\n });\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s)\n\n .iter()\n\n .with_id()\n\n .for_each(|(id, &mut x)| vec.push((id, x)));\n", "file_path": "tests/iteration/update/single.rs", "rank": 62, "score": 150213.002698981 }, { "content": "#[test]\n\nfn test2() {\n\n let world = World::new();\n\n let entity0;\n\n let entity1;\n\n {\n\n let (mut entities, mut u32s, mut f32s) =\n\n world.borrow::<(EntitiesMut, &mut u32, &mut f32)>();\n\n entity0 = entities.add_entity(&mut u32s, 10);\n\n entity1 = entities.add_entity(&mut f32s, 20.0);\n\n let _entity2 = entities.add_entity(&mut u32s, 30);\n\n }\n\n let mut all_storages = world.borrow::<AllStorages>();\n\n all_storages.delete(entity0);\n\n all_storages.delete(entity1);\n\n}\n", "file_path": "tests/book/world_insides.rs", "rank": 63, "score": 149959.0609647451 }, { "content": "#[test]\n\nfn test1() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut f32s) = world.borrow::<(EntitiesMut, &mut u32, &mut f32)>();\n\n let _entity0 = entities.add_entity(&mut u32s, 10);\n\n let _entity1 = entities.add_entity(&mut f32s, 20.0);\n\n let _entity2 = entities.add_entity(&mut u32s, 30);\n\n}\n\n\n", "file_path": "tests/book/world_insides.rs", "rank": 64, "score": 149959.0609647451 }, { "content": "#[test]\n\nfn single() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut empties) = world.borrow::<(EntitiesMut, &mut Empty)>();\n\n\n\n let _entity = entities.add_entity(&mut empties, Empty);\n\n}\n\n\n", "file_path": "tests/book/add_entities.rs", "rank": 65, 
"score": 149948.67750922366 }, { "content": "#[test]\n\nfn none() {\n\n let world = World::new();\n\n\n\n let _entity = world.borrow::<EntitiesMut>().add_entity((), ());\n\n}\n", "file_path": "tests/book/add_entities.rs", "rank": 66, "score": 149948.67750922366 }, { "content": "#[test]\n\nfn modified_update() {\n\n let world = World::new();\n\n let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>();\n\n usizes.update_pack();\n\n let entity1 = entities.add_entity(&mut usizes, 1);\n\n usizes.clear_inserted_and_modified();\n\n usizes[entity1] = 3;\n\n let entity2 = entities.add_entity(&mut usizes, 2);\n\n assert_eq!(usizes.inserted().len(), 1);\n\n assert_eq!(*usizes.get(entity1).unwrap(), 3);\n\n assert_eq!(*usizes.get(entity2).unwrap(), 2);\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 67, "score": 149948.67750922366 }, { "content": "#[test]\n\nfn delete_loose() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n (&mut usizes, &mut u32s).loose_pack();\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n\n\n drop((entities, usizes, u32s));\n\n world.run::<AllStorages, _, _>(|mut all_storages| {\n\n assert!(all_storages.delete(entity1));\n\n assert!(!all_storages.delete(entity1));\n\n });\n\n\n\n world.run::<(&usize, &u32), _, _>(|(usizes, u32s)| {\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n", "file_path": "tests/delete_entity.rs", "rank": 68, "score": 149948.67750922366 }, { "content": "#[test]\n\nfn multiple() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut empties, mut counts) =\n\n world.borrow::<(EntitiesMut, &mut Empty, &mut Count)>();\n\n\n\n let _entity = entities.add_entity((&mut empties, &mut counts), (Empty, Count(0)));\n\n}\n\n\n", "file_path": "tests/book/add_entities.rs", "rank": 69, "score": 149948.67750922366 }, { "content": "#[test]\n\nfn cleared_update() {\n\n let world = World::new();\n\n let (mut entities, mut usizes) = world.borrow::<(EntitiesMut, &mut usize)>();\n\n usizes.update_pack();\n\n let entity1 = entities.add_entity(&mut usizes, 1);\n\n usizes.clear_inserted_and_modified();\n\n let entity2 = entities.add_entity(&mut usizes, 2);\n\n assert_eq!(usizes.inserted().len(), 1);\n\n assert_eq!(*usizes.get(entity1).unwrap(), 1);\n\n assert_eq!(*usizes.get(entity2).unwrap(), 2);\n\n}\n\n\n", "file_path": "tests/add_entity.rs", "rank": 70, "score": 149948.67750922366 }, { "content": "fn main() {}\n", "file_path": "tests/derive/unique_entities.rs", "rank": 71, "score": 149943.50579615973 }, { "content": "#[test]\n\nfn tight_loose_sort() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n\n\n entities.add_entity((&mut usizes, &mut u64s), (3, 4));\n\n entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (6, 7, 8));\n\n entities.add_entity((&mut usizes,), (5,));\n\n entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2));\n\n\n\n (&mut usizes, &mut u64s)\n\n .sort()\n\n .unstable(|(&x1, &y1), (&x2, &y2)| (x1 + y1 as usize).cmp(&(x2 + y2 as usize)));\n\n\n\n if let iterators::Iter3::Loose(mut iter) = (&usizes, &u64s, &u32s).iter() 
{\n\n assert_eq!(iter.next(), Some((&6, &7, &8)));\n\n assert_eq!(iter.next(), Some((&0, &1, &2)));\n", "file_path": "tests/sort.rs", "rank": 72, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn basic() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n entities.add_entity(&mut i16s, 13);\n\n entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s.iter();\n\n assert_eq!(iter.size_hint(), (4, Some(4)));\n\n assert_eq!(iter.next().unwrap(), &0);\n\n assert_eq!(iter.next().unwrap(), &2);\n\n assert_eq!(iter.next().unwrap(), &4);\n\n assert_eq!(iter.next().unwrap(), &1);\n\n assert!(iter.next().is_none());\n\n let mut iter = u32s.iter();\n\n assert_eq!(iter.next().unwrap(), &mut 0);\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 74, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn map() {\n\n let world = World::new();\n\n\n\n world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n entities.add_entity(&mut u32s, 0);\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity(&mut u32s, 2);\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&u32, _, _>(|u32s| {\n\n u32s.iter().map(|x| *x + 10).for_each(|x| vec.push(x));\n\n });\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s).iter().map(|x| *x + 1).for_each(|x| vec.push(x));\n\n });\n\n\n\n assert_eq!(vec, vec![10, 11, 12, 1, 2, 3]);\n\n}\n\n\n", "file_path": "tests/iteration/tight/single.rs", "rank": 75, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn off_by_one() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s) = world.borrow::<(EntitiesMut, &mut u32)>();\n\n\n\n entities.add_entity(&mut u32s, 0);\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity(&mut u32s, 2);\n\n\n\n let window = u32s.as_window(1..);\n\n let iter = (&window).iter();\n\n assert_eq!(iter.size_hint(), (2, Some(2)));\n\n assert_eq!(iter.collect::<Vec<_>>(), vec![&1, &2]);\n\n\n\n let window = window.as_window(1..);\n\n let iter = window.iter();\n\n assert_eq!(iter.size_hint(), (1, Some(1)));\n\n assert_eq!(iter.collect::<Vec<_>>(), vec![&2]);\n\n}\n", "file_path": "tests/iteration/tight/single.rs", "rank": 76, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn map() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n entities.add_entity(&mut i16s, 13);\n\n entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s.iter().map(Clone::clone);\n\n assert_eq!(iter.next().unwrap(), 0);\n\n assert_eq!(iter.next().unwrap(), 2);\n\n assert_eq!(iter.next().unwrap(), 4);\n\n assert_eq!(iter.next().unwrap(), 1);\n\n assert!(iter.next().is_none());\n\n let mut iter = u32s.iter().map(|x| *x);\n\n assert_eq!(iter.next().unwrap(), 0);\n\n assert_eq!(iter.next().unwrap(), 2);\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 77, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn delete_tight_loose() {\n\n let world = World::new();\n\n let (mut 
entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n let entity1 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u64s, &mut u32s), (3, 4, 5));\n\n\n\n drop((entities, usizes, u64s, u32s));\n\n world.run::<AllStorages, _, _>(|mut all_storages| {\n\n all_storages.clear();\n\n });\n\n\n\n world.run::<(&usize, &u64, &u32), _, _>(|(usizes, u64s, u32s)| {\n\n assert_eq!(\n\n (&usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n", "file_path": "tests/clear.rs", "rank": 78, "score": 149912.1936287325 }, { "content": "#[test]\n\nfn basic() {\n\n let world = World::new();\n\n\n\n world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n entities.add_entity(&mut u32s, 0);\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity(&mut u32s, 2);\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&u32, _, _>(|u32s| {\n\n let iter = u32s.iter();\n\n assert_eq!(iter.size_hint(), (3, Some(3)));\n\n iter.for_each(|&x| vec.push(x));\n\n });\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s).iter().for_each(|&mut x| vec.push(x));\n\n });\n\n\n\n assert_eq!(vec, vec![0, 1, 2, 0, 1, 2]);\n\n}\n\n\n", "file_path": "tests/iteration/tight/single.rs", "rank": 79, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn filter() {\n\n let world = World::new();\n\n\n\n world.run::<(EntitiesMut, &mut u32), _, _>(|(mut entities, mut u32s)| {\n\n entities.add_entity(&mut u32s, 0);\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity(&mut u32s, 2);\n\n });\n\n\n\n let mut vec = Vec::new();\n\n world.run::<&u32, _, _>(|u32s| {\n\n let iter = u32s.iter();\n\n assert_eq!(iter.size_hint(), (3, Some(3)));\n\n iter.filter(|&&x| x % 2 == 0).for_each(|&x| vec.push(x));\n\n });\n\n world.run::<&mut u32, _, _>(|mut u32s| {\n\n (&mut u32s)\n\n .iter()\n\n .filter(|&&mut x| x % 2 != 0)\n\n .for_each(|&mut x| vec.push(x));\n\n });\n\n\n\n assert_eq!(vec, vec![0, 2, 1]);\n\n}\n\n\n", "file_path": "tests/iteration/tight/single.rs", "rank": 80, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn filter() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).tight_pack();\n\n entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n entities.add_entity(&mut i16s, 13);\n\n entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s.iter().filter(|x| **x % 2 == 0);\n\n assert_eq!(iter.size_hint(), (0, Some(4)));\n\n assert_eq!(iter.next().unwrap(), &0);\n\n assert_eq!(iter.next().unwrap(), &2);\n\n assert_eq!(iter.next().unwrap(), &4);\n\n assert!(iter.next().is_none());\n\n let mut iter = u32s.iter().filter(|x| **x % 2 != 0);\n\n assert_eq!(iter.next().unwrap(), &mut 1);\n\n assert!(iter.next().is_none());\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 81, "score": 149912.19362873246 }, { "content": "#[test]\n\nfn off_by_one() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n (&mut u32s, &mut i16s).tight_pack();\n\n\n\n entities.add_entity((&mut u32s, &mut i16s), (0, 
10));\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n entities.add_entity(&mut i16s, 13);\n\n entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let u32_window = u32s.as_window(1..);\n\n let iter = (&u32_window, &i16s).iter();\n\n assert_eq!(iter.size_hint(), (0, Some(3)));\n\n assert_eq!(iter.collect::<Vec<_>>(), vec![(&2, &12), (&4, &14)]);\n\n\n\n let u32_window = u32_window.as_window(1..);\n\n let iter = (&u32_window, &i16s).iter();\n\n assert_eq!(iter.size_hint(), (0, Some(2)));\n", "file_path": "tests/iteration/tight/multiple.rs", "rank": 82, "score": 149912.19362873246 }, { "content": "#[cfg(feature = \"parallel\")]\n\n#[cfg_attr(miri, ignore)]\n\n#[test]\n\nfn tight_parallel_iterator() {\n\n use iterators::ParIter2;\n\n use rayon::prelude::*;\n\n\n\n let world = World::new();\n\n\n\n world.run::<(EntitiesMut, &mut usize, &mut u32), _, _>(\n\n |(mut entities, mut usizes, mut u32s)| {\n\n (&mut usizes, &mut u32s).tight_pack();\n\n entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n },\n\n );\n\n\n\n world.run::<(&mut usize, &u32, ThreadPool), _, _>(|(mut usizes, u32s, thread_pool)| {\n\n let counter = std::sync::atomic::AtomicUsize::new(0);\n\n thread_pool.install(|| {\n\n if let ParIter2::Tight(iter) = (&mut usizes, &u32s).par_iter() {\n\n iter.for_each(|(x, y)| {\n\n counter.fetch_add(1, std::sync::atomic::Ordering::SeqCst);\n", "file_path": "tests/lib.rs", "rank": 83, "score": 149911.67979667935 }, { "content": "#[test]\n\nfn no_pack() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n let entity1 = entities.add_entity((&mut usizes, &mut u32s), (0usize, 1u32));\n\n let entity2 = entities.add_entity((&mut usizes, &mut u32s), (2usize, 3u32));\n\n let component = usizes.remove(entity1);\n\n assert_eq!(component, Some(0usize));\n\n assert_eq!(\n\n (&mut usizes).get(entity1),\n\n Err(error::MissingComponent {\n\n id: entity1,\n\n name: type_name::<usize>(),\n\n })\n\n );\n\n assert_eq!((&mut u32s).get(entity1), Ok(&mut 1));\n\n assert_eq!(usizes.get(entity2), Ok(&2));\n\n assert_eq!(u32s.get(entity2), Ok(&3));\n\n}\n\n\n", "file_path": "tests/remove.rs", "rank": 84, "score": 149350.3882559934 }, { "content": "#[test]\n\nfn no_pack() {\n\n let world = World::new();\n\n\n\n let (mut entities, mut u32s) = world.borrow::<(EntitiesMut, &mut u32)>();\n\n\n\n entities.add_entity(&mut u32s, 0);\n\n entities.add_entity(&mut u32s, 1);\n\n entities.add_entity(&mut u32s, 2);\n\n\n\n drop((entities, u32s));\n\n world.borrow::<AllStorages>().clear();\n\n\n\n let (mut entities, mut u32s) = world.borrow::<(EntitiesMut, &mut u32)>();\n\n\n\n assert_eq!(u32s.len(), 0);\n\n let entity0 = entities.add_entity(&mut u32s, 3);\n\n let entity1 = entities.add_entity(&mut u32s, 4);\n\n let entity2 = entities.add_entity(&mut u32s, 5);\n\n let entity3 = entities.add_entity(&mut u32s, 5);\n\n\n", "file_path": "tests/clear.rs", "rank": 85, "score": 149350.3882559934 }, { "content": "#[system(Test)]\n\nfn run(_: &usize) where usize: Debug {}\n\n\n", "file_path": "tests/derive/where.rs", "rank": 86, "score": 148996.96434036834 }, { "content": "#[system(Test)]\n\nfn run(_: Unique<Entities>) {}\n\n\n", "file_path": "tests/derive/unique_entities.rs", "rank": 87, "score": 147850.05267874527 }, { "content": "#[test]\n\nfn no_pack() {\n\n let world = World::new();\n\n let (mut entities, 
mut usizes, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32)>();\n\n\n\n let entity1 = entities.add_entity((), ());\n\n entities.add_component((&mut usizes, &mut u32s), (0, 1), entity1);\n\n entities.add_component((&mut u32s, &mut usizes), (3, 2), entity1);\n\n assert_eq!((&usizes, &u32s).get(entity1).unwrap(), (&2, &3));\n\n}\n\n\n", "file_path": "tests/add_component.rs", "rank": 88, "score": 146548.31941550807 }, { "content": "#[system(MutNonSendSys)]\n\nfn run(_: NonSend<&mut NonSendStruct>) {}\n\n\n", "file_path": "tests/derive/good_non_send.rs", "rank": 90, "score": 146366.60797278496 }, { "content": "#[system(MutNonSyncSys)]\n\nfn run(_: NonSync<&mut NonSyncStruct>) {}\n\n\n", "file_path": "tests/derive/good_non_sync.rs", "rank": 91, "score": 146366.60797278496 }, { "content": "pub fn register_workloads(world: &World) {\n\n world.add_workload::<(Start, HandleController, Update, Commit, Render, End), _>(TICK);\n\n}\n\n\n", "file_path": "demo/src/systems.rs", "rank": 92, "score": 146253.99806800592 }, { "content": "#[test]\n\nfn tight_sort_too_many_storages() {\n\n let world = World::new();\n\n let (mut usizes, mut u64s, mut u32s) = world.borrow::<(&mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n assert_eq!(\n\n (&mut usizes, &mut u64s, &mut u32s)\n\n .sort()\n\n .try_unstable(|(&x1, &y1, &z1), (&x2, &y2, &z2)| {\n\n (x1 + y1 as usize + z1 as usize).cmp(&(x2 + y2 as usize + z2 as usize))\n\n })\n\n .err(),\n\n Some(error::Sort::TooManyStorages)\n\n );\n\n}\n\n\n", "file_path": "tests/sort.rs", "rank": 93, "score": 146160.14117838748 }, { "content": "#[test]\n\nfn not_enough_to_tightly_pack() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u32s, mut f32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u32, &mut f32)>();\n\n\n\n (&mut usizes, &mut u32s).tight_pack();\n\n entities.add_entity(&mut u32s, 0);\n\n let entity = entities.add_entity((), ());\n\n entities.add_component((&mut f32s, &mut u32s, &mut usizes), (1., 1), entity);\n\n\n\n let mut iter = (&u32s).iter();\n\n assert_eq!(iter.next(), Some(&0));\n\n assert_eq!(iter.next(), Some(&1));\n\n assert_eq!(iter.next(), None);\n\n\n\n assert_eq!((&u32s, &f32s).get(entity), Ok((&1, &1.)));\n\n}\n\n\n", "file_path": "tests/add_component.rs", "rank": 94, "score": 146160.14117838748 }, { "content": "#[test]\n\nfn tight_sort_missing_storage() {\n\n let world = World::new();\n\n let (mut usizes, mut u64s) = world.borrow::<(&mut usize, &mut u64)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n assert_eq!(\n\n usizes.sort().try_unstable(Ord::cmp).err(),\n\n Some(error::Sort::MissingPackStorage)\n\n );\n\n}\n\n\n", "file_path": "tests/sort.rs", "rank": 95, "score": 146160.14117838748 }, { "content": "#[test]\n\nfn enumerate_filter_map_with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).loose_pack();\n\n\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, x)| **x % 2 == 0)\n\n .map(|(i, x)| (i * 3, x))\n\n .with_id();\n\n\n\n assert_eq!(iter.next().unwrap(), (key0, (0, &0)));\n", "file_path": 
"tests/iteration/loose.rs", "rank": 96, "score": 142912.269819567 }, { "content": "#[test]\n\nfn enumerate_map_filter_with_id() {\n\n let world = World::new();\n\n let (mut entities, mut u32s, mut i16s) = world.borrow::<(EntitiesMut, &mut u32, &mut i16)>();\n\n\n\n (&mut u32s, &mut i16s).loose_pack();\n\n\n\n let key0 = entities.add_entity((&mut u32s, &mut i16s), (0, 10));\n\n let key1 = entities.add_entity(&mut u32s, 1);\n\n let key2 = entities.add_entity((&mut u32s, &mut i16s), (2, 12));\n\n let key3 = entities.add_entity(&mut i16s, 13);\n\n let key4 = entities.add_entity((&mut u32s, &mut i16s), (4, 14));\n\n\n\n let mut iter = u32s\n\n .iter()\n\n .enumerate()\n\n .map(|(i, x)| (i * 3, x))\n\n .filter(|(_, x)| **x % 2 == 0)\n\n .with_id();\n\n\n\n assert_eq!(iter.next().unwrap(), (key0, (0, &0)));\n", "file_path": "tests/iteration/loose.rs", "rank": 97, "score": 142912.269819567 }, { "content": "#[test]\n\nfn tight_loose_add_component() {\n\n let world = World::new();\n\n let (mut entities, mut usizes, mut u64s, mut u32s) =\n\n world.borrow::<(EntitiesMut, &mut usize, &mut u64, &mut u32)>();\n\n\n\n (&mut usizes, &mut u64s).tight_pack();\n\n LoosePack::<(u32,)>::loose_pack((&mut u32s, &mut usizes, &mut u64s));\n\n let entity1 = entities.add_entity((), ());\n\n entities.add_component((&mut usizes, &mut u64s, &mut u32s), (0, 1, 2), entity1);\n\n entities.add_component((&mut u32s, &mut u64s, &mut usizes), (5, 4, 3), entity1);\n\n assert_eq!((&usizes, &u64s, &u32s).get(entity1).unwrap(), (&3, &4, &5));\n\n let mut iter = (&usizes, &u32s, &u64s).iter();\n\n assert_eq!(iter.next(), Some((&3, &5, &4)));\n\n assert_eq!(iter.next(), None);\n\n let mut iter = (&usizes, &u64s).iter();\n\n assert_eq!(iter.next(), Some((&3, &4)));\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "tests/add_component.rs", "rank": 98, "score": 142629.1865779249 }, { "content": "#[system(Test)]\n\nfn run(_: &usize) -> () {}\n\n\n", "file_path": "tests/derive/return_nothing.rs", "rank": 99, "score": 142349.95135947602 } ]
Rust
solo-machine/src/server/ibc.rs
devashishdxt/ibc-solo-machine
76cb11d81e4777e96d4babbfb81381e50a48d57e
tonic::include_proto!("ibc");

use std::time::SystemTime;

use k256::ecdsa::VerifyingKey;
use solo_machine_core::{
    cosmos::crypto::{PublicKey, PublicKeyAlgo},
    ibc::core::ics24_host::identifier::ChainId,
    service::IbcService as CoreIbcService,
    DbPool, Event, Signer,
};
use tokio::sync::mpsc::UnboundedSender;
use tonic::{Request, Response, Status};

use self::ibc_server::Ibc;

const DEFAULT_MEMO: &str = "solo-machine-memo";

pub struct IbcService<S> {
    core_service: CoreIbcService,
    signer: S,
}

impl<S> IbcService<S> {
    pub fn new(db_pool: DbPool, notifier: UnboundedSender<Event>, signer: S) -> Self {
        let core_service = CoreIbcService::new_with_notifier(db_pool, notifier);

        Self {
            core_service,
            signer,
        }
    }
}

#[tonic::async_trait]
impl<S> Ibc for IbcService<S>
where
    S: Signer + Send + Sync + 'static,
{
    async fn connect(
        &self,
        request: Request<ConnectRequest>,
    ) -> Result<Response<ConnectResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let request_id = request.request_id;
        let force = request.force;

        self.core_service
            .connect(&self.signer, chain_id, request_id, memo, force)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(ConnectResponse {}))
    }

    async fn mint(&self, request: Request<MintRequest>) -> Result<Response<MintResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let amount = request.amount;
        let denom = request
            .denom
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let receiver = request.receiver_address;

        let transaction_hash = self
            .core_service
            .mint(
                &self.signer,
                chain_id,
                request_id,
                amount,
                denom,
                receiver,
                memo,
            )
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(MintResponse { transaction_hash }))
    }

    async fn burn(&self, request: Request<BurnRequest>) -> Result<Response<BurnResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let amount = request.amount;
        let denom = request
            .denom
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;

        let transaction_hash = self
            .core_service
            .burn(&self.signer, chain_id, request_id, amount, denom, memo)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(BurnResponse { transaction_hash }))
    }

    async fn update_signer(
        &self,
        request: Request<UpdateSignerRequest>,
    ) -> Result<Response<UpdateSignerResponse>, Status> {
        let request = request.into_inner();

        let chain_id: ChainId = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let new_public_key_bytes = hex::decode(&request.new_public_key)
            .map_err(|err| Status::invalid_argument(err.to_string()))?;
        let new_verifying_key = VerifyingKey::from_sec1_bytes(&new_public_key_bytes)
            .map_err(|err| Status::invalid_argument(err.to_string()))?;
        let public_key_algo = request
            .public_key_algo
            .map(|s| s.parse())
            .transpose()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?
            .unwrap_or(PublicKeyAlgo::Secp256k1);

        let new_public_key = match public_key_algo {
            PublicKeyAlgo::Secp256k1 => PublicKey::Secp256k1(new_verifying_key),
            #[cfg(feature = "ethermint")]
            PublicKeyAlgo::EthSecp256k1 => PublicKey::EthSecp256k1(new_verifying_key),
        };

        self.core_service
            .update_signer(&self.signer, chain_id, request_id, new_public_key, memo)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(UpdateSignerResponse {}))
    }

    async fn query_history(
        &self,
        request: Request<QueryHistoryRequest>,
    ) -> Result<Response<QueryHistoryResponse>, Status> {
        let request = request.into_inner();

        let limit = request.limit.unwrap_or(10);
        let offset = request.offset.unwrap_or(0);

        let history = self
            .core_service
            .history(&self.signer, limit, offset)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        let response = QueryHistoryResponse {
            operations: history
                .into_iter()
                .map(|op| Operation {
                    id: op.id,
                    request_id: op.request_id,
                    address: op.address,
                    denom: op.denom.to_string(),
                    amount: op.amount,
                    operation_type: op.operation_type.to_string(),
                    transaction_hash: op.transaction_hash,
                    created_at: Some(SystemTime::from(op.created_at).into()),
                })
                .collect(),
        };

        Ok(Response::new(response))
    }
}
tonic::include_proto!("ibc");

use std::time::SystemTime;

use k256::ecdsa::VerifyingKey;
use solo_machine_core::{
    cosmos::crypto::{PublicKey, PublicKeyAlgo},
    ibc::core::ics24_host::identifier::ChainId,
    service::IbcService as CoreIbcService,
    DbPool, Event, Signer,
};
use tokio::sync::mpsc::UnboundedSender;
use tonic::{Request, Response, Status};

use self::ibc_server::Ibc;

const DEFAULT_MEMO: &str = "solo-machine-memo";

pub struct IbcService<S> {
    core_service: CoreIbcService,
    signer: S,
}

impl<S> IbcService<S> {
    pub fn new(db_pool: DbPool, notifier: UnboundedSender<Event>, signer: S) -> Self {
        let core_service = CoreIbcService::new_with_notifier(db_pool, notifier);

        Self {
            core_service,
            signer,
        }
    }
}

#[tonic::async_trait]
impl<S> Ibc for IbcService<S>
where
    S: Signer + Send + Sync + 'static,
{
    async fn connect(
        &self,
        request: Request<ConnectRequest>,
    ) -> Result<Response<ConnectResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
    async fn mint(&self, request: Request<MintRequest>) -> Result<Response<MintResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let amount = request.amount;
        let denom = request
            .denom
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let receiver = request.receiver_address;

        let transaction_hash = self
            .core_service
            .mint(
                &self.signer,
                chain_id,
                request_id,
                amount,
                denom,
                receiver,
                memo,
            )
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(MintResponse { transaction_hash }))
    }

    async fn burn(&self, request: Request<BurnRequest>) -> Result<Response<BurnResponse>, Status> {
        let request = request.into_inner();

        let chain_id = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let amount = request.amount;
        let denom = request
            .denom
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;

        let transaction_hash = self
            .core_service
            .burn(&self.signer, chain_id, request_id, amount, denom, memo)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(BurnResponse { transaction_hash }))
    }

    async fn update_signer(
        &self,
        request: Request<UpdateSignerRequest>,
    ) -> Result<Response<UpdateSignerResponse>, Status> {
        let request = request.into_inner();

        let chain_id: ChainId = request
            .chain_id
            .parse()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?;
        let request_id = request.request_id;
        let memo = request.memo.unwrap_or_else(|| DEFAULT_MEMO.to_owned());
        let new_public_key_bytes = hex::decode(&request.new_public_key)
            .map_err(|err| Status::invalid_argument(err.to_string()))?;
        let new_verifying_key = VerifyingKey::from_sec1_bytes(&new_public_key_bytes)
            .map_err(|err| Status::invalid_argument(err.to_string()))?;
        let public_key_algo = request
            .public_key_algo
            .map(|s| s.parse())
            .transpose()
            .map_err(|err: anyhow::Error| Status::invalid_argument(err.to_string()))?
            .unwrap_or(PublicKeyAlgo::Secp256k1);

        let new_public_key = match public_key_algo {
            PublicKeyAlgo::Secp256k1 => PublicKey::Secp256k1(new_verifying_key),
            #[cfg(feature = "ethermint")]
            PublicKeyAlgo::EthSecp256k1 => PublicKey::EthSecp256k1(new_verifying_key),
        };

        self.core_service
            .update_signer(&self.signer, chain_id, request_id, new_public_key, memo)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(UpdateSignerResponse {}))
    }

    async fn query_history(
        &self,
        request: Request<QueryHistoryRequest>,
    ) -> Result<Response<QueryHistoryResponse>, Status> {
        let request = request.into_inner();

        let limit = request.limit.unwrap_or(10);
        let offset = request.offset.unwrap_or(0);

        let history = self
            .core_service
            .history(&self.signer, limit, offset)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        let response = QueryHistoryResponse {
            operations: history
                .into_iter()
                .map(|op| Operation {
                    id: op.id,
                    request_id: op.request_id,
                    address: op.address,
                    denom: op.denom.to_string(),
                    amount: op.amount,
                    operation_type: op.operation_type.to_string(),
                    transaction_hash: op.transaction_hash,
                    created_at: Some(SystemTime::from(op.created_at).into()),
                })
                .collect(),
        };

        Ok(Response::new(response))
    }
}
        let request_id = request.request_id;
        let force = request.force;

        self.core_service
            .connect(&self.signer, chain_id, request_id, memo, force)
            .await
            .map_err(|err| {
                log::error!("{}", err);
                Status::internal(err.to_string())
            })?;

        Ok(Response::new(ConnectResponse {}))
    }
function_block-function_prefix_line
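The `function_block-function_prefix_line` value above names the masking strategy for this record: judging from the fields shown, the `middle` span (the remainder of the `connect` body) was cut out of the file, and the `prefix` and `suffix` fields hold everything before and after it. A minimal sketch of how the three fields recompose the original `file_code` for this record — the `recompose` helper and its parameter names are illustrative assumptions, not an API from either repository:

// Hypothetical helper: stitches a fill-in-the-middle record back into the
// original source text. For this record, prefix + middle + suffix reproduces
// `file_code` (up to whitespace).
fn recompose(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut file_code = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    file_code.push_str(prefix);
    file_code.push_str(middle);
    file_code.push_str(suffix);
    file_code
}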
[ { "content": "#[async_trait]\n\npub trait Signer: ToPublicKey + Send + Sync {\n\n /// Signs the given message\n\n async fn sign(&self, request_id: Option<&str>, message: Message<'_>) -> Result<Vec<u8>>;\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: Signer> Signer for &T {\n\n async fn sign(&self, request_id: Option<&str>, message: Message<'_>) -> Result<Vec<u8>> {\n\n (*self).sign(request_id, message).await\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: Signer + ?Sized> Signer for Arc<T> {\n\n async fn sign(&self, request_id: Option<&str>, message: Message<'_>) -> Result<Vec<u8>> {\n\n (**self).sign(request_id, message).await\n\n }\n\n}\n\n\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 0, "score": 205859.99324502843 }, { "content": "#[async_trait]\n\npub trait EventHandler: Sync + Send {\n\n /// Handles an event\n\n async fn handle(&self, event: Event) -> Result<()>;\n\n}\n\n\n", "file_path": "solo-machine-core/src/event/event_handler.rs", "rank": 1, "score": 192196.21283274482 }, { "content": "fn add_row(table: &mut Vec<RowStruct>, title: &str, value: impl Display) {\n\n table.push(vec![title.cell().bold(true), value.cell()].row());\n\n}\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 2, "score": 173264.0764636494 }, { "content": "fn extract_attribute(events: &[AbciEvent], event_type: &str, key: &str) -> Result<String> {\n\n let mut attribute = None;\n\n\n\n for event in events {\n\n if event.type_str == event_type {\n\n attribute = Some(get_attribute(&event.attributes, key)?);\n\n }\n\n }\n\n\n\n attribute.ok_or_else(|| {\n\n anyhow!(\n\n \"{}:{} not found in tendermint response events: {:?}\",\n\n event_type,\n\n key,\n\n events\n\n )\n\n })\n\n}\n\n\n", "file_path": "solo-machine-core/src/service/ibc_service.rs", "rank": 3, "score": 156690.23762122067 }, { "content": "fn add_row(table: &mut Vec<RowStruct>, title: &str, value: impl Display) {\n\n table.push(vec![title.cell().bold(true), value.cell()].row());\n\n}\n\n\n\npub(crate) fn print_stream(\n\n stdout: &mut StandardStream,\n\n color_spec: &ColorSpec,\n\n s: impl Display,\n\n) -> Result<()> {\n\n stdout.set_color(color_spec)?;\n\n writeln!(stdout, \"{}\", s).context(\"unable to write to stdout\")?;\n\n stdout.reset().context(\"unable to reset stdout\")?;\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn print_json(color_choice: ColorChoice, json: impl Display) -> Result<()> {\n\n let mut stdout = StandardStream::stdout(color_choice);\n\n writeln!(stdout, \"{:#}\", json).context(\"unable to write to stdout\")?;\n\n stdout.reset().context(\"unable to reset stdout\")\n\n}\n", "file_path": "solo-machine/src/command.rs", "rank": 4, "score": 145960.44755789166 }, { "content": "fn get_env(key: &str) -> Result<String> {\n\n env::var(key).context(format!(\n\n \"`{}` environment variable is required for mnemonic signer\",\n\n key\n\n ))\n\n}\n\n\n\nimpl ToPublicKey for MnemonicSigner {\n\n fn to_public_key(&self) -> Result<PublicKey> {\n\n let signing_key = self.get_signing_key()?;\n\n let verifying_key = signing_key.verifying_key();\n\n\n\n match self.algo {\n\n AddressAlgo::Secp256k1 => Ok(PublicKey::Secp256k1(verifying_key)),\n\n #[cfg(feature = \"ethermint\")]\n\n AddressAlgo::EthSecp256k1 => Ok(PublicKey::EthSecp256k1(verifying_key)),\n\n }\n\n }\n\n\n\n fn get_account_prefix(&self) -> &str {\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 5, "score": 134960.64085266154 }, { "content": "#[no_mangle]\n\npub fn register_signer(registrar: &mut dyn SignerRegistrar) -> Result<()> {\n\n 
registrar.register(Arc::new(MnemonicSigner::from_env()?));\n\n Ok(())\n\n}\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 6, "score": 132249.54007882552 }, { "content": "pub fn proof_specs() -> Vec<ProofSpec> {\n\n vec![iavl_spec(), tendermint_spec()]\n\n}\n", "file_path": "solo-machine-core/src/ibc/core/ics23_vector_commitments.rs", "rank": 7, "score": 115170.39279317539 }, { "content": "#[no_mangle]\n\npub fn register_handler(registrar: &mut dyn HandlerRegistrar) -> Result<()> {\n\n registrar.register(Box::new(StdoutLogger {}));\n\n Ok(())\n\n}\n", "file_path": "event-hooks/stdout-logger/src/lib.rs", "rank": 8, "score": 110678.33114382804 }, { "content": "fn get_attribute(tags: &[Tag], key: &str) -> Result<String> {\n\n let key: Key = key\n\n .parse()\n\n .map_err(|e| anyhow!(\"unable to parse attribute key `{}`: {}\", key, e))?;\n\n\n\n for tag in tags {\n\n if tag.key == key {\n\n return Ok(tag.value.to_string());\n\n }\n\n }\n\n\n\n Err(anyhow!(\"{} not found in tags: {:?}\", key, tags))\n\n}\n", "file_path": "solo-machine-core/src/service/ibc_service.rs", "rank": 9, "score": 109458.07216730498 }, { "content": "fn into_row(operation: Operation) -> RowStruct {\n\n let color = get_color_for_operation_type(&operation.operation_type);\n\n\n\n vec![\n\n operation.id.cell().justify(Justify::Right),\n\n operation\n\n .request_id\n\n .unwrap_or_else(|| \"-\".to_string())\n\n .cell(),\n\n operation.address.cell(),\n\n operation.denom.cell(),\n\n operation.amount.cell().justify(Justify::Right),\n\n operation\n\n .operation_type\n\n .cell()\n\n .foreground_color(Some(color)),\n\n operation.transaction_hash.cell(),\n\n operation.created_at.cell(),\n\n ]\n\n .row()\n\n}\n\n\n", "file_path": "solo-machine/src/command/ibc.rs", "rank": 10, "score": 107057.05599656218 }, { "content": "/// Trait to register a signer\n\npub trait SignerRegistrar {\n\n /// Registers a new signer\n\n fn register(&mut self, signer: Arc<dyn Signer>);\n\n}\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 11, "score": 97711.91512507781 }, { "content": "#[derive(Debug, FromRow)]\n\nstruct IbcData {\n\n path: String,\n\n data: Vec<u8>,\n\n}\n\n\n\n/// Adds tendermint client state to database\n\npub async fn add_tendermint_client_state<'e>(\n\n executor: impl Executor<'e, Database = Db>,\n\n client_id: &ClientId,\n\n client_state: &TendermintClientState,\n\n) -> Result<()> {\n\n let path: String = ClientStatePath::new(client_id).into();\n\n let data = proto_encode(client_state)?;\n\n\n\n add(executor, &path, &data).await\n\n}\n\n\n\n/// Fetches tendermint client state from database\n\npub async fn get_tendermint_client_state<'e>(\n\n executor: impl Executor<'e, Database = Db>,\n", "file_path": "solo-machine-core/src/model/ibc.rs", "rank": 12, "score": 97505.57728074072 }, { "content": "/// Trait to register different event handlers\n\npub trait HandlerRegistrar: EventHandler {\n\n /// Registers a new event handler\n\n fn register(&mut self, handler: Box<dyn EventHandler>);\n\n}\n", "file_path": "solo-machine-core/src/event/event_handler.rs", "rank": 13, "score": 94159.44634401028 }, { "content": "fn parse_trusted_hash(hash: &str) -> Result<[u8; 32]> {\n\n ensure!(!hash.is_empty(), \"empty trusted hash\");\n\n\n\n let bytes = hex::decode(hash).context(\"invalid trusted hash hex bytes\")?;\n\n ensure!(bytes.len() == 32, \"trusted hash length should be 32\");\n\n\n\n let mut trusted_hash = [0; 32];\n\n trusted_hash.clone_from_slice(&bytes);\n\n\n\n Ok(trusted_hash)\n\n}\n\n\n", "file_path": 
"solo-machine/src/command/chain.rs", "rank": 14, "score": 91095.20001669021 }, { "content": "struct StdoutLogger {}\n\n\n\n#[async_trait]\n\nimpl EventHandler for StdoutLogger {\n\n async fn handle(&self, event: Event) -> Result<()> {\n\n println!(\"EVENT: {:?}\", event);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "event-hooks/stdout-logger/src/lib.rs", "rank": 15, "score": 90764.41810907685 }, { "content": "/// This trait must be implemented by all the public key providers (e.g. mnemonic, ledger, etc.)\n\npub trait ToPublicKey {\n\n /// Returns public key of signer\n\n fn to_public_key(&self) -> Result<PublicKey>;\n\n\n\n /// Returns account prefix for computing bech32 addresses\n\n fn get_account_prefix(&self) -> &str;\n\n\n\n /// Returns accounts address for this signer for given prefix\n\n fn to_account_address(&self) -> Result<String>;\n\n}\n\n\n\nimpl<T: ToPublicKey> ToPublicKey for &T {\n\n fn to_public_key(&self) -> Result<PublicKey> {\n\n (*self).to_public_key()\n\n }\n\n\n\n fn get_account_prefix(&self) -> &str {\n\n (*self).get_account_prefix()\n\n }\n\n\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 16, "score": 87823.84035875386 }, { "content": "pub fn proto_encode<M: Message>(message: &M) -> Result<Vec<u8>> {\n\n let mut buf = Vec::with_capacity(message.encoded_len());\n\n message\n\n .encode(&mut buf)\n\n .context(\"unable to encode protobuf message\")?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "solo-machine-core/src/proto.rs", "rank": 17, "score": 80928.73161735386 }, { "content": "fn build_tx_body<T>(messages: &[T], memo: String) -> Result<TxBody>\n\nwhere\n\n T: AnyConvert,\n\n{\n\n let messages = messages\n\n .iter()\n\n .map(AnyConvert::to_any)\n\n .collect::<Result<_, _>>()?;\n\n\n\n Ok(TxBody {\n\n messages,\n\n memo,\n\n timeout_height: 0,\n\n extension_options: Default::default(),\n\n non_critical_extension_options: Default::default(),\n\n })\n\n}\n\n\n", "file_path": "solo-machine-core/src/transaction_builder.rs", "rank": 18, "score": 80916.57489948504 }, { "content": "fn prepare_light_client(\n\n chain: &Chain,\n\n rpc_client: HttpClient,\n\n light_store: Box<dyn LightStore>,\n\n) -> Result<Instance> {\n\n let builder = LightClientBuilder::prod(\n\n chain.node_id,\n\n rpc_client,\n\n light_store,\n\n Options {\n\n trust_threshold: TrustThresholdFraction::new(\n\n *chain.config.trust_level.numer(),\n\n *chain.config.trust_level.denom(),\n\n )\n\n .unwrap(),\n\n trusting_period: chain.config.trusting_period,\n\n clock_drift: chain.config.max_clock_drift,\n\n },\n\n Some(chain.config.rpc_timeout),\n\n );\n\n\n\n let builder = builder.trust_primary_at(\n\n chain.config.trusted_height,\n\n TendermintHash::Sha256(chain.config.trusted_hash),\n\n )?;\n\n\n\n Ok(builder.build())\n\n}\n\n\n", "file_path": "solo-machine-core/src/service/ibc_service.rs", "rank": 19, "score": 80891.81745218713 }, { "content": "pub trait IPacket {\n\n fn commitment_bytes(&self) -> Result<Vec<u8>>;\n\n}\n\n\n\nimpl IPacket for Packet {\n\n fn commitment_bytes(&self) -> Result<Vec<u8>> {\n\n let timeout_height = self\n\n .timeout_height\n\n .as_ref()\n\n .ok_or_else(|| anyhow!(\"timeout height is not set\"))?;\n\n\n\n let mut buf = Vec::new();\n\n\n\n buf.extend(&self.timeout_timestamp.to_be_bytes());\n\n buf.extend(&timeout_height.revision_number.to_be_bytes());\n\n buf.extend(&timeout_height.revision_height.to_be_bytes());\n\n buf.extend(Sha256::digest(&self.data));\n\n\n\n Ok(Sha256::digest(&buf).to_vec())\n\n }\n\n}\n", "file_path": 
"solo-machine-core/src/ibc/core/ics04_channel/packet.rs", "rank": 20, "score": 79918.77745073201 }, { "content": "fn into_row(key: ChainKey) -> RowStruct {\n\n vec![\n\n key.id.cell().justify(Justify::Right),\n\n key.chain_id.cell(),\n\n key.public_key.cell(),\n\n key.created_at.cell(),\n\n ]\n\n .row()\n\n}\n", "file_path": "solo-machine/src/command/chain.rs", "rank": 21, "score": 77137.60518303988 }, { "content": "pub trait IHeight: Sized {\n\n fn new(revision_number: u64, revision_height: u64) -> Self;\n\n\n\n fn zero() -> Self {\n\n Self::new(0, 0)\n\n }\n\n\n\n fn is_zero(&self) -> bool;\n\n\n\n fn checked_add(self, rhs: u64) -> Option<Self>;\n\n\n\n fn checked_sub(self, rhs: u64) -> Option<Self>;\n\n\n\n fn cmp(&self, other: &Self) -> Ordering;\n\n\n\n fn to_string(&self) -> String;\n\n\n\n fn to_block_height(&self) -> Result<BlockHeight, Error>;\n\n\n\n fn from_str(height: &str) -> Result<Self, Error>;\n", "file_path": "solo-machine-core/src/ibc/core/ics02_client/height.rs", "rank": 22, "score": 76653.83125096896 }, { "content": "fn tendermint_spec() -> ProofSpec {\n\n ProofSpec {\n\n leaf_spec: Some(LeafOp {\n\n hash: HashOp::Sha256.into(),\n\n prehash_key: 0,\n\n prehash_value: HashOp::Sha256.into(),\n\n length: LengthOp::VarProto.into(),\n\n prefix: vec![0],\n\n }),\n\n inner_spec: Some(InnerSpec {\n\n child_order: vec![0, 1],\n\n min_prefix_length: 1,\n\n max_prefix_length: 1,\n\n child_size: 32,\n\n empty_child: vec![],\n\n hash: HashOp::Sha256.into(),\n\n }),\n\n max_depth: 0,\n\n min_depth: 0,\n\n }\n\n}\n\n\n", "file_path": "solo-machine-core/src/ibc/core/ics23_vector_commitments.rs", "rank": 23, "score": 74418.17765358981 }, { "content": "fn iavl_spec() -> ProofSpec {\n\n ProofSpec {\n\n leaf_spec: Some(LeafOp {\n\n hash: HashOp::Sha256.into(),\n\n prehash_key: 0,\n\n prehash_value: HashOp::Sha256.into(),\n\n length: LengthOp::VarProto.into(),\n\n prefix: vec![0],\n\n }),\n\n inner_spec: Some(InnerSpec {\n\n child_order: vec![0, 1],\n\n min_prefix_length: 4,\n\n max_prefix_length: 12,\n\n child_size: 33,\n\n empty_child: vec![],\n\n hash: HashOp::Sha256.into(),\n\n }),\n\n max_depth: 0,\n\n min_depth: 0,\n\n }\n\n}\n\n\n", "file_path": "solo-machine-core/src/ibc/core/ics23_vector_commitments.rs", "rank": 24, "score": 74418.17765358981 }, { "content": "pub trait IConsensusState: Sized {\n\n fn from_block_header(header: BlockHeader) -> Self;\n\n}\n\n\n\nimpl IConsensusState for ConsensusState {\n\n fn from_block_header(header: BlockHeader) -> Self {\n\n Self {\n\n root: Some(MerkleRoot {\n\n hash: header.app_hash.value(),\n\n }),\n\n timestamp: Some(Timestamp::from(SystemTime::from(header.time.0))),\n\n next_validators_hash: header.next_validators_hash.as_bytes().to_vec(),\n\n }\n\n }\n\n}\n\n\n\nconst TYPE_URL: &str = \"/ibc.lightclients.tendermint.v1.ConsensusState\";\n\n\n\nimpl_any_conversion!(ConsensusState, TYPE_URL);\n", "file_path": "solo-machine-core/src/ibc/client/ics07_tendermint/consensus_state.rs", "rank": 25, "score": 73778.03611888034 }, { "content": "fn get_color_for_operation_type(operation_type: &OperationType) -> Color {\n\n match operation_type {\n\n OperationType::Mint { .. } => Color::Green,\n\n OperationType::Burn { .. 
} => Color::Red,\n\n }\n\n}\n", "file_path": "solo-machine/src/command/ibc.rs", "rank": 26, "score": 70171.74532366292 }, { "content": "fn ensure_response_success(response: &TxCommitResponse) -> Result<String> {\n\n ensure!(\n\n response.check_tx.code.is_ok(),\n\n \"check_tx response contains error code: {}\",\n\n response.check_tx.log\n\n );\n\n\n\n ensure!(\n\n response.deliver_tx.code.is_ok(),\n\n \"deliver_tx response contains error code: {}\",\n\n response.deliver_tx.log\n\n );\n\n\n\n Ok(response.hash.to_string())\n\n}\n\n\n", "file_path": "solo-machine-core/src/service/ibc_service.rs", "rank": 27, "score": 66441.10813538222 }, { "content": "use cosmos_sdk_proto::ibc::core::connection::v1::MsgConnectionOpenAck;\n\n\n\nconst TYPE_URL: &str = \"/ibc.core.connection.v1.MsgConnectionOpenAck\";\n\n\n\nimpl_any_conversion!(MsgConnectionOpenAck, TYPE_URL);\n", "file_path": "solo-machine-core/src/ibc/core/ics03_connection/msg_connection_open_ack.rs", "rank": 28, "score": 66240.60336681192 }, { "content": "use cosmos_sdk_proto::ibc::core::connection::v1::MsgConnectionOpenInit;\n\n\n\nconst TYPE_URL: &str = \"/ibc.core.connection.v1.MsgConnectionOpenInit\";\n\n\n\nimpl_any_conversion!(MsgConnectionOpenInit, TYPE_URL);\n", "file_path": "solo-machine-core/src/ibc/core/ics03_connection/msg_connection_open_init.rs", "rank": 29, "score": 66240.60336681192 }, { "content": "fn extract_packets(response: &TxCommitResponse) -> Result<Vec<Packet>> {\n\n let mut packets = vec![];\n\n\n\n for event in response.deliver_tx.events.iter() {\n\n if event.type_str == \"send_packet\" {\n\n let mut attributes = HashMap::new();\n\n\n\n for tag in event.attributes.iter() {\n\n attributes.insert(tag.key.to_string(), tag.value.to_string());\n\n }\n\n\n\n let packet = Packet {\n\n sequence: attributes\n\n .remove(\"packet_sequence\")\n\n .ok_or_else(|| anyhow!(\"`packet_sequence` is missing from packet data\"))?\n\n .parse()\n\n .context(\"invalid `packet_sequence`\")?,\n\n source_port: attributes\n\n .remove(\"packet_src_port\")\n\n .ok_or_else(|| anyhow!(\"`packet_src_port` is missing from packet data\"))?,\n", "file_path": "solo-machine-core/src/service/ibc_service.rs", "rank": 30, "score": 65437.004079441205 }, { "content": "fn compute_path(identifiers: &[Identifier]) -> Result<String, Error> {\n\n ensure!(\n\n identifiers.len() > 1,\n\n \"path contains less than or equal to one identifier\"\n\n );\n\n\n\n let mut path = String::new();\n\n\n\n for id in identifiers.iter() {\n\n path.push_str(&format!(\"/{}\", id));\n\n }\n\n\n\n Ok(path)\n\n}\n\n\n\nimpl AsRef<[u8]> for Path {\n\n fn as_ref(&self) -> &[u8] {\n\n self.0.as_bytes()\n\n }\n\n}\n", "file_path": "solo-machine-core/src/ibc/core/ics24_host/path.rs", "rank": 31, "score": 65437.004079441205 }, { "content": "pub mod msg_connection_open_ack;\n\npub mod msg_connection_open_init;\n", "file_path": "solo-machine-core/src/ibc/core/ics03_connection.rs", "rank": 32, "score": 62695.45504331725 }, { "content": "/// Raw state of an IBC enabled chain\n\nstruct RawChain {\n\n /// ID of chain\n\n pub id: String,\n\n /// Node ID of chain\n\n pub node_id: String,\n\n /// Configuration for chain\n\n pub config: Json<ChainConfig>,\n\n /// Consensus timestamp of solo machine (used when creating transactions on chain)\n\n pub consensus_timestamp: DateTime<Utc>,\n\n /// Sequence of solo machine (used when creating transactions on chain)\n\n pub sequence: i64,\n\n /// Packet sequence of solo machine (used when creating transactions on chain)\n\n pub packet_sequence: i64,\n\n /// 
IBC connection details\n\n pub connection_details: Option<Json<ConnectionDetails>>,\n\n /// Creation time of chain\n\n pub created_at: DateTime<Utc>,\n\n /// Last updation time of chain\n\n pub updated_at: DateTime<Utc>,\n\n}\n", "file_path": "solo-machine-core/src/model/chain/chain.rs", "rank": 33, "score": 57539.399543245876 }, { "content": "/// Raw signer's public key entry for an IBC enabled chain\n\nstruct RawChainKey {\n\n /// ID of operation\n\n pub id: i64,\n\n /// Chain ID\n\n pub chain_id: String,\n\n /// Public key of signer\n\n pub public_key: String,\n\n /// Creation time of chain key entry\n\n pub created_at: DateTime<Utc>,\n\n}\n\n\n\nimpl From<ChainKey> for RawChainKey {\n\n fn from(chain_key: ChainKey) -> Self {\n\n Self {\n\n id: chain_key.id,\n\n chain_id: chain_key.chain_id.to_string(),\n\n public_key: chain_key.public_key,\n\n created_at: chain_key.created_at,\n\n }\n\n }\n", "file_path": "solo-machine-core/src/model/chain/chain_keys.rs", "rank": 34, "score": 55768.763198105255 }, { "content": "fn build_auth_info(\n\n signer: impl ToPublicKey,\n\n chain: &Chain,\n\n account_sequence: u64,\n\n) -> Result<AuthInfo> {\n\n let signer_info = SignerInfo {\n\n public_key: Some(signer.to_public_key()?.to_any()?),\n\n mode_info: Some(ModeInfo {\n\n sum: Some(Sum::Single(Single { mode: 1 })),\n\n }),\n\n sequence: account_sequence,\n\n };\n\n\n\n let fee = Fee {\n\n amount: vec![Coin {\n\n denom: chain.config.fee.denom.to_string(),\n\n amount: chain.config.fee.amount.to_string(),\n\n }],\n\n gas_limit: chain.config.fee.gas_limit,\n\n payer: \"\".to_owned(),\n", "file_path": "solo-machine-core/src/transaction_builder.rs", "rank": 35, "score": 53593.76423505465 }, { "content": "pub trait AnyConvert: Sized {\n\n fn from_any(value: &Any) -> Result<Self>;\n\n\n\n fn to_any(&self) -> Result<Any>;\n\n}\n\n\n\nmacro_rules! 
impl_any_conversion {\n\n ($type: ty, $type_url: ident) => {\n\n impl $crate::proto::AnyConvert for $type {\n\n fn from_any(value: &::prost_types::Any) -> ::anyhow::Result<Self> {\n\n ::anyhow::ensure!(\n\n value.type_url == $type_url,\n\n \"invalid type url for `Any` type: expected `{}` and found `{}`\",\n\n $type_url,\n\n value.type_url\n\n );\n\n\n\n <Self as ::prost::Message>::decode(value.value.as_slice()).map_err(Into::into)\n\n }\n\n\n\n fn to_any(&self) -> ::anyhow::Result<::prost_types::Any> {\n\n Ok(::prost_types::Any {\n\n type_url: $type_url.to_owned(),\n\n value: $crate::proto::proto_encode(self)?,\n\n })\n\n }\n\n }\n\n };\n\n}\n", "file_path": "solo-machine-core/src/proto.rs", "rank": 36, "score": 53534.01975044886 }, { "content": "pub trait BitArray {\n\n fn is_empty(&self) -> bool;\n\n\n\n fn len(&self) -> usize;\n\n\n\n fn get(&self, index: usize) -> bool;\n\n\n\n fn num_true_bits_before(&self, index: usize) -> usize;\n\n}\n\n\n\nimpl BitArray for CompactBitArray {\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n fn len(&self) -> usize {\n\n if self.extra_bits_stored == 0 {\n\n return self.elems.len() * 8;\n\n }\n\n\n", "file_path": "solo-machine-core/src/cosmos/bit_array.rs", "rank": 37, "score": 53363.64397255305 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let mut files = Vec::new();\n\n\n\n let paths = read_dir(\"./proto\")?;\n\n\n\n for path in paths {\n\n files.extend(get_files(path?)?);\n\n }\n\n\n\n let mut config = Config::default();\n\n config.protoc_arg(\"--experimental_allow_proto3_optional\");\n\n\n\n tonic_build::configure()\n\n .build_client(false)\n\n .compile_with_config(config, &files, &[\"proto\"])?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "solo-machine/build.rs", "rank": 38, "score": 48752.575393721076 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let mut files = Vec::new();\n\n\n\n let paths = read_dir(\"./proto\")?;\n\n\n\n for path in paths {\n\n files.extend(get_files(path?)?);\n\n }\n\n\n\n tonic_build::configure()\n\n .extern_path(\n\n \".cosmos.auth.v1beta1\",\n\n \"::cosmos_sdk_proto::cosmos::auth::v1beta1\",\n\n )\n\n .extern_path(\n\n \".ibc.core.connection.v1\",\n\n \"::cosmos_sdk_proto::ibc::core::connection::v1\",\n\n )\n\n .extern_path(\n\n \".ibc.core.channel.v1\",\n\n \"::cosmos_sdk_proto::ibc::core::channel::v1\",\n\n )\n\n .build_server(false)\n\n .compile(&files, &[\"proto\"])?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "solo-machine-core/build.rs", "rank": 39, "score": 47792.13194241978 }, { "content": " cosmos::crypto::PublicKey,\n\n signer::{AddressAlgo, Message, SignerRegistrar},\n\n Signer, ToPublicKey,\n\n};\n\n\n\nconst DEFAULT_HD_PATH: &str = \"m/44'/118'/0'/0/0\";\n\nconst DEFAULT_ACCOUNT_PREFIX: &str = \"cosmos\";\n\nconst DEFAULT_ADDRESS_ALGO: &str = \"secp256k1\";\n\n\n\n#[derive(Clone)]\n\n/// Signer implementation using mnemonic\n\npub struct MnemonicSigner {\n\n /// Mnemonic of signer\n\n pub mnemonic: Mnemonic,\n\n /// HD path of signer\n\n pub hd_path: String,\n\n /// Bech32 prefix\n\n pub account_prefix: String,\n\n /// Algorithm used for address generation\n\n pub algo: AddressAlgo,\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 40, "score": 45176.74493312865 }, { "content": "//! # Mnemonic Signer\n\n//!\n\n//! Signs transaction using provided mnemonic.\n\n//!\n\n//! ## Arguments\n\n//!\n\n//! Mnemonic signer can take following arguments using via environment variables:\n\n//!\n\n//! - `SOLO_MNEMONIC`: Mnemonic phrase (required)\n\n//! 
- `SOLO_HD_PATH`: HD wallet path (default: \"m/44'/118'/0'/0/0\")\n\n//! - `SOLO_ACCOUNT_PREFIX`: Account prefix for generating addresses (default: \"cosmos\")\n\n//! - `SOLO_ADDRESS_ALGO`: Algorithm of the key pair (default: \"secp256k1\") (possible values: [\"secp256k1\", \"eth-secp256k1\"])\n\nuse std::{env, str::FromStr, sync::Arc};\n\n\n\nuse anyhow::{anyhow, Context, Result};\n\nuse async_trait::async_trait;\n\nuse bip32::{DerivationPath, ExtendedPrivateKey, Language, Mnemonic};\n\nuse k256::ecdsa::{signature::DigestSigner, Signature, SigningKey};\n\nuse ripemd160::Digest;\n\nuse solo_machine_core::{\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 41, "score": 45175.762727169276 }, { "content": "}\n\n\n\nimpl MnemonicSigner {\n\n pub fn from_env() -> Result<Self> {\n\n let mnemonic_str = get_env(\"SOLO_MNEMONIC\")?;\n\n let mnemonic = Mnemonic::new(mnemonic_str, Language::English)\n\n .map_err(|_| anyhow!(\"invalid mnemonic\"))?;\n\n\n\n let hd_path = get_env(\"SOLO_HD_PATH\").unwrap_or_else(|_| DEFAULT_HD_PATH.to_string());\n\n let account_prefix =\n\n get_env(\"SOLO_ACCOUNT_PREFIX\").unwrap_or_else(|_| DEFAULT_ACCOUNT_PREFIX.to_string());\n\n\n\n let algo = get_env(\"SOLO_ADDRESS_ALGO\")\n\n .unwrap_or_else(|_| DEFAULT_ADDRESS_ALGO.to_string())\n\n .parse()?;\n\n\n\n Ok(Self {\n\n mnemonic,\n\n hd_path,\n\n account_prefix,\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 42, "score": 45172.90911846521 }, { "content": " &self.account_prefix\n\n }\n\n\n\n fn to_account_address(&self) -> Result<String> {\n\n self.to_public_key()?\n\n .account_address(self.get_account_prefix())\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Signer for MnemonicSigner {\n\n async fn sign(&self, _request_id: Option<&str>, message: Message<'_>) -> Result<Vec<u8>> {\n\n let signing_key = self.get_signing_key()?;\n\n\n\n let signature: Signature = match self.algo {\n\n AddressAlgo::Secp256k1 => signing_key.sign_digest(sha2::Sha256::new().chain(message)),\n\n #[cfg(feature = \"ethermint\")]\n\n AddressAlgo::EthSecp256k1 => {\n\n signing_key.sign_digest(sha3::Keccak256::new().chain(message))\n\n }\n\n };\n\n\n\n Ok(signature.as_ref().to_vec())\n\n }\n\n}\n\n\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 43, "score": 45172.87136517003 }, { "content": " algo,\n\n })\n\n }\n\n\n\n fn get_signing_key(&self) -> Result<SigningKey> {\n\n let seed = self.mnemonic.to_seed(\"\");\n\n let hd_path = DerivationPath::from_str(&self.hd_path).context(\"invalid HD path\")?;\n\n let private_key =\n\n ExtendedPrivateKey::<SigningKey>::derive_from_path(seed.as_bytes(), &hd_path).unwrap();\n\n\n\n Ok(private_key.into())\n\n }\n\n}\n\n\n", "file_path": "signers/mnemonic-signer/src/lib.rs", "rank": 44, "score": 45164.05310616759 }, { "content": "use std::{fmt::Display, io::Write};\n\n\n\nuse anyhow::{Context, Result};\n\nuse async_trait::async_trait;\n\nuse cli_table::{\n\n format::Justify, print_stdout, Cell, Color, ColorChoice, Row, RowStruct, Style, Table,\n\n};\n\nuse serde_json::json;\n\nuse solo_machine_core::{event::EventHandler, Event};\n\nuse termcolor::{ColorSpec, StandardStream};\n\n\n\nuse crate::{\n\n command::{print_json, print_stream},\n\n output::OutputType,\n\n};\n\n\n\npub struct CliEventHandler {\n\n color_choice: ColorChoice,\n\n output: OutputType,\n\n}\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 45, "score": 42915.18983834865 }, { "content": "use anyhow::Result;\n\nuse async_trait::async_trait;\n\n\n\nuse super::Event;\n\n\n\n/// Trait to 
handle events generated by solo machine\n\n#[async_trait]\n", "file_path": "solo-machine-core/src/event/event_handler.rs", "rank": 46, "score": 42913.218993202005 }, { "content": "\n\nimpl CliEventHandler {\n\n pub fn new(color_choice: ColorChoice, output: OutputType) -> Self {\n\n Self {\n\n color_choice,\n\n output,\n\n }\n\n }\n\n\n\n fn handle_text_output(&self, event: Event) -> Result<()> {\n\n let mut stdout = StandardStream::stdout(self.color_choice);\n\n\n\n match event {\n\n Event::ChainAdded { chain_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"New chain added!\",\n\n )?;\n\n\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 47, "score": 42912.074375388045 }, { "content": " json!({\n\n \"result\": \"success\",\n\n \"data\": event,\n\n }),\n\n ),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl EventHandler for CliEventHandler {\n\n async fn handle(&self, event: Event) -> Result<()> {\n\n match self.output {\n\n OutputType::Text => self.handle_text_output(event),\n\n OutputType::Json => self.handle_json_output(event),\n\n }\n\n }\n\n}\n\n\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 48, "score": 42911.16819134465 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Initialized connection on solo machine [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n )?;\n\n }\n\n Event::ConfirmedConnectionOnTendermint { connection_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Confirmed connection on IBC enabled chain [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n )?;\n\n }\n\n Event::ConfirmedConnectionOnSoloMachine { connection_id } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 49, "score": 42910.18731910523 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Created tendermint client on solo machine [Client ID = {}]\",\n\n client_id\n\n ),\n\n )?;\n\n }\n\n Event::InitializedConnectionOnTendermint { connection_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Initialized connection on IBC enabled chain [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n )?;\n\n }\n\n Event::InitializedConnectionOnSoloMachine { connection_id } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 50, "score": 42909.869491246085 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Confirmed connection on solo machine [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n )?;\n\n }\n\n Event::InitializedChannelOnTendermint { channel_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Initialized channel on IBC enabled chain [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n )?;\n\n }\n\n Event::InitializedChannelOnSoloMachine { channel_id } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 51, "score": 42909.031023133095 }, { "content": " add_row(&mut table, \"Chain ID\", chain_id);\n\n add_row(\n\n &mut table,\n\n \"Request ID\",\n\n request_id.as_deref().unwrap_or(\"-\"),\n\n );\n\n add_row(&mut table, \"From\", from_address);\n\n add_row(&mut table, \"Amount\", amount);\n\n add_row(&mut table, \"Denom\", denom);\n\n add_row(&mut table, \"Transaction Hash\", transaction_hash);\n\n\n\n 
print_stdout(table.table().color_choice(self.color_choice))\n\n .context(\"unable to print table to stdout\")?;\n\n }\n\n Event::SignerUpdated { chain_id, .. } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"Signer updated!\",\n\n )?;\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 52, "score": 42907.33016369477 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Confirmed channel on solo machine [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n )?;\n\n }\n\n Event::ConnectionEstablished {\n\n chain_id,\n\n connection_details,\n\n } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"Connection established!\",\n\n )?;\n\n writeln!(stdout)?;\n\n\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 53, "score": 42906.36114887023 }, { "content": " \"Tendermint connection ID\",\n\n connection_details.tendermint_connection_id,\n\n );\n\n add_row(\n\n &mut table,\n\n \"Solo machine channel ID\",\n\n connection_details.solo_machine_channel_id.as_ref().unwrap(),\n\n );\n\n add_row(\n\n &mut table,\n\n \"Tendermint channel ID\",\n\n connection_details.tendermint_channel_id.as_ref().unwrap(),\n\n );\n\n\n\n print_stdout(table.table().color_choice(self.color_choice))\n\n .context(\"unable to print table to stdout\")?;\n\n }\n\n Event::Warning { message } => {\n\n print_stream(\n\n &mut stdout,\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 54, "score": 42906.19248520597 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Initialized channel on solo machine [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n )?;\n\n }\n\n Event::ConfirmedChannelOnTendermint { channel_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Confirmed channel on IBC enabled chain [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n )?;\n\n }\n\n Event::ConfirmedChannelOnSoloMachine { channel_id } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 55, "score": 42905.27062716657 }, { "content": " writeln!(stdout)?;\n\n\n\n let mut table = Vec::new();\n\n\n\n add_row(&mut table, \"Chain ID\", chain_id);\n\n\n\n print_stdout(table.table().color_choice(self.color_choice))\n\n .context(\"unable to print table to stdout\")?;\n\n }\n\n Event::CreatedSoloMachineClient { client_id } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n format!(\n\n \"Created solo machine client on IBC enabled chain [Client ID = {}]\",\n\n client_id\n\n ),\n\n )?;\n\n }\n\n Event::CreatedTendermintClient { client_id } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 56, "score": 42904.87626215706 }, { "content": " print_stdout(table.table().color_choice(self.color_choice))\n\n .context(\"unable to print table to stdout\")?;\n\n }\n\n Event::TokensBurnt {\n\n chain_id,\n\n request_id,\n\n from_address,\n\n amount,\n\n denom,\n\n transaction_hash,\n\n } => {\n\n print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"Tokens burnt!\",\n\n )?;\n\n writeln!(stdout)?;\n\n\n\n let mut table = Vec::new();\n\n\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 57, "score": 42904.74317727277 }, { "content": " ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)),\n\n format!(\"WARNING: {}\", message),\n\n )?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn 
handle_json_output(&self, event: Event) -> Result<()> {\n\n match event {\n\n Event::Warning { message } => print_json(\n\n self.color_choice,\n\n json!({\n\n \"result\": \"warning\",\n\n \"data\": message,\n\n }),\n\n ),\n\n _ => print_json(\n\n self.color_choice,\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 58, "score": 42904.22918673767 }, { "content": " let mut table = Vec::new();\n\n\n\n add_row(&mut table, \"Chain ID\", chain_id);\n\n add_row(\n\n &mut table,\n\n \"Solo machine client ID\",\n\n connection_details.solo_machine_client_id,\n\n );\n\n add_row(\n\n &mut table,\n\n \"Tendermint client ID\",\n\n connection_details.tendermint_client_id,\n\n );\n\n add_row(\n\n &mut table,\n\n \"Solo machine connection ID\",\n\n connection_details.solo_machine_connection_id,\n\n );\n\n add_row(\n\n &mut table,\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 59, "score": 42904.04002639997 }, { "content": " &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"Chain channel init closed!\",\n\n )?;\n\n writeln!(stdout)?;\n\n let mut table = Vec::new();\n\n\n\n add_row(&mut table, \"Chain ID\", chain_id);\n\n add_row(&mut table, \"Solo Machine Channel Id\", channel_id);\n\n print_stdout(table.table().color_choice(self.color_choice))\n\n .context(\"unable to print table to stdout\")?;\n\n }\n\n Event::TokensMinted {\n\n chain_id,\n\n request_id,\n\n to_address,\n\n amount,\n\n denom,\n\n transaction_hash,\n\n } => {\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 60, "score": 42903.972888625496 }, { "content": " print_stream(\n\n &mut stdout,\n\n ColorSpec::new().set_bold(true),\n\n \"Tokens minted!\",\n\n )?;\n\n writeln!(stdout)?;\n\n\n\n let mut table = Vec::new();\n\n\n\n add_row(&mut table, \"Chain ID\", chain_id);\n\n add_row(\n\n &mut table,\n\n \"Request ID\",\n\n request_id.as_deref().unwrap_or(\"-\"),\n\n );\n\n add_row(&mut table, \"To\", to_address);\n\n add_row(&mut table, \"Amount\", amount);\n\n add_row(&mut table, \"Denom\", denom);\n\n add_row(&mut table, \"Transaction Hash\", transaction_hash);\n\n\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 61, "score": 42902.81504277593 }, { "content": " writeln!(stdout)?;\n\n\n\n let table = vec![vec![\n\n \"Chain ID\".cell().bold(true),\n\n format!(\"{}\", chain_id)\n\n .cell()\n\n .bold(true)\n\n .foreground_color(Some(Color::Green))\n\n .justify(Justify::Right),\n\n ]]\n\n .table()\n\n .color_choice(self.color_choice);\n\n\n\n print_stdout(table).context(\"unable to print table to stdout\")?;\n\n }\n\n Event::CloseChannelInitOnSoloMachine {\n\n chain_id,\n\n channel_id,\n\n } => {\n\n print_stream(\n", "file_path": "solo-machine/src/event/cli_event_handler.rs", "rank": 62, "score": 42902.15082911446 }, { "content": "fn to_u64_timestamp(timestamp: DateTime<Utc>) -> Result<u64> {\n\n timestamp\n\n .timestamp()\n\n .try_into()\n\n .context(\"unable to convert unix timestamp to u64\")\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct TokenTransferPacketData {\n\n pub denom: String,\n\n // Ideally `amount` should be `u64` but `ibc-go` uses `protojson` which encodes `uint64` into `string`. 
So, using\n\n // `String` here to keep consistent wire format.\n\n pub amount: String,\n\n pub sender: String,\n\n pub receiver: String,\n\n}\n", "file_path": "solo-machine-core/src/transaction_builder.rs", "rank": 63, "score": 42728.94410824933 }, { "content": "fn get_block_height(chain: &Chain, header: &Header) -> Height {\n\n let revision_number = chain.id.version();\n\n let revision_height = header.height.value();\n\n\n\n Height {\n\n revision_number,\n\n revision_height,\n\n }\n\n}\n\n\n\nasync fn get_packet_acknowledgement_proof(\n\n signer: impl Signer,\n\n chain: &Chain,\n\n acknowledgement: Vec<u8>,\n\n packet_sequence: u64,\n\n request_id: Option<&str>,\n\n) -> Result<Vec<u8>> {\n\n let connection_details = chain.connection_details.as_ref().ok_or_else(|| {\n\n anyhow!(\n\n \"connection details for chain with id {} not found\",\n", "file_path": "solo-machine-core/src/transaction_builder.rs", "rank": 64, "score": 42728.94410824933 }, { "content": "fn get_latest_header(instance: &mut Instance) -> Result<Header> {\n\n let light_block = instance\n\n .light_client\n\n .verify_to_highest(&mut instance.state)?;\n\n\n\n Ok(light_block.signed_header.header)\n\n}\n\n\n", "file_path": "solo-machine-core/src/transaction_builder.rs", "rank": 65, "score": 42728.94410824933 }, { "content": "fn get_files(path: DirEntry) -> Result<Vec<PathBuf>, Box<dyn Error>> {\n\n if path.file_type()?.is_file() {\n\n return Ok(vec![path.path()]);\n\n }\n\n\n\n let paths = read_dir(path.path())?;\n\n let mut files = Vec::new();\n\n\n\n for path in paths {\n\n files.extend(get_files(path?)?);\n\n }\n\n\n\n Ok(files)\n\n}\n", "file_path": "solo-machine/build.rs", "rank": 66, "score": 39653.247708356255 }, { "content": "fn get_files(path: DirEntry) -> Result<Vec<PathBuf>, Box<dyn Error>> {\n\n if path.file_type()?.is_file() {\n\n return Ok(vec![path.path()]);\n\n }\n\n\n\n let paths = read_dir(path.path())?;\n\n let mut files = Vec::new();\n\n\n\n for path in paths {\n\n files.extend(get_files(path?)?);\n\n }\n\n\n\n Ok(files)\n\n}\n", "file_path": "solo-machine-core/build.rs", "rank": 67, "score": 38997.7135414293 }, { "content": "fn deserialize_verifying_key<'de, D>(deserializer: D) -> Result<VerifyingKey, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let bytes: Vec<u8> = hex::deserialize(deserializer)?;\n\n VerifyingKey::from_sec1_bytes(&bytes).map_err(serde::de::Error::custom)\n\n}\n", "file_path": "solo-machine-core/src/cosmos/crypto.rs", "rank": 68, "score": 37778.60113977281 }, { "content": "use std::{convert::TryFrom, ffi::OsStr, path::PathBuf, sync::Arc};\n\n\n\nuse anyhow::{anyhow, Context, Error, Result};\n\nuse libloading::{Library, Symbol};\n\nuse solo_machine_core::{signer::SignerRegistrar as ISignerRegistrar, Signer};\n\n\n\n#[derive(Default)]\n\npub struct SignerRegistrar {\n\n signer: Option<Arc<dyn Signer>>,\n\n}\n\n\n\nimpl SignerRegistrar {\n\n pub fn unwrap(self) -> Result<Arc<dyn Signer>> {\n\n self.signer.ok_or_else(|| anyhow!(\"signer not registered\"))\n\n }\n\n\n\n // TODO: remove conditional compilation when this issue is fixed:\n\n // https://github.com/nagisa/rust_libloading/issues/41\n\n fn register_signer(&mut self, file: impl AsRef<OsStr>) -> Result<()> {\n\n unsafe {\n", "file_path": "solo-machine/src/signer.rs", "rank": 69, "score": 37131.621517750355 }, { "content": "\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl ISignerRegistrar for SignerRegistrar {\n\n fn register(&mut self, signer: Arc<dyn Signer>) {\n\n self.signer = Some(signer);\n\n }\n\n}\n\n\n\nimpl TryFrom<PathBuf> for 
SignerRegistrar {\n\n type Error = Error;\n\n\n\n fn try_from(file: PathBuf) -> Result<Self, Self::Error> {\n\n let mut registrar = Self::default();\n\n registrar.register_signer(file)?;\n\n\n\n Ok(registrar)\n\n }\n\n}\n", "file_path": "solo-machine/src/signer.rs", "rank": 70, "score": 37125.63404481794 }, { "content": " #[cfg(target_os = \"linux\")]\n\n let library: Library = {\n\n // Load library with `RTLD_NOW | RTLD_NODELETE` to fix a SIGSEGV\n\n libloading::os::unix::Library::open(\n\n Some(file),\n\n libloading::os::unix::RTLD_NOW | 0x1000,\n\n )\n\n .context(\"unable to load signer\")?\n\n .into()\n\n };\n\n #[cfg(not(target_os = \"linux\"))]\n\n let library = Library::new(file).context(\"unable to load signer\")?;\n\n\n\n let register_fn: Symbol<unsafe extern \"C\" fn(&mut dyn ISignerRegistrar) -> Result<()>> =\n\n library\n\n .get(\"register_signer\".as_bytes())\n\n .context(\"unable to load `register_signer` function from signer\")?;\n\n\n\n register_fn(self).context(\"unable to register signer\")?;\n\n }\n", "file_path": "solo-machine/src/signer.rs", "rank": 71, "score": 37120.56468767591 }, { "content": "CREATE TABLE IF NOT EXISTS ibc_data (\n\n path TEXT PRIMARY KEY NOT NULL,\n\n data BYTEA NOT NULL\n\n);\n", "file_path": "solo-machine-core/postgres-migrations/20210629091441_ibc_data.up.sql", "rank": 72, "score": 36806.16615564271 }, { "content": "DROP TABLE IF EXISTS ibc_data;\n", "file_path": "solo-machine-core/sqlite-migrations/20210629091441_ibc_data.down.sql", "rank": 73, "score": 36806.16615564271 }, { "content": "DROP TABLE IF EXISTS ibc_data;\n", "file_path": "solo-machine-core/postgres-migrations/20210629091441_ibc_data.down.sql", "rank": 74, "score": 36806.16615564271 }, { "content": "CREATE TABLE IF NOT EXISTS ibc_data (\n\n path TEXT PRIMARY KEY NOT NULL,\n\n data BLOB NOT NULL\n\n);\n", "file_path": "solo-machine-core/sqlite-migrations/20210629091441_ibc_data.up.sql", "rank": 75, "score": 36806.16615564271 }, { "content": "pub mod cli_event_handler;\n\npub mod env_logger;\n\n\n\nuse std::{convert::TryFrom, ffi::OsStr, path::PathBuf};\n\n\n\nuse anyhow::{Context, Error, Result};\n\nuse async_trait::async_trait;\n\nuse libloading::{Library, Symbol};\n\nuse solo_machine_core::{\n\n event::{EventHandler, HandlerRegistrar as IHandlerRegistrar},\n\n Event,\n\n};\n\nuse tokio::{\n\n sync::mpsc::{unbounded_channel, UnboundedSender},\n\n task::JoinHandle,\n\n};\n\n\n\n#[derive(Default)]\n\npub struct HandlerRegistrar {\n\n event_handlers: Vec<Box<dyn EventHandler>>,\n", "file_path": "solo-machine/src/event.rs", "rank": 76, "score": 36764.785805624604 }, { "content": " register_fn(self).context(\"unable to register event handler\")?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl EventHandler for HandlerRegistrar {\n\n async fn handle(&self, event: Event) -> Result<()> {\n\n // TODO: parallelise this\n\n for handler in self.event_handlers.iter() {\n\n handler.handle(event.clone()).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IHandlerRegistrar for HandlerRegistrar {\n", "file_path": "solo-machine/src/event.rs", "rank": 77, "score": 36760.33520802357 }, { "content": "}\n\n\n\nimpl HandlerRegistrar {\n\n pub fn spawn(self) -> (UnboundedSender<Event>, JoinHandle<Result<()>>) {\n\n let (sender, mut receiver) = unbounded_channel();\n\n\n\n let handle = tokio::spawn(async move {\n\n while let Some(event) = receiver.recv().await {\n\n self.handle(event).await?;\n\n }\n\n\n\n Ok(())\n\n });\n\n\n\n (sender, handle)\n\n }\n\n\n\n // TODO: remove conditional 
compilation when this issue is fixed:\n\n // https://github.com/nagisa/rust_libloading/issues/41\n\n fn register_handler(&mut self, file: impl AsRef<OsStr>) -> Result<()> {\n", "file_path": "solo-machine/src/event.rs", "rank": 78, "score": 36759.266991568315 }, { "content": " fn register(&mut self, handler: Box<dyn EventHandler>) {\n\n self.event_handlers.push(handler)\n\n }\n\n}\n\n\n\nimpl TryFrom<Vec<PathBuf>> for HandlerRegistrar {\n\n type Error = Error;\n\n\n\n fn try_from(files: Vec<PathBuf>) -> Result<Self, Self::Error> {\n\n let mut registrar = Self::default();\n\n\n\n for file in files.iter() {\n\n registrar.register_handler(file)?;\n\n }\n\n\n\n Ok(registrar)\n\n }\n\n}\n", "file_path": "solo-machine/src/event.rs", "rank": 79, "score": 36755.67304196917 }, { "content": " unsafe {\n\n #[cfg(target_os = \"linux\")]\n\n let library: Library = {\n\n // Load library with `RTLD_NOW | RTLD_NODELETE` to fix a SIGSEGV\n\n libloading::os::unix::Library::open(\n\n Some(file),\n\n libloading::os::unix::RTLD_NOW | 0x1000,\n\n )\n\n .context(\"unable to load event handler\")?\n\n .into()\n\n };\n\n #[cfg(not(target_os = \"linux\"))]\n\n let library = Library::new(file).context(\"unable to load event handler\")?;\n\n\n\n let register_fn: Symbol<\n\n unsafe extern \"C\" fn(&mut dyn IHandlerRegistrar) -> Result<()>,\n\n > = library\n\n .get(\"register_handler\".as_bytes())\n\n .context(\"unable to load `register_handler` function from event hook\")?;\n\n\n", "file_path": "solo-machine/src/event.rs", "rank": 80, "score": 36751.16143751159 }, { "content": "\n\n/// Type of message given to a signer\n\n#[derive(Debug)]\n\npub enum Message<'a> {\n\n /// [cosmos_sdk_proto::ibc::lightclients::solomachine::v1::SignBytes]\n\n SignBytes(&'a [u8]),\n\n /// [cosmos_sdk_proto::cosmos::tx::v1beta1::SignDoc]\n\n SignDoc(&'a [u8]),\n\n}\n\n\n\nimpl<'a> Message<'a> {\n\n /// Returns the message type of current message\n\n pub fn message_type(&self) -> &'static str {\n\n match self {\n\n Self::SignBytes(_) => \"sign-bytes\",\n\n Self::SignDoc(_) => \"sign-doc\",\n\n }\n\n }\n\n}\n\n\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 81, "score": 35884.42914652474 }, { "content": "//! 
Utilities for signing transactions\n\nuse std::{fmt, str::FromStr, sync::Arc};\n\n\n\nuse anyhow::{anyhow, Error, Result};\n\nuse async_trait::async_trait;\n\n\n\nuse crate::cosmos::crypto::PublicKey;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n/// Supported algorithms for address generation\n\npub enum AddressAlgo {\n\n /// Secp256k1 (tendermint)\n\n Secp256k1,\n\n #[cfg(feature = \"ethermint\")]\n\n /// EthSecp256k1 (ethermint)\n\n EthSecp256k1,\n\n}\n\n\n\nimpl fmt::Display for AddressAlgo {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 82, "score": 35883.286663681676 }, { "content": " match self {\n\n Self::Secp256k1 => write!(f, \"secp256k1\"),\n\n #[cfg(feature = \"ethermint\")]\n\n Self::EthSecp256k1 => write!(f, \"eth-secp256k1\"),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for AddressAlgo {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"secp256k1\" => Ok(Self::Secp256k1),\n\n #[cfg(feature = \"ethermint\")]\n\n \"eth-secp256k1\" => Ok(Self::EthSecp256k1),\n\n _ => Err(anyhow!(\"invalid address generation algorithm: {}\", s)),\n\n }\n\n }\n\n}\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 83, "score": 35877.59474981711 }, { "content": " fn to_account_address(&self) -> Result<String> {\n\n (*self).to_account_address()\n\n }\n\n}\n\n\n\nimpl<T: ToPublicKey + ?Sized> ToPublicKey for Arc<T> {\n\n fn to_public_key(&self) -> Result<PublicKey> {\n\n (**self).to_public_key()\n\n }\n\n\n\n fn get_account_prefix(&self) -> &str {\n\n (**self).get_account_prefix()\n\n }\n\n\n\n fn to_account_address(&self) -> Result<String> {\n\n (**self).to_account_address()\n\n }\n\n}\n\n\n\n/// This trait must be implemented by all the transaction signers (e.g. mnemonic, ledger, etc.)\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 84, "score": 35873.913037860126 }, { "content": "impl AsRef<[u8]> for Message<'_> {\n\n fn as_ref(&self) -> &[u8] {\n\n match self {\n\n Self::SignBytes(bytes) => bytes,\n\n Self::SignDoc(bytes) => bytes,\n\n }\n\n }\n\n}\n\n\n\n/// This trait must be implemented by all the public key providers (e.g. mnemonic, ledger, etc.)\n", "file_path": "solo-machine-core/src/signer.rs", "rank": 85, "score": 35873.58328015358 }, { "content": "//! 
Events generated by solo machine\n\nmod event_handler;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::sync::mpsc::UnboundedSender;\n\n\n\nuse crate::{\n\n cosmos::crypto::PublicKey,\n\n ibc::core::ics24_host::identifier::{ChainId, ChannelId, ClientId, ConnectionId, Identifier},\n\n model::ConnectionDetails,\n\n};\n\n\n\npub use event_handler::*;\n\n\n\n/// Events emitted by IBC service\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(tag = \"type\")]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum Event {\n", "file_path": "solo-machine-core/src/event.rs", "rank": 86, "score": 35530.651862909916 }, { "content": "}\n\n\n\npub(crate) fn notify_event(notifier: &Option<UnboundedSender<Event>>, event: Event) -> Result<()> {\n\n match notifier {\n\n None => Ok(()),\n\n Some(ref notifier) => notifier\n\n .send(event)\n\n .map_err(|err| anyhow!(\"unable to send event to notifier: {}\", err)),\n\n }\n\n}\n", "file_path": "solo-machine-core/src/event.rs", "rank": 87, "score": 35528.187982248775 }, { "content": "\n\n // ----- IBC connection handshake events ----- //\n\n /// Created solo machine client on IBC enabled chain\n\n CreatedSoloMachineClient {\n\n /// Client ID of solo machine client on IBC enabled chain\n\n client_id: ClientId,\n\n },\n\n /// Created tendermint client on solo machine\n\n CreatedTendermintClient {\n\n /// Client ID of IBC enabled chain on solo machine\n\n client_id: ClientId,\n\n },\n\n /// Initialized connection on IBC enabled chain\n\n InitializedConnectionOnTendermint {\n\n /// Connection ID of solo machine client on IBC enabled chain\n\n connection_id: ConnectionId,\n\n },\n\n /// Initialized connection on solo machine\n\n InitializedConnectionOnSoloMachine {\n\n /// Connection ID of IBC enabled chain on solo machine\n", "file_path": "solo-machine-core/src/event.rs", "rank": 88, "score": 35521.93557996633 }, { "content": " ConnectionEstablished {\n\n /// Chain ID of IBC enabled chain\n\n chain_id: ChainId,\n\n /// Connection details\n\n connection_details: ConnectionDetails,\n\n },\n\n\n\n // ----- Chain events ----- //\n\n /// Added new chain metadata to solo machine\n\n ChainAdded {\n\n /// Chain ID\n\n chain_id: ChainId,\n\n },\n\n\n\n // ----- Other events ----- //\n\n /// Warning\n\n Warning {\n\n /// Warning message\n\n message: String,\n\n },\n", "file_path": "solo-machine-core/src/event.rs", "rank": 89, "score": 35521.728410072974 }, { "content": " /// Optional request ID (for tracking purposes)\n\n request_id: Option<String>,\n\n /// Address of account on IBC enabled chain\n\n from_address: String,\n\n /// Amount of tokens minted\n\n amount: u64,\n\n /// Denom of tokens minted\n\n denom: Identifier,\n\n /// Hash of transaction on IBC enabled chain (in hex)\n\n transaction_hash: String,\n\n },\n\n /// Updated signer's public key on IBC enabled change for future messages from solo machine\n\n SignerUpdated {\n\n /// Chain ID of IBC enabled chain\n\n chain_id: ChainId,\n\n /// Old signer's public key\n\n old_public_key: PublicKey,\n\n /// New signer's public key\n\n new_public_key: PublicKey,\n\n },\n", "file_path": "solo-machine-core/src/event.rs", "rank": 90, "score": 35521.66399531697 }, { "content": " // ----- IBC events ----- //\n\n /// Minted tokens on IBC enabled chain\n\n TokensMinted {\n\n /// Chain ID of IBC enabled chain\n\n chain_id: ChainId,\n\n /// Optional request ID (for tracking purposes)\n\n request_id: Option<String>,\n\n /// Address of account on IBC enabled chain\n\n to_address: 
String,\n\n /// Amount of tokens minted\n\n amount: u64,\n\n /// Denom of tokens minted\n\n denom: Identifier,\n\n /// Hash of transaction on IBC enabled chain (in hex)\n\n transaction_hash: String,\n\n },\n\n /// Burnt tokens on IBC enabled chain\n\n TokensBurnt {\n\n /// Chain ID of IBC enabled chain\n\n chain_id: ChainId,\n", "file_path": "solo-machine-core/src/event.rs", "rank": 91, "score": 35520.603830644905 }, { "content": " connection_id: ConnectionId,\n\n },\n\n /// Confirmed connection on IBC enabled chain\n\n ConfirmedConnectionOnTendermint {\n\n /// Connection ID of solo machine client on IBC enabled chain\n\n connection_id: ConnectionId,\n\n },\n\n /// Confirmed connection on solo machine\n\n ConfirmedConnectionOnSoloMachine {\n\n /// Connection ID of IBC enabled chain on solo machine\n\n connection_id: ConnectionId,\n\n },\n\n /// Initialized channel on IBC enabled chain\n\n InitializedChannelOnTendermint {\n\n /// Channel ID of solo machine client on IBC enabled chain\n\n channel_id: ChannelId,\n\n },\n\n /// Close channel on IBC enabled chain\n\n CloseChannelInitOnSoloMachine {\n\n /// Chain ID of IBC enabled chain\n", "file_path": "solo-machine-core/src/event.rs", "rank": 92, "score": 35520.41201587463 }, { "content": " chain_id: String,\n\n /// Channel ID of IBC enabled chain on solo machine\n\n channel_id: ChannelId,\n\n },\n\n /// Initialized channel on solo machine\n\n InitializedChannelOnSoloMachine {\n\n /// Channel ID of IBC enabled chain on solo machine\n\n channel_id: ChannelId,\n\n },\n\n /// Confirmed channel on IBC enabled chain\n\n ConfirmedChannelOnTendermint {\n\n /// Channel ID of solo machine client on IBC enabled chain\n\n channel_id: ChannelId,\n\n },\n\n /// Confirmed channel on solo machine\n\n ConfirmedChannelOnSoloMachine {\n\n /// Channel ID of IBC enabled chain on solo machine\n\n channel_id: ChannelId,\n\n },\n\n /// Connection successfully established\n", "file_path": "solo-machine-core/src/event.rs", "rank": 93, "score": 35517.12866502258 }, { "content": "fn serialize_verifying_key<S>(key: &VerifyingKey, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n hex::serialize_upper(key.to_bytes(), serializer)\n\n}\n\n\n", "file_path": "solo-machine-core/src/cosmos/crypto.rs", "rank": 94, "score": 35451.850589556976 }, { "content": "use anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse solo_machine_core::{event::EventHandler, Event};\n\n\n\npub struct EnvLogger {}\n\n\n\nimpl EnvLogger {\n\n pub fn new() -> Self {\n\n env_logger::init();\n\n\n\n Self {}\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl EventHandler for EnvLogger {\n\n async fn handle(&self, event: Event) -> Result<()> {\n\n match event {\n\n Event::TokensMinted {\n\n chain_id,\n", "file_path": "solo-machine/src/event/env_logger.rs", "rank": 95, "score": 34380.401097445436 }, { "content": "use anyhow::Result;\n\nuse async_trait::async_trait;\n\nuse solo_machine_core::{\n\n event::{EventHandler, HandlerRegistrar},\n\n Event,\n\n};\n\n\n", "file_path": "event-hooks/stdout-logger/src/lib.rs", "rank": 96, "score": 34370.896606279355 }, { "content": " channel_id,\n\n ),\n\n Event::ConfirmedConnectionOnTendermint { connection_id } => log::info!(\n\n \"Confirmed connection on IBC enabled chain [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n Event::ConfirmedConnectionOnSoloMachine { connection_id } => log::info!(\n\n \"Confirmed connection on solo machine [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n Event::InitializedChannelOnTendermint { channel_id } => 
log::info!(\n\n \"Initialized channel on IBC enabled chain [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n Event::InitializedChannelOnSoloMachine { channel_id } => log::info!(\n\n \"Initialized channel on solo machine [Channel ID = {}]\",\n\n channel_id\n\n ),\n\n Event::ConfirmedChannelOnTendermint { channel_id } => log::info!(\n\n \"Confirmed channel on IBC enabled chain [Channel ID = {}]\",\n", "file_path": "solo-machine/src/event/env_logger.rs", "rank": 97, "score": 34369.0860270345 }, { "content": " log::info!(\n\n \"Created solo machine client on IBC enabled chain [Client ID = {}]\",\n\n client_id\n\n )\n\n }\n\n Event::CreatedTendermintClient { client_id } => log::info!(\n\n \"Created tendermint client on solo machine [Client ID = {}]\",\n\n client_id\n\n ),\n\n Event::InitializedConnectionOnTendermint { connection_id } => log::info!(\n\n \"Initialized connection on IBC enabled chain [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n Event::InitializedConnectionOnSoloMachine { connection_id } => log::info!(\n\n \"Initialized connection on solo machine [Connection ID = {}]\",\n\n connection_id\n\n ),\n\n Event::CloseChannelInitOnSoloMachine { chain_id, channel_id } => log::info!(\n\n \"Close channel initialized on solo machine [Chain ID = {}] [Channel ID = {}]\",\n\n chain_id,\n", "file_path": "solo-machine/src/event/env_logger.rs", "rank": 98, "score": 34368.1607445074 }, { "content": " transaction_hash,\n\n } => log::info!(\n\n \"Burnt tokens [Chain ID = {}] [Request ID = {}] [Address = {}] [Amount = {} {}] [Transaction Hash = {}]\",\n\n chain_id,\n\n request_id.unwrap_or_else(|| \"None\".to_string()),\n\n from_address,\n\n amount,\n\n denom,\n\n transaction_hash,\n\n ),\n\n Event::SignerUpdated {\n\n chain_id,\n\n old_public_key: _,\n\n new_public_key: _,\n\n } => log::info!(\n\n \"Successfully updated signer's public key [Chain ID: {}]\",\n\n chain_id\n\n ),\n\n\n\n Event::CreatedSoloMachineClient { client_id } => {\n", "file_path": "solo-machine/src/event/env_logger.rs", "rank": 99, "score": 34366.879914669 } ]
Rust
src/kernel/src/syscall/sync.rs
ariadiamond/twizzler-Rust
5f5d01bac9127ca1d64bb8aa472a04f6634fc3a9
use core::time::Duration; use alloc::{collections::BTreeMap, vec::Vec}; use twizzler_abi::syscall::{ ThreadSync, ThreadSyncError, ThreadSyncReference, ThreadSyncSleep, ThreadSyncWake, }; use x86_64::VirtAddr; use crate::{ obj::{LookupFlags, ObjectRef}, once::Once, spinlock::Spinlock, thread::{current_memory_context, current_thread_ref, CriticalGuard, ThreadRef, ThreadState}, }; struct Requeue { list: Spinlock<BTreeMap<u64, ThreadRef>>, } /* TODO: make this thread-local */ static mut REQUEUE: Once<Requeue> = Once::new(); fn get_requeue_list() -> &'static Requeue { unsafe { REQUEUE.call_once(|| Requeue { list: Spinlock::new(BTreeMap::new()), }) } } pub fn requeue_all() { let requeue = get_requeue_list(); let mut list = requeue.list.lock(); for (_, thread) in list.drain_filter(|_, v| v.reset_sync_sleep_done()) { crate::sched::schedule_thread(thread); } } pub fn add_to_requeue(thread: ThreadRef) { let requeue = get_requeue_list(); requeue.list.lock().insert(thread.id(), thread); } fn finish_blocking(guard: CriticalGuard) { let thread = current_thread_ref().unwrap(); crate::interrupt::with_disabled(|| { thread.set_state(ThreadState::Blocked); drop(guard); crate::sched::schedule(false); thread.set_state(ThreadState::Running); }); } fn get_obj_and_offset(addr: VirtAddr) -> Result<(ObjectRef, usize), ThreadSyncError> { let vmc = current_memory_context().ok_or(ThreadSyncError::Unknown)?; let mapping = { vmc.inner().lookup_object(addr) }.ok_or(ThreadSyncError::InvalidReference)?; let offset = (addr.as_u64() as usize) % (1024 * 1024 * 1024); Ok((mapping.obj.clone(), offset)) } fn get_obj(reference: ThreadSyncReference) -> Result<(ObjectRef, usize), ThreadSyncError> { Ok(match reference { ThreadSyncReference::ObjectRef(id, offset) => { let obj = match crate::obj::lookup_object(id, LookupFlags::empty()) { crate::obj::LookupResult::Found(o) => o, _ => return Err(ThreadSyncError::InvalidReference), }; (obj, offset) } ThreadSyncReference::Virtual(addr) => get_obj_and_offset(VirtAddr::new(addr as u64))?, }) } struct SleepEvent { obj: ObjectRef, offset: usize, did_sleep: bool, } fn prep_sleep(sleep: &ThreadSyncSleep, first_sleep: bool) -> Result<SleepEvent, ThreadSyncError> { let (obj, offset) = get_obj(sleep.reference)?; /* logln!( "{} sleep {} {:x}", current_thread_ref().unwrap().id(), obj.id(), offset ); if let ThreadSyncReference::Virtual(p) = &sleep.reference { logln!(" => {:p} {}", *p, unsafe { (**p).load(core::sync::atomic::Ordering::SeqCst) }); } */ let did_sleep = obj.setup_sleep_word(offset, sleep.op, sleep.value, first_sleep); Ok(SleepEvent { obj, offset, did_sleep, }) } fn undo_sleep(sleep: SleepEvent) { sleep.obj.remove_from_sleep_word(sleep.offset); } fn wakeup(wake: &ThreadSyncWake) -> Result<usize, ThreadSyncError> { let (obj, offset) = get_obj(wake.reference)?; Ok(obj.wakeup_word(offset, wake.count)) } fn thread_sync_cb_timeout(thread: ThreadRef) { if thread.reset_sync_sleep() { add_to_requeue(thread); } requeue_all(); } pub fn sys_thread_sync( ops: &mut [ThreadSync], timeout: Option<&mut Duration>, ) -> Result<usize, ThreadSyncError> { let mut ready_count = 0; let mut unsleeps = Vec::new(); for op in ops { match op { ThreadSync::Sleep(sleep, result) => match prep_sleep(sleep, unsleeps.is_empty()) { Ok(se) => { *result = Ok(if se.did_sleep { 0 } else { 1 }); if se.did_sleep { unsleeps.push(se); } else { ready_count += 1; } } Err(x) => *result = Err(x), }, ThreadSync::Wake(wake, result) => { /* if let ThreadSyncReference::Virtual(p) = &wake.reference { logln!(" wake => {:p} {}", *p, unsafe { 
(**p).load(core::sync::atomic::Ordering::SeqCst) }); } */ match wakeup(wake) { Ok(count) => { *result = Ok(count); if count > 0 { ready_count += 1; } } Err(x) => { *result = Err(x); } } } } } let thread = current_thread_ref().unwrap(); { let guard = thread.enter_critical(); if !unsleeps.is_empty() { if let Some(timeout) = timeout { crate::clock::register_timeout_callback( timeout.as_nanos() as u64, thread_sync_cb_timeout, thread.clone(), ); } thread.set_sync_sleep_done(); } requeue_all(); if !unsleeps.is_empty() { finish_blocking(guard); } else { drop(guard); } } for op in unsleeps { undo_sleep(op); } Ok(ready_count) }
use core::time::Duration; use alloc::{collections::BTreeMap, vec::Vec}; use twizzler_abi::syscall::{ ThreadSync, ThreadSyncError, ThreadSyncReference, ThreadSyncSleep, ThreadSyncWake, }; use x86_64::VirtAddr; use crate::{ obj::{LookupFlags, ObjectRef}, once::Once, spinlock::Spinlock, thread::{current_memory_context, current_thread_ref, CriticalGuard, ThreadRef, ThreadState}, }; struct Requeue { list: Spinlock<BTreeMap<u64, ThreadRef>>, } /* TODO: make this thread-local */ static mut REQUEUE: Once<Requeue> = Once::new(); fn get_requeue_list() -> &'static Requeue { unsafe { REQUEUE.call_once(|| Requeue { list: Spinlock::new(BTreeMap::new()), }) } } pub fn requeue_all() { let requeue = get_requeue_list(); let mut list = requeue.list.lock(); for (_, thread) in list.drain_filter(|_, v| v.reset_sync_sleep_done()) { crate::sched::schedule_thread(thread); } } pub fn add_to_requeue(thread: ThreadRef) { let requeue = get_requeue_list(); requeue.list.lock().insert(thread.id(), thread); } fn finish_blocking(guard: CriticalGuard) { let thread = current_thread_ref().unwrap(); crate::interrupt::with_disabled(|| { thread.set_state(ThreadState::Blocked); drop(guard); crate::sched::schedule(false); thread.set_state(ThreadState::Running); }); } fn get_obj_and_offset(addr: VirtAddr) -> Result<(ObjectRef, usize), ThreadSyncError> { let vmc = current_memory_context().ok_or(ThreadSyncError::Unknown)?; let mapping = { vmc.inner().lookup_object(addr) }.ok_or(ThreadSyncError::InvalidReference)?; let offset = (addr.as_u64() as usize) % (1024 * 1024 * 1024); Ok((mapping.obj.clone(), offset)) } fn get_obj(reference: ThreadSyncReference) -> Result<(ObjectRef, usize), ThreadSyncError> { Ok(match reference { ThreadSyncReference::ObjectRef(id, offset) => { let obj = match crate::obj::lookup_object(id, LookupFlags::empty()) { crate::obj::LookupResult::Found(o) => o, _ => return Err(ThreadSyncError::InvalidReference), }; (obj, offset) } ThreadSyncReference::Virtual(addr) => get_obj_and_offset(VirtAddr::new(addr as u64))?, }) } struct SleepEvent { obj: ObjectRef, offset: usize, did_sleep: bool, } fn prep_sleep(sleep: &ThreadSyncSleep, first_sleep: bool) -> Result<SleepEvent, ThreadSyncError> { let (obj, offset) = get_obj(sleep.referenc
fn undo_sleep(sleep: SleepEvent) { sleep.obj.remove_from_sleep_word(sleep.offset); } fn wakeup(wake: &ThreadSyncWake) -> Result<usize, ThreadSyncError> { let (obj, offset) = get_obj(wake.reference)?; Ok(obj.wakeup_word(offset, wake.count)) } fn thread_sync_cb_timeout(thread: ThreadRef) { if thread.reset_sync_sleep() { add_to_requeue(thread); } requeue_all(); } pub fn sys_thread_sync( ops: &mut [ThreadSync], timeout: Option<&mut Duration>, ) -> Result<usize, ThreadSyncError> { let mut ready_count = 0; let mut unsleeps = Vec::new(); for op in ops { match op { ThreadSync::Sleep(sleep, result) => match prep_sleep(sleep, unsleeps.is_empty()) { Ok(se) => { *result = Ok(if se.did_sleep { 0 } else { 1 }); if se.did_sleep { unsleeps.push(se); } else { ready_count += 1; } } Err(x) => *result = Err(x), }, ThreadSync::Wake(wake, result) => { /* if let ThreadSyncReference::Virtual(p) = &wake.reference { logln!(" wake => {:p} {}", *p, unsafe { (**p).load(core::sync::atomic::Ordering::SeqCst) }); } */ match wakeup(wake) { Ok(count) => { *result = Ok(count); if count > 0 { ready_count += 1; } } Err(x) => { *result = Err(x); } } } } } let thread = current_thread_ref().unwrap(); { let guard = thread.enter_critical(); if !unsleeps.is_empty() { if let Some(timeout) = timeout { crate::clock::register_timeout_callback( timeout.as_nanos() as u64, thread_sync_cb_timeout, thread.clone(), ); } thread.set_sync_sleep_done(); } requeue_all(); if !unsleeps.is_empty() { finish_blocking(guard); } else { drop(guard); } } for op in unsleeps { undo_sleep(op); } Ok(ready_count) }
e)?; /* logln!( "{} sleep {} {:x}", current_thread_ref().unwrap().id(), obj.id(), offset ); if let ThreadSyncReference::Virtual(p) = &sleep.reference { logln!(" => {:p} {}", *p, unsafe { (**p).load(core::sync::atomic::Ordering::SeqCst) }); } */ let did_sleep = obj.setup_sleep_word(offset, sleep.op, sleep.value, first_sleep); Ok(SleepEvent { obj, offset, did_sleep, }) }
function_block-function_prefixed
[]
Rust
src/render.rs
JorgenPo/rust-text-rpg
5e8f489741628062503ff1814535384b1692929f
use termion::terminal_size; use termion::{color, cursor, clear, style}; use std::io::{Error, Write, Stdout, StdoutLock}; use termion::raw::{IntoRawMode, RawTerminal}; use std::cmp::max; #[doc(hidden)] pub fn _print(args: ::core::fmt::Arguments) { use core::fmt::Write; std::io::stdout().write_fmt(args).expect("Failed to write"); } #[macro_export] macro_rules! render { ($($arg:tt)*) => ( _print(format_args!($($arg)*)); std::io::stdout().flush().unwrap(); ); } pub struct TermSize { pub width: u16, pub height: u16 } impl Default for TermSize { fn default() -> Self { TermSize{ width: 80, height: 120, } } } pub enum Coordinate { Absolute(u16), Centered, Percent(u8), FromBorder(u16), } pub struct Position { pub x: Coordinate, pub y: Coordinate, } impl Position { pub fn from(x: u16, y: u16) -> Self { Position { x: Coordinate::Absolute(x), y: Coordinate::Absolute(y) } } } pub struct Render { pub term_size: TermSize, pub clear_color: Box<dyn color::Color>, pub hide_cursor: bool } pub trait Drawable { fn draw(&self) -> String; fn get_width(&self) -> u16; fn get_height(&self) -> u16; fn get_position(&self) -> &Position; fn set_position(&mut self, pos: Position); } impl Render { pub fn new() -> Self { let mut term_size = match terminal_size() { Ok((width, height)) => TermSize{width, height}, Err(err) => TermSize::default(), }; if term_size.width == 0 || term_size.height == 0 { term_size = TermSize::default(); } Render { term_size, clear_color: Box::new(color::Black), hide_cursor: true } } pub fn clear_screen(&mut self) { render!("{}{}{}", color::Bg(self.clear_color.as_ref()), clear::All, cursor::Goto(1, 1)); if self.hide_cursor { render!("{}", cursor::Hide); } } fn get_middle_x<T: Drawable> (&self, drawable: &T) -> u16 { let half_width = drawable.get_width() / 2; let center_x = self.term_size.width / 2; center_x - half_width + 1 } fn get_middle_y<T: Drawable> (&self, drawable: &T) -> u16 { let half_height = drawable.get_height() / 2; let center_y = self.term_size.height / 2; center_y - half_height + 1 } pub fn set_cursor_position(&mut self, coord: (u16, u16)) { render!("{}", cursor::Goto(coord.0, coord.1)); } pub fn set_pixel_color(&mut self, coord: (u16, u16), color: Box<dyn color::Color>) { print!("{}{} ", cursor::Goto(coord.0, coord.1), color::Bg(color.as_ref())); } pub fn draw<T: Drawable>(&mut self, drawable: &T) { let position = drawable.get_position(); let x = match position.x { Coordinate::Absolute(x) => max(x, 1), Coordinate::Centered => self.get_middle_x(drawable), Coordinate::Percent(percent) => max(self.term_size.width, self.term_size.width * percent as u16 / 100), Coordinate::FromBorder(x) => max(1, self.term_size.width - x) }; let y = match position.y { Coordinate::Absolute(y) => max(y, 1), Coordinate::Centered => self.get_middle_y(drawable), Coordinate::Percent(percent) => max(self.term_size.height, self.term_size.height * percent as u16 / 100), Coordinate::FromBorder(y) => max(1, self.term_size.height - y) }; render!("{}{}", cursor::Goto(x, y), drawable.draw()); } pub fn draw_raw(&mut self, string: &str) { render!("{}", string); } pub fn flash(&self) { std::io::stdout().flush().unwrap(); } } impl Drop for Render { fn drop(&mut self) { print!("{}{}{}{}", clear::All, style::Reset, cursor::Show, cursor::Goto(1, 1)); } }
use termion::terminal_size; use termion::{color, cursor, clear, style}; use std::io::{Error, Write, Stdout, StdoutLock}; use termion::raw::{IntoRawMode, RawTerminal}; use std::cmp::max; #[doc(hidden)] pub fn _print(args: ::core::fmt::Arguments) { use core::fmt::Write; std::io::stdout().write_fmt(args).expect("Failed to write"); } #[macro_export] macro_rules! render { ($($arg:tt)*) => ( _print(format_args!($($arg)*)); std::io::stdout().flush().unwrap(); ); } pub struct TermSize { pub width: u16, pub height: u16 } impl Default for TermSize { fn default() -> Self { TermSize{ width: 80, height: 120, } } } pub enum Coordinate { Absolute(u16), Centered, Percent(u8), FromBorder(u16), } pub struct Position { pub x: Coordinate, pub y: Coordinate, } impl Position { pub fn from(x: u16, y: u16) -> Self { Position { x: Coordinate::Absolute(x), y: Coordinate::Absolute(y) } } } pub struct Render { pub term_size: TermSize, pub clear_color: Box<dyn color::Color>, pub hide_cursor: bool } pub trait Drawable { fn draw(&self) -> String; fn get_width(&self) -> u16; fn get_height(&self) -> u16; fn get_position(&self) -> &Position; fn set_position(&mut self, pos: Position); } impl Render { pub fn new() -> Self { let mut term_size = match terminal_size() { Ok((width, height)) => TermSize{width, height}, Err(err) => TermSize::default(), }; if term_size.width == 0 || term_size.height == 0 { term_size = TermSize::default(); } Render { term_size, clear_color: Box::new(color::Black), hide_cursor: true } } pub fn clear_screen(&mut self) { render!("{}{}{}", color::Bg(self.clear_color.as_ref()), clear::All, cursor::Goto(1, 1)); if self.hide_cursor { render!("{}", cursor::Hide); } } fn get_middle_x<T: Drawable> (&self, drawable: &T) -> u16 { let half_width = drawable.get_width() / 2; let center_x = self.term_size.width / 2; center_x - half_width + 1 } fn get_middle_y<T: Drawable> (&self, drawable: &T) -> u16 { let half_height = drawable.get_height() / 2; let center_y = self.term_size.height / 2; center_y - half_height + 1 } pub fn set_cursor_position(&mut self, coord: (u16, u16)) { render!("{}", cursor::Goto(coord.0, coord.1)); } pub fn set_pixel_color(&mut self, coord: (u16, u16), color: Box<dyn color::Color>) { print!("{}{} ", cursor::Goto(coord.0, coord.1), color::Bg(color.as_ref())); }
pub fn draw_raw(&mut self, string: &str) { render!("{}", string); } pub fn flash(&self) { std::io::stdout().flush().unwrap(); } } impl Drop for Render { fn drop(&mut self) { print!("{}{}{}{}", clear::All, style::Reset, cursor::Show, cursor::Goto(1, 1)); } }
pub fn draw<T: Drawable>(&mut self, drawable: &T) { let position = drawable.get_position(); let x = match position.x { Coordinate::Absolute(x) => max(x, 1), Coordinate::Centered => self.get_middle_x(drawable), Coordinate::Percent(percent) => max(self.term_size.width, self.term_size.width * percent as u16 / 100), Coordinate::FromBorder(x) => max(1, self.term_size.width - x) }; let y = match position.y { Coordinate::Absolute(y) => max(y, 1), Coordinate::Centered => self.get_middle_y(drawable), Coordinate::Percent(percent) => max(self.term_size.height, self.term_size.height * percent as u16 / 100), Coordinate::FromBorder(y) => max(1, self.term_size.height - y) }; render!("{}{}", cursor::Goto(x, y), drawable.draw()); }
function_block-full_function
[ { "content": "pub trait Logger {\n\n fn info(&mut self, message: &str);\n\n fn warn(&mut self, message: &str);\n\n}", "file_path": "src/game/loggers.rs", "rank": 2, "score": 61792.36740350007 }, { "content": "fn lerp_color(start: color::Rgb, end: color::Rgb, k: f32) -> color::Rgb {\n\n\n\n let r = start.0 as f32 + ((end.0 as f32 - start.0 as f32) * k);\n\n let g = start.1 as f32 + ((end.1 as f32 - start.1 as f32) * k);\n\n let b = start.2 as f32 + ((end.2 as f32 - start.2 as f32) * k);\n\n\n\n color::Rgb(r as u8, g as u8, b as u8)\n\n}\n\n\n\nimpl PlayState {\n\n pub fn new() -> Self {\n\n PlayState{}\n\n }\n\n}\n\n\n\nconst SPLASH_DURATION: Duration = Duration::from_secs(1);\n\n\n\nimpl super::PlayState for PlayState {\n\n\n\n fn play(&mut self, game_state: &mut GlobalState) -> PlayResult {\n", "file_path": "src/game/start_splash.rs", "rank": 3, "score": 56318.52491343356 }, { "content": "fn main() {\n\n // Init logger\n\n let appender = FileAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(\"{l}: {m}\\n\")))\n\n .append(false)\n\n .build(\"game.log\")\n\n .unwrap();\n\n\n\n let config = Config::builder()\n\n .appender(Appender::builder().build(\"filelog\", Box::new(appender)))\n\n .build(Root::builder().appender(\"filelog\").build(LevelFilter::Trace))\n\n .unwrap();\n\n\n\n let _handle = log4rs::init_config(config).unwrap();\n\n\n\n let mut game_state = game::Game::new();\n\n if let Err(error) = game_state.run() {\n\n println!(\"Game crashed: {}\", error);\n\n process::exit(1);\n\n }\n\n\n\n println!(\"Thanks for playing! Goodbye!\");\n\n}\n", "file_path": "src/main.rs", "rank": 4, "score": 34170.4748443155 }, { "content": "enum PlayResult {\n\n /// Terminate the GAME\n\n Shutdown,\n\n /// Pop the last screen. If the last screen is the only screen, then it is equal to\n\n /// Shutdown.\n\n Pop,\n\n /// Push a new state to the stack\n\n Push(Box<dyn PlayState>),\n\n /// Replace the current state with another\n\n Switch(Box<dyn PlayState>),\n\n /// Do nothing\n\n Still,\n\n}\n\n\n", "file_path": "src/game/mod.rs", "rank": 5, "score": 33743.98099680497 }, { "content": "#[derive(Clone)]\n\nstruct Tile {\n\n bg_color: (u8, u8, u8),\n\n fg_color: (u8, u8, u8),\n\n character: char\n\n}\n\n\n\nimpl Tile {\n\n fn new(char: char) -> Tile {\n\n Tile {\n\n bg_color: (0, 200, 0),\n\n fg_color: (0, 0, 0),\n\n character: char\n\n }\n\n }\n\n\n\n fn bg_color(&self) -> color::Bg<color::Rgb> {\n\n color::Bg(color::Rgb(self.bg_color.0, self.bg_color.1, self.bg_color.2))\n\n }\n\n\n\n fn fg_color(&self) -> color::Fg<color::Rgb> {\n", "file_path": "src/game/map.rs", "rank": 6, "score": 33612.50921013545 }, { "content": "struct Map {\n\n tiles: Vec<Vec<Tile>>,\n\n position: Position,\n\n}\n\n\n", "file_path": "src/game/map.rs", "rank": 7, "score": 33612.50921013545 }, { "content": "/// Represents some GAME state (e.g. 
menu, battle and so on)\n\ntrait PlayState {\n\n fn play(&mut self, game_state: &mut GlobalState) -> PlayResult;\n\n fn to_string(&self) -> String;\n\n fn on_key_pressed(&mut self, game_state: &mut GlobalState, key: Key) -> PlayResult {\n\n PlayResult::Still\n\n }\n\n}\n\n\n\npub struct GlobalState {\n\n render: render::Render,\n\n input: input::Controller,\n\n sound: sound::Manager,\n\n}\n\n\n\nimpl GlobalState {\n\n pub fn new() -> GlobalState {\n\n\n\n GlobalState {\n\n render: render::Render::new(),\n\n input: input::Controller::new(),\n", "file_path": "src/game/mod.rs", "rank": 8, "score": 32507.372210291716 }, { "content": "#[derive(Debug)]\n\nstruct MapParseError {\n\n text: String\n\n}\n\n\n\nimpl MapParseError {\n\n fn new(message: &str) -> MapParseError {\n\n MapParseError {\n\n text: String::from(message)\n\n }\n\n }\n\n}\n\n\n\nimpl Display for MapParseError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(format!(\"Failed to parse map file: {}\", self.text).as_str())\n\n }\n\n}\n\n\n\nimpl Error for MapParseError {\n\n\n", "file_path": "src/game/map.rs", "rank": 9, "score": 31512.630206135902 }, { "content": "fn panic_handler(info: &PanicInfo) {\n\n error!(\"Panic payload is {:?}\", info.payload().type_id());\n\n\n\n let backtrace = Backtrace::new();\n\n if let Some(s) = info.payload().downcast_ref::<String>() {\n\n if let Some(location) = info.location() {\n\n error!(\"Panic: {} in {}\", s, location);\n\n } else {\n\n error!(\"Panic: {}\", s);\n\n }\n\n } else {\n\n if let Some(location) = info.location() {\n\n error!(\"Some critical error occurred in {}:{}!\", location.file(), location.line());\n\n } else {\n\n error!(\"Some critical error occurred!\");\n\n }\n\n }\n\n\n\n error!(\"Backtrace: {:?}\", backtrace);\n\n}", "file_path": "src/game/mod.rs", "rank": 10, "score": 26413.963434515575 }, { "content": "# Game backlog\n\n\n\n### Implement GAME state with translation between states (like in Veloren project)\n\n - Character creation screen\n\n - Main Game screen\n\n - Stats screen\n\n - Inventory screen\n\n \n\n### ASCI graphics\n\n\n\nTry to create ASCI graphics library to display some pseudo graphics\n\n\n\n - Use libraries such as \"colored\"\n\n \n", "file_path": "IDEAS.md", "rank": 18, "score": 13112.27880693445 }, { "content": "# Rust text RPG\n\nLearning project in 100% columbian Rust. \n\n\n\n\n", "file_path": "README.md", "rank": 19, "score": 13109.175928068624 }, { "content": "use crate::render::{Drawable, Render, Position};\n\n\n\nuse termion::color::Color;\n\nuse termion::color;\n\nuse std::cmp::max;\n\n\n\npub struct Label {\n\n pub color: Box<dyn Color>,\n\n pub selected_color: Box<dyn Color>,\n\n pub text: String,\n\n pub position: Position,\n\n pub selected: bool\n\n}\n\n\n\nimpl Label {\n\n pub fn new(text: &str) -> Self {\n\n let default = Label::default();\n\n Label { text: String::from(text), .. 
default}\n\n }\n\n\n", "file_path": "src/widgets/label.rs", "rank": 20, "score": 20.00994369096677 }, { "content": " pub fn set_color(&mut self, color: Box<dyn Color>) -> &mut Self {\n\n self.color = color;\n\n self\n\n }\n\n\n\n pub fn set_selected_color(&mut self, color: Box<dyn Color>) -> &mut Self {\n\n self.selected_color = color;\n\n self\n\n }\n\n\n\n pub fn set_selected(&mut self, selected: bool) -> &mut Self {\n\n self.selected = selected;\n\n self\n\n }\n\n}\n\n\n\nimpl Drawable for Label {\n\n fn draw(&self) -> String {\n\n let color = match self.selected {\n\n true => self.selected_color.as_ref(),\n", "file_path": "src/widgets/label.rs", "rank": 21, "score": 18.34769614147335 }, { "content": " color::Fg(color::Rgb(self.fg_color.0, self.fg_color.1, self.fg_color.2))\n\n }\n\n}\n\n\n\nimpl Drawable for Tile {\n\n fn draw(&self) -> String {\n\n unimplemented!()\n\n }\n\n\n\n fn get_width(&self) -> u16 {\n\n 1\n\n }\n\n\n\n fn get_height(&self) -> u16 {\n\n 1\n\n }\n\n\n\n fn get_position(&self) -> &Position {\n\n unimplemented!()\n\n }\n\n\n\n fn set_position(&mut self, pos: Position) {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "src/game/map.rs", "rank": 22, "score": 17.44411899131782 }, { "content": "\n\n rust_label.set_position(Position {\n\n x: Coordinate::Centered,\n\n y: Coordinate::Absolute(game_state.render.term_size.height / 2),\n\n });\n\n\n\n rpg_label.set_position(Position {\n\n x: Coordinate::Centered,\n\n y: Coordinate::Absolute(game_state.render.term_size.height / 2 + 1),\n\n });\n\n\n\n author_label.set_position(Position {\n\n x: Coordinate::Centered,\n\n y: Coordinate::Absolute(game_state.render.term_size.height - 3)\n\n });\n\n\n\n for _i in 0..255 {\n\n game_state.render.clear_color =\n\n Box::new(termion::color::Rgb(brightness, brightness, brightness));\n\n game_state.render.clear_screen();\n", "file_path": "src/game/exit_splash.rs", "rank": 23, "score": 16.654645143719378 }, { "content": " game_state.render.set_cursor_position((1, 1));\n\n\n\n let finish_color = color::Rgb(10, 10, 10);\n\n let start_color = color::Rgb(50, 50, 50);\n\n\n\n let mut org_label = Label::new(\"Breeze software presents\");\n\n let mut game_label = Label::new(\"T H E | G A M E\");\n\n\n\n game_label.set_color(Box::new(color::Rgb(255, 255, 255)));\n\n org_label.set_color(Box::new(color::Rgb(255, 255, 255)));\n\n game_label.set_position(Position {\n\n x: Centered,\n\n y: Absolute(game_state.render.term_size.height / 2),\n\n });\n\n org_label.set_position(Position {\n\n x: Centered,\n\n y: Absolute(game_state.render.term_size.height / 2 - 2),\n\n });\n\n\n\n for x in 1..game_state.render.term_size.width {\n", "file_path": "src/game/start_splash.rs", "rank": 24, "score": 15.513130074553562 }, { "content": " }\n\n}\n\n\n\nimpl Default for Label {\n\n fn default() -> Self {\n\n Label {\n\n color: Box::new(color::Black),\n\n selected_color: Box::new(color::Green),\n\n text: \"\".to_string(),\n\n position: Position::from(1, 1),\n\n selected: false\n\n }\n\n }\n\n}", "file_path": "src/widgets/label.rs", "rank": 25, "score": 15.016703628453225 }, { "content": " bg_music_started: false\n\n };\n\n\n\n for (i, button) in this.buttons.iter_mut().enumerate() {\n\n let y = state.render.term_size.height as f32 * 0.2\n\n + (i * 5) as f32;\n\n\n\n button.set_position(Position{\n\n x: Coordinate::Centered,\n\n y: Coordinate::Absolute(y as u16)\n\n });\n\n }\n\n\n\n this.buttons[this.selected_index as usize].set_selected(true);\n\n\n\n this\n\n }\n\n\n\n fn on_button_pressed(&mut self, button: 
u8, game_state: &mut GlobalState) -> PlayResult {\n\n return match button {\n", "file_path": "src/game/menu.rs", "rank": 26, "score": 14.340828115002587 }, { "content": "//! Starting splash screen\n\n\n\nuse crate::game::{GlobalState, PlayResult};\n\nuse termion::color;\n\nuse std::time::Duration;\n\nuse crate::widgets::label::Label;\n\nuse crate::render::{Drawable, Position};\n\nuse crate::render::Coordinate::{Centered, Absolute};\n\n\n\npub struct PlayState {\n\n\n\n}\n\n\n", "file_path": "src/game/start_splash.rs", "rank": 27, "score": 14.185076474246216 }, { "content": "//! Exit splash screen state\n\n\n\nuse crate::game::{GlobalState, PlayResult};\n\nuse std::time::Duration;\n\nuse crate::widgets::label::Label;\n\nuse crate::render::{Drawable, Position, Coordinate};\n\nuse std::io::Write;\n\n\n\npub struct PlayState {\n\n\n\n}\n\n\n\nimpl super::PlayState for PlayState {\n\n fn play(&mut self, game_state: &mut GlobalState) -> PlayResult {\n\n let mut brightness: u8 = 255;\n\n let mut inverted: u8 = 0;\n\n\n\n let mut rust_label = Label::new(\"R U S T\");\n\n let mut rpg_label = Label::new(\"R P G\");\n\n let mut author_label = Label::new(\"Made by George Popoff using Rust:3\");\n", "file_path": "src/game/exit_splash.rs", "rank": 28, "score": 13.442395054997773 }, { "content": " false => self.color.as_ref()\n\n };\n\n\n\n format!(\"{}{}\", color::Fg(color), self.text)\n\n }\n\n\n\n fn get_width(&self) -> u16 {\n\n max(self.text.len() as u16, 1)\n\n }\n\n\n\n fn get_height(&self) -> u16 {\n\n 1\n\n }\n\n\n\n fn get_position(&self) -> &Position {\n\n &self.position\n\n }\n\n\n\n fn set_position(&mut self, pos: Position) {\n\n self.position = pos;\n", "file_path": "src/widgets/label.rs", "rank": 29, "score": 13.214732861588747 }, { "content": " }\n\n}\n\n\n\npub struct PlayState {\n\n current_map: Map,\n\n need_update: bool\n\n}\n\n\n\nimpl super::PlayState for PlayState {\n\n fn play(&mut self, game_state: &mut GlobalState) -> PlayResult {\n\n self.handle_input(&game_state.input);\n\n\n\n if self.need_update {\n\n game_state.render.clear_color = Box::new(color::Black);\n\n game_state.render.clear_screen();\n\n\n\n self.render_map(&mut game_state.render);\n\n self.need_update = false;\n\n }\n\n\n", "file_path": "src/game/map.rs", "rank": 30, "score": 12.885832222813441 }, { "content": "use crate::game::{GlobalState, PlayResult};\n\nuse crate::widgets::label::Label;\n\nuse crate::render::{Drawable, Position, Coordinate};\n\n\n\nuse termion::color;\n\nuse termion::event::Key;\n\nuse std::time::Duration;\n\n\n\nuse super::map;\n\nuse super::exit_splash;\n\n\n\n/// Menu GAME state implementation\n\n\n\npub struct PlayState {\n\n buttons: [Label; 3],\n\n selected_index: i8,\n\n bg_music_started: bool\n\n}\n\n\n\nconst BUTTON_NEW_GAME: u8 = 0;\n", "file_path": "src/game/menu.rs", "rank": 31, "score": 12.508802964257924 }, { "content": " }\n\n\n\n /// Starts the GAME\n\n pub fn run(&mut self) -> Result<(), String> {\n\n let mut stdout = std::io::stdout().into_raw_mode();\n\n\n\n if stdout.is_err() {\n\n return Err(format!(\"Failed to set terminal into raw mode\"));\n\n }\n\n\n\n let mut stdout = screen::AlternateScreen::from(stdout.unwrap());\n\n let mut global_state = GlobalState::new();\n\n\n\n self.states.push(Box::new(start_splash::PlayState::new()));\n\n\n\n std::panic::set_hook(Box::new(panic_handler));\n\n\n\n let mut playing = true;\n\n while playing {\n\n let mut current_state = match self.states.last_mut() {\n", "file_path": "src/game/mod.rs", "rank": 32, "score": 11.917559067059312 }, 
{ "content": " panic!(\"Failed to parse start map: {}\", err);\n\n });\n\n\n\n PlayState {\n\n current_map: start_map,\n\n need_update: true\n\n }\n\n }\n\n\n\n fn handle_input(&mut self, input: &Controller) {\n\n\n\n }\n\n\n\n fn render_map(&mut self, render: &mut Render) {\n\n for (i, row) in self.current_map.tiles.iter().enumerate() {\n\n render.set_cursor_position((1, (i + 1) as u16));\n\n\n\n for tile in row {\n\n render.draw_raw(\n\n format!(\"{}{}\", tile.bg_color(), tile.character).as_str());\n\n }\n\n }\n\n }\n\n}", "file_path": "src/game/map.rs", "rank": 33, "score": 11.20345211005511 }, { "content": " fn render(&mut self, game_state: &mut GlobalState) {\n\n\n\n for button in &self.buttons {\n\n game_state.render.draw(button);\n\n }\n\n }\n\n}\n\n\n\nconst BG_MUSIC: &'static str = \"assets/sound/menu_bg.wav\";\n\n\n\nimpl super::PlayState for PlayState {\n\n\n\n fn play(&mut self, game_state: &mut GlobalState) -> PlayResult {\n\n if !self.bg_music_started {\n\n game_state.sound.play(BG_MUSIC).unwrap();\n\n self.bg_music_started = true;\n\n }\n\n\n\n game_state.render.clear_color = Box::new(color::Rgb(255, 255, 255));\n\n game_state.render.clear_screen();\n", "file_path": "src/game/menu.rs", "rank": 34, "score": 10.914672926060629 }, { "content": "}\n\n\n\nconst MAGIC: &'static str = \"MAP\";\n\nconst WIDTH: u16 = 80;\n\nconst HEIGHT: u16 = 40;\n\n\n\nconst COLOR_DARK_GREEN: (u8, u8, u8) = (0, 100, 0);\n\nconst COLOR_BLACK: (u8, u8, u8) = (0, 0, 0);\n\nconst COLOR_BROWN: (u8, u8, u8) = (150, 40, 40);\n\n\n\nlazy_static! {\n\n static ref DEFAULT_TILE_MAP: HashMap<char, Tile> = {\n\n let mut map = HashMap::new();\n\n\n\n map.insert('#', Tile {\n\n fg_color: COLOR_BLACK,\n\n bg_color: COLOR_DARK_GREEN,\n\n character: ' '\n\n });\n\n\n", "file_path": "src/game/map.rs", "rank": 35, "score": 10.845387998661053 }, { "content": " for y in 1..game_state.render.term_size.height {\n\n let k = y as f32 / game_state.render.term_size.width as f32;\n\n let color = lerp_color(start_color, finish_color, k);\n\n\n\n game_state.render.set_pixel_color((x, y), Box::new(color));\n\n }\n\n }\n\n\n\n game_state.render.draw(&game_label);\n\n game_state.render.draw(&org_label);\n\n\n\n game_state.render.flash();\n\n std::thread::sleep(SPLASH_DURATION);\n\n\n\n let to_white_diff = 255 - finish_color.0;\n\n let mut current_color = finish_color.0;\n\n for x in 1..to_white_diff {\n\n game_state.render.clear_color =\n\n Box::new(color::Rgb(current_color, current_color, current_color));\n\n current_color = current_color + 1;\n", "file_path": "src/game/start_splash.rs", "rank": 36, "score": 10.73150920527566 }, { "content": "use termion::input::{TermRead, Keys};\n\nuse termion::event::{Event, Key};\n\nuse std::collections::HashMap;\n\nuse termion::AsyncReader;\n\nuse std::rc::Rc;\n\n\n\npub struct Controller {\n\n reader: Keys<AsyncReader>\n\n}\n\n\n\nimpl Controller {\n\n pub fn new() -> Self {\n\n Controller{\n\n reader: termion::async_stdin().keys(),\n\n }\n\n }\n\n\n\n pub fn get_pressed_key(&mut self) -> Option<Key> {\n\n return match self.reader.next() {\n\n Some(key) => Some(key.unwrap()),\n\n None => None,\n\n };\n\n }\n\n}", "file_path": "src/game/input.rs", "rank": 37, "score": 10.684582561451755 }, { "content": "//! 
Map screen where you can move\n\n\n\nuse crate::game::{GlobalState, PlayResult};\n\nuse crate::render::{Render, Drawable, Position};\n\nuse crate::game::input::Controller;\n\n\n\nuse termion::color::Color;\n\nuse termion::color;\n\n\n\nuse std::error::Error;\n\nuse std::{fs, io};\n\nuse std::fmt::{Display, Formatter};\n\nuse std::fs::File;\n\nuse std::io::BufRead;\n\nuse log::{error};\n\nuse termion::event::Key;\n\nuse std::collections::HashMap;\n\nuse std::borrow::{Borrow, BorrowMut};\n\nuse lazy_static::lazy_static;\n\n\n\n#[derive(Clone)]\n", "file_path": "src/game/map.rs", "rank": 38, "score": 9.442245722426314 }, { "content": "//! Sound subsystem code\n\n\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::collections::HashMap;\n\nuse std::io::BufReader;\n\nuse rodio::{Source, Device, Decoder};\n\nuse rodio::source::Buffered;\n\nuse std::ops::Deref;\n\n\n\n\n\npub struct Manager {\n\n device: Option<rodio::Device>,\n\n sound_cache: HashMap<String, Buffered<Decoder<BufReader<File>>>>,\n\n}\n\n\n\nimpl Manager {\n\n pub fn new() -> Self {\n\n let device = rodio::default_output_device();\n\n Manager {\n", "file_path": "src/game/sound.rs", "rank": 39, "score": 9.082088220154683 }, { "content": " map.insert('X', Tile {\n\n fg_color: COLOR_BLACK,\n\n bg_color: COLOR_BROWN,\n\n character: ' '\n\n });\n\n\n\n map\n\n };\n\n}\n\n\n\nconst DEFAULT_TILE: Tile = Tile {\n\n bg_color: COLOR_BLACK,\n\n fg_color: COLOR_BLACK,\n\n character: ' '\n\n};\n\n\n\nimpl Map {\n\n fn from_file(file: &str) -> Result<Map, MapParseError> {\n\n let file = match File::open(file) {\n\n Ok(file) => file,\n", "file_path": "src/game/map.rs", "rank": 40, "score": 8.580003616237699 }, { "content": "\n\n rust_label.color = Box::new(termion::color::Rgb(inverted, inverted, inverted));\n\n rpg_label.color = Box::new(termion::color::Rgb(inverted, inverted, inverted));\n\n author_label.color = Box::new(termion::color::Rgb(inverted, inverted, inverted));\n\n\n\n game_state.render.draw(&rust_label);\n\n game_state.render.draw(&rpg_label);\n\n game_state.render.draw(&author_label);\n\n\n\n brightness = brightness - 1;\n\n inverted = inverted + 1;\n\n\n\n std::thread::sleep(Duration::from_millis(20));\n\n }\n\n\n\n PlayResult::Pop\n\n }\n\n\n\n fn to_string(&self) -> String {\n\n String::from(\"ExitSplash\")\n\n }\n\n}\n", "file_path": "src/game/exit_splash.rs", "rank": 41, "score": 8.36919068892239 }, { "content": " PlayResult::Still\n\n }\n\n\n\n fn to_string(&self) -> String {\n\n String::from(\"MapPlayState\")\n\n }\n\n\n\n fn on_key_pressed(&mut self, game_state: &mut GlobalState, key: Key) -> PlayResult {\n\n PlayResult::Still\n\n }\n\n}\n\n\n\nconst MAP_FOLDER: &'static str = \"assets/maps/\";\n\nconst MAP_START: &'static str = \"start.map\";\n\n\n\nimpl PlayState {\n\n pub fn new() -> Self {\n\n let start_map = Map::from_file(format!(\"{}{}\", MAP_FOLDER, MAP_START).as_str());\n\n\n\n let start_map = start_map.unwrap_or_else(|err| {\n", "file_path": "src/game/map.rs", "rank": 42, "score": 8.355013812253096 }, { "content": "\n\n game_state.render.clear_screen();\n\n\n\n std::thread::sleep(Duration::from_millis(5));\n\n }\n\n\n\n PlayResult::Switch(Box::new(super::menu::PlayState::new(game_state)))\n\n }\n\n\n\n fn to_string(&self) -> String {\n\n String::from(\"StartSplash\")\n\n }\n\n}", "file_path": "src/game/start_splash.rs", "rank": 43, "score": 8.280949852573697 }, { "content": "use crate::render::{Render, Coordinate};\n\nuse log::{Metadata, Record};\n\nuse crate::widgets::label::Label;\n\nuse 
std::cell::RefCell;\n\n\n", "file_path": "src/game/loggers.rs", "rank": 44, "score": 7.488340613702071 }, { "content": "const BUTTON_SETTINGS: u8 = 1;\n\nconst BUTTON_EXIT: u8 = 2;\n\n\n\nconst BUTTON_SOUND_EXIT: &'static str = \"assets/sound/button2.wav\";\n\nconst BUTTON_SOUND: &'static str = \"assets/sound/button.wav\";\n\n\n\nimpl PlayState {\n\n pub fn new(state: &GlobalState) -> Self {\n\n\n\n let new_game = Label::new(\"New GAME\");\n\n let settings = Label::new(\"Settings\");\n\n let exit = Label::new(\"Exit\");\n\n\n\n let mut this = PlayState {\n\n buttons: [\n\n new_game,\n\n settings,\n\n exit\n\n ],\n\n selected_index: 0,\n", "file_path": "src/game/menu.rs", "rank": 45, "score": 7.399812386110122 }, { "content": " sound: sound::Manager::new(),\n\n }\n\n }\n\n\n\n\n\n}\n\n\n\npub struct Game {\n\n states: Vec<Box<dyn PlayState>>,\n\n fps: u64,\n\n mpf: u64\n\n}\n\n\n\nimpl Game {\n\n pub fn new() -> Self {\n\n let states : Vec<Box<dyn PlayState>> = vec![];\n\n let fps = 10;\n\n let mpf = 1000 / fps;\n\n\n\n Game { states, fps, mpf }\n", "file_path": "src/game/mod.rs", "rank": 46, "score": 7.269188918457643 }, { "content": " Err(_) => return Err(MapParseError::new(\"failed to open map file\")),\n\n };\n\n\n\n let lines: Vec<String> = io::BufReader::new(file).lines()\n\n .map(|l| l.expect(\"Failed to parse line\"))\n\n .collect();\n\n\n\n // HEIGHT lines + 1 MAGIC LINE\n\n if lines.len() < (HEIGHT + 1) as usize {\n\n return Err(MapParseError::new(\"file corrupted (no magic line)\"));\n\n }\n\n\n\n if !lines[0].as_str().eq(MAGIC) {\n\n return Err(MapParseError::new(\"bad magic line\"));\n\n }\n\n\n\n let mut tiles = vec![];\n\n for line in lines.iter().skip(1) {\n\n if line.len() != WIDTH as usize {\n\n return Err(MapParseError::new(\n", "file_path": "src/game/map.rs", "rank": 47, "score": 7.088267283385287 }, { "content": " format!(\"each row should be {} characters ({} found)!\", WIDTH, line.len())\n\n .as_str()))\n\n }\n\n\n\n let mut line_tiles: Vec<Tile> = vec![];\n\n for char in line.chars() {\n\n if let Some(tile) = DEFAULT_TILE_MAP.get(&char) {\n\n line_tiles.push(tile.clone());\n\n } else {\n\n line_tiles.push(DEFAULT_TILE);\n\n }\n\n }\n\n\n\n tiles.push(line_tiles)\n\n }\n\n\n\n Ok(Map{\n\n tiles,\n\n position: Position::from(0, 0)\n\n })\n", "file_path": "src/game/map.rs", "rank": 48, "score": 6.9138919172395354 }, { "content": "use std::error::Error;\n\nuse std::fmt::Debug;\n\n\n\nuse crate::render;\n\n\n\nuse std::time::Duration;\n\nuse std::io::Write;\n\nuse termion::raw::IntoRawMode;\n\nuse termion::screen;\n\nuse log::{info, error};\n\n\n\nuse crate::widgets::label::Label;\n\nuse crate::render::Coordinate;\n\nuse std::panic::PanicInfo;\n\nuse std::any::TypeId;\n\nuse backtrace::Backtrace;\n\nuse termion::event::Key;\n\n\n\nmod menu;\n\nmod exit_splash;\n\nmod start_splash;\n\nmod input;\n\nmod sound;\n\nmod map;\n\nmod loggers;\n\n\n\n/// Here is a state system implemented\n\n/// Inspired by Veloren project\n\n\n", "file_path": "src/game/mod.rs", "rank": 49, "score": 6.6664671075516 }, { "content": "\n\n self.render(game_state);\n\n\n\n PlayResult::Still\n\n }\n\n\n\n fn to_string(&self) -> String {\n\n String::from(\"MenuState\")\n\n }\n\n\n\n fn on_key_pressed(&mut self, game_state: &mut GlobalState, key: Key) -> PlayResult {\n\n self.buttons[self.selected_index as usize].set_selected(false);\n\n\n\n match key {\n\n Key::Down => {\n\n self.selected_index = self.selected_index + 1;\n\n game_state.sound.play(BUTTON_SOUND).unwrap();\n\n }\n\n Key::Up => {\n\n 
self.selected_index = self.selected_index - 1;\n", "file_path": "src/game/menu.rs", "rank": 50, "score": 6.588411637300505 }, { "content": " device,\n\n sound_cache: HashMap::new()\n\n }\n\n }\n\n\n\n fn load(&mut self, name: &str) -> Result<(), Box<dyn Error>> {\n\n let file = File::open(name)?;\n\n let reader = BufReader::new(file);\n\n let source = rodio::Decoder::new(reader)?.buffered();\n\n\n\n self.sound_cache.insert(String::from(name), source);\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn play(&mut self, name: &str) -> Result<(), Box<dyn Error>> {\n\n // Just silently return OK\n\n if self.device.is_none() {\n\n return Ok(());\n\n }\n", "file_path": "src/game/sound.rs", "rank": 51, "score": 6.32973698534494 }, { "content": "pub mod game;\n\npub mod render;\n\npub mod widgets;", "file_path": "src/lib.rs", "rank": 52, "score": 5.179644335724355 }, { "content": " None => {\n\n playing = false;\n\n continue;\n\n },\n\n Some(state) => state\n\n };\n\n\n\n info!(\"Current state: {}\", current_state.to_string());\n\n\n\n let mut result = PlayResult::Still;\n\n if let Some(key) = global_state.input.get_pressed_key() {\n\n result = current_state.on_key_pressed(&mut global_state, key);\n\n\n\n match key {\n\n Key::Esc => {\n\n info!(\"Esc pressed. Exit game from state {}\", current_state.to_string());\n\n break;\n\n },\n\n _ => {}\n\n }\n", "file_path": "src/game/mod.rs", "rank": 53, "score": 5.090734686915411 }, { "content": "pub mod label;\n\n\n\nuse label::*;", "file_path": "src/widgets/mod.rs", "rank": 54, "score": 5.015953613251854 }, { "content": " info!(\"Switch to state: from {} to {}\", current_state.to_string(), state.to_string());\n\n self.states.pop().expect(\"Empty state in queue!\");\n\n self.states.push(state);\n\n continue;\n\n },\n\n PlayResult::Still => {}\n\n }\n\n\n\n stdout.flush().unwrap();\n\n std::thread::sleep(Duration::from_millis(self.mpf));\n\n }\n\n\n\n info!(\"Shutdown the GAME\");\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/game/mod.rs", "rank": 55, "score": 3.843057822536317 }, { "content": " }\n\n\n\n if let PlayResult::Still = result {\n\n result = current_state.play(&mut global_state);\n\n }\n\n\n\n match result {\n\n PlayResult::Shutdown => {\n\n info!(\"Shutdown state\");\n\n playing = false;\n\n },\n\n PlayResult::Pop => {\n\n info!(\"Pop state\");\n\n self.states.pop().expect(\"Empty state in queue!\");\n\n },\n\n PlayResult::Push(state) => {\n\n info!(\"Push state: {}\", state.to_string());\n\n self.states.push(state);\n\n },\n\n PlayResult::Switch(state) => {\n", "file_path": "src/game/mod.rs", "rank": 56, "score": 3.8096515434595455 }, { "content": " BUTTON_EXIT => {\n\n game_state.sound.play(BUTTON_SOUND_EXIT).unwrap();\n\n std::thread::sleep(Duration::from_millis(800));\n\n return PlayResult::Switch(Box::new(exit_splash::PlayState{}));\n\n },\n\n BUTTON_NEW_GAME => {\n\n return PlayResult::Push(Box::new(map::PlayState::new()))\n\n }\n\n _ => PlayResult::Still\n\n };\n\n }\n\n\n\n fn adjust_selected_index(&mut self) {\n\n self.selected_index = self.selected_index % self.buttons.len() as i8;\n\n\n\n if self.selected_index < 0 {\n\n self.selected_index = (self.buttons.len() as i8) + self.selected_index;\n\n }\n\n }\n\n\n", "file_path": "src/game/menu.rs", "rank": 57, "score": 3.3256183177601524 }, { "content": " game_state.sound.play(BUTTON_SOUND).unwrap();\n\n }\n\n Key::Esc => {\n\n return PlayResult::Push(Box::new(map::PlayState::new()));\n\n }\n\n Key::Char(char) => {\n\n // Enter on exit label\n\n if (char as u8) == 10 {\n\n return 
self.on_button_pressed(self.selected_index as u8, game_state);\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n self.adjust_selected_index();\n\n\n\n self.buttons[self.selected_index as usize].set_selected(true);\n\n PlayResult::Still\n\n }\n\n}", "file_path": "src/game/menu.rs", "rank": 58, "score": 3.2683197409781957 }, { "content": "use rust_rpg::*;\n\nuse std::process;\n\nuse log4rs::append::file::FileAppender;\n\nuse log4rs::encode::pattern::PatternEncoder;\n\nuse log4rs::config::{Config, Appender, Root};\n\nuse log::LevelFilter;\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 3.139335470794287 }, { "content": "\n\n let sound = match self.sound_cache.get(name) {\n\n None => {\n\n self.load(name)?;\n\n self.sound_cache.get(name).unwrap()\n\n },\n\n Some(sound) => sound,\n\n };\n\n\n\n rodio::play_raw(&self.device.as_ref().unwrap(), sound.clone().convert_samples());\n\n\n\n Ok(())\n\n }\n\n}", "file_path": "src/game/sound.rs", "rank": 60, "score": 1.7743276860933794 } ]
Rust
scheduler/crates/api/src/calendar/update_calendar.rs
omid/nettu-scheduler
27a2728882842222085ee4e17c952ec215fd683e
use crate::shared::{ auth::{ account_can_modify_calendar, account_can_modify_user, protect_account_route, Permission, }, usecase::{execute, execute_with_policy, PermissionBoundary, UseCase}, }; use crate::{error::NettuError, shared::auth::protect_route}; use actix_web::{web, HttpResponse}; use nettu_scheduler_api_structs::update_calendar::{APIResponse, PathParams, RequestBody}; use nettu_scheduler_domain::{Calendar, Metadata, User, ID}; use nettu_scheduler_infra::NettuContext; pub async fn update_calendar_admin_controller( http_req: web::HttpRequest, ctx: web::Data<NettuContext>, path: web::Path<PathParams>, body: web::Json<RequestBody>, ) -> Result<HttpResponse, NettuError> { let account = protect_account_route(&http_req, &ctx).await?; let cal = account_can_modify_calendar(&account, &path.calendar_id, &ctx).await?; let user = account_can_modify_user(&account, &cal.user_id, &ctx).await?; let usecase = UpdateCalendarUseCase { user, calendar_id: cal.id, week_start: body.0.settings.week_start, timezone: body.0.settings.timezone, metadata: body.0.metadata, }; execute(usecase, &ctx) .await .map(|calendar| HttpResponse::Ok().json(APIResponse::new(calendar))) .map_err(NettuError::from) } pub async fn update_calendar_controller( http_req: web::HttpRequest, ctx: web::Data<NettuContext>, mut path: web::Path<PathParams>, body: web::Json<RequestBody>, ) -> Result<HttpResponse, NettuError> { let (user, policy) = protect_route(&http_req, &ctx).await?; let usecase = UpdateCalendarUseCase { user, calendar_id: std::mem::take(&mut path.calendar_id), week_start: body.0.settings.week_start, timezone: body.0.settings.timezone, metadata: body.0.metadata, }; execute_with_policy(usecase, &policy, &ctx) .await .map(|calendar| HttpResponse::Ok().json(APIResponse::new(calendar))) .map_err(NettuError::from) } #[derive(Debug)] struct UpdateCalendarUseCase { pub user: User, pub calendar_id: ID, pub week_start: Option<isize>, pub timezone: Option<String>, pub metadata: Option<Metadata>, } #[derive(Debug)] enum UseCaseError { CalendarNotFound, StorageError, InvalidSettings(String), } impl From<UseCaseError> for NettuError { fn from(e: UseCaseError) -> Self { match e { UseCaseError::StorageError => Self::InternalError, UseCaseError::CalendarNotFound => Self::NotFound("The calendar was not found.".into()), UseCaseError::InvalidSettings(err) => Self::BadClientData(format!( "Bad calendar settings provided. 
Error message: {}", err )), } } } #[async_trait::async_trait(?Send)] impl UseCase for UpdateCalendarUseCase { type Response = Calendar; type Error = UseCaseError; const NAME: &'static str = "UpdateCalendar"; async fn execute(&mut self, ctx: &NettuContext) -> Result<Self::Response, Self::Error> { let mut calendar = match ctx.repos.calendars.find(&self.calendar_id).await { Some(cal) if cal.user_id == self.user.id => cal, _ => return Err(UseCaseError::CalendarNotFound), }; if let Some(wkst) = self.week_start { if !calendar.settings.set_week_start(wkst) { return Err(UseCaseError::InvalidSettings(format!( "Invalid week start: {}, must be between 0 and 6", wkst ))); } } if let Some(timezone) = &self.timezone { if !calendar.settings.set_timezone(timezone) { return Err(UseCaseError::InvalidSettings(format!( "Invalid timezone: {}, must be a valid IANA Timezone string", timezone ))); } } if let Some(metadata) = &self.metadata { calendar.metadata = metadata.clone(); } ctx.repos .calendars .save(&calendar) .await .map(|_| calendar) .map_err(|_| UseCaseError::StorageError) } } impl PermissionBoundary for UpdateCalendarUseCase { fn permissions(&self) -> Vec<Permission> { vec![Permission::UpdateCalendar] } } #[cfg(test)] mod test { use nettu_scheduler_domain::{Account, Calendar, User}; use nettu_scheduler_infra::setup_context; use super::*; #[actix_web::main] #[test] async fn it_rejects_invalid_wkst() { let ctx = setup_context().await; let account = Account::default(); ctx.repos.accounts.insert(&account).await.unwrap(); let user = User::new(account.id.clone()); ctx.repos.users.insert(&user).await.unwrap(); let calendar = Calendar::new(&user.id, &account.id); ctx.repos.calendars.insert(&calendar).await.unwrap(); let mut usecase = UpdateCalendarUseCase { user, calendar_id: calendar.id.into(), week_start: Some(20), timezone: None, metadata: None, }; let res = usecase.execute(&ctx).await; assert!(res.is_err()); } #[actix_web::main] #[test] async fn it_update_settings_with_valid_wkst() { let ctx = setup_context().await; let account = Account::default(); ctx.repos.accounts.insert(&account).await.unwrap(); let user = User::new(account.id.clone()); ctx.repos.users.insert(&user).await.unwrap(); let calendar = Calendar::new(&user.id, &account.id); ctx.repos.calendars.insert(&calendar).await.unwrap(); assert_eq!(calendar.settings.week_start, 0); let new_wkst = 3; let mut usecase = UpdateCalendarUseCase { user, calendar_id: calendar.id.clone(), week_start: Some(new_wkst), timezone: None, metadata: Some(Metadata::new()), }; let res = usecase.execute(&ctx).await; assert!(res.is_ok()); let calendar = ctx.repos.calendars.find(&calendar.id).await.unwrap(); assert_eq!(calendar.settings.week_start, new_wkst); } }
use crate::shared::{ auth::{ account_can_modify_calendar, account_can_modify_user, protect_account_route, Permission, }, usecase::{execute, execute_with_policy, PermissionBoundary, UseCase}, }; use crate::{error::NettuError, shared::auth::protect_route}; use actix_web::{web, HttpResponse}; use nettu_scheduler_api_structs::update_calendar::{APIResponse, PathParams, RequestBody}; use nettu_scheduler_domain::{Calendar, Metadata, User, ID}; use nettu_scheduler_infra::NettuContext; pub async fn update_calendar_admin_controller( http_req: web::HttpRequest, ctx: web::Data<NettuContext>, path: web::Path<PathParams>, body: web::Json<RequestBody>, ) -> Result<HttpResponse, NettuError> { let account = protect_account_route(&http_req, &ctx).await?; let cal = account_can_modify_calendar(&account, &path.calendar_id, &ctx).await?; let user = account_can_modify_user(&account, &cal.user_id, &ctx).await?; let usecase = UpdateCalendarUseCase { user, calendar_id: cal.id, week_start: body.0.settings.week_start, timezone: body.0.settings.timezone, metadata: body.0.metadata, }; execute(usecase, &ctx) .await .map(|calendar| HttpResponse::Ok().json(APIResponse::new(calendar))) .map_err(NettuError::from) } pub async fn update_calendar_controller( http_req: web::HttpRequest, ctx: web::Data<NettuContext>, mut path: web::Path<PathParams>, body: web::Json<RequestBody>, ) -> Result<HttpResponse, NettuError> { let (user, policy) = protect_route(&http_req, &ctx).await?; let usecase = UpdateCalendarUseCase { user, calendar_id: std::mem::take(&mut path.calendar_id), week_start: body.0.settings.week_start, timezone: body.0.settings.timezone, metadata: body.0.metadata, }; execute_with_policy(usecase, &policy, &ctx) .await .map(|calendar| HttpResponse::Ok().json(APIResponse::new(calendar))) .map_err(NettuError::from) } #[derive(Debug)] struct UpdateCalendarUseCase { pub user: User, pub calendar_id: ID, pub week_start: Option<isize>, pub timezone: Option<String>, pub metadata: Option<Metadata>, } #[derive(Debug)] enum UseCaseError { CalendarNotFound, StorageError, InvalidSettings(String), } impl From<UseCaseError> for NettuError { fn from(e: UseCaseError) -> Self { match e { UseCaseError::StorageError => Self::InternalError, UseCaseError::CalendarNotFound => Self::NotFound("The calendar was not found.".into()), UseCaseError::InvalidSettings(err) => Self::BadClientData(format!( "Bad calendar settings provided. Error message: {}", err )), } } } #[async_trait::async_trait(?Send)] impl UseCase for UpdateCalendarUseCase { type Response = Calendar; type Error = UseCaseError; const NAME: &'static str = "UpdateCalendar"; async fn execute(&mut self, ctx: &NettuContext) -> Result<Self::Response, Self::Error> { let mut calendar = match ctx.repos.calendars.find(&self.calendar_id).await { Some(cal) if cal.user_id == self.user.id => cal, _ => return Err(UseCaseError::CalendarNotFound), }; if let Some(wkst) = self.week_start { if !calendar.settings.set_week_start(wkst) { return Err(UseCaseError::InvalidSettings(format!( "Invalid week start: {}, must be between 0 and 6", wkst ))); } } if let Some(timezone) = &self.timezone { if !calendar.settings.set_timezone(timezone) { return
; } } if let Some(metadata) = &self.metadata { calendar.metadata = metadata.clone(); } ctx.repos .calendars .save(&calendar) .await .map(|_| calendar) .map_err(|_| UseCaseError::StorageError) } } impl PermissionBoundary for UpdateCalendarUseCase { fn permissions(&self) -> Vec<Permission> { vec![Permission::UpdateCalendar] } } #[cfg(test)] mod test { use nettu_scheduler_domain::{Account, Calendar, User}; use nettu_scheduler_infra::setup_context; use super::*; #[actix_web::main] #[test] async fn it_rejects_invalid_wkst() { let ctx = setup_context().await; let account = Account::default(); ctx.repos.accounts.insert(&account).await.unwrap(); let user = User::new(account.id.clone()); ctx.repos.users.insert(&user).await.unwrap(); let calendar = Calendar::new(&user.id, &account.id); ctx.repos.calendars.insert(&calendar).await.unwrap(); let mut usecase = UpdateCalendarUseCase { user, calendar_id: calendar.id.into(), week_start: Some(20), timezone: None, metadata: None, }; let res = usecase.execute(&ctx).await; assert!(res.is_err()); } #[actix_web::main] #[test] async fn it_update_settings_with_valid_wkst() { let ctx = setup_context().await; let account = Account::default(); ctx.repos.accounts.insert(&account).await.unwrap(); let user = User::new(account.id.clone()); ctx.repos.users.insert(&user).await.unwrap(); let calendar = Calendar::new(&user.id, &account.id); ctx.repos.calendars.insert(&calendar).await.unwrap(); assert_eq!(calendar.settings.week_start, 0); let new_wkst = 3; let mut usecase = UpdateCalendarUseCase { user, calendar_id: calendar.id.clone(), week_start: Some(new_wkst), timezone: None, metadata: Some(Metadata::new()), }; let res = usecase.execute(&ctx).await; assert!(res.is_ok()); let calendar = ctx.repos.calendars.find(&calendar.id).await.unwrap(); assert_eq!(calendar.settings.week_start, new_wkst); } }
Err(UseCaseError::InvalidSettings(format!( "Invalid timezone: {}, must be a valid IANA Timezone string", timezone )))
call_expression
[]
Rust
tower-balance/src/p2c/service.rs
JeanMertz/tower
7e55b7fa0b2db4ff36fd90f3700bd628c89951b6
use crate::error; use futures::{future, Async, Future, Poll}; use rand::{rngs::SmallRng, SeedableRng}; use tower_discover::{Change, Discover}; use tower_load::Load; use tower_ready_cache::{error::Failed, ReadyCache}; use tower_service::Service; use tracing::{debug, trace}; #[derive(Debug)] pub struct Balance<D: Discover, Req> { discover: D, services: ReadyCache<D::Key, D::Service, Req>, ready_index: Option<usize>, rng: SmallRng, } impl<D, Req> Balance<D, Req> where D: Discover, D::Service: Service<Req>, <D::Service as Service<Req>>::Error: Into<error::Error>, { pub fn new(discover: D, rng: SmallRng) -> Self { Self { rng, discover, ready_index: None, services: ReadyCache::default(), } } pub fn from_entropy(discover: D) -> Self { Self::new(discover, SmallRng::from_entropy()) } pub fn len(&self) -> usize { self.services.len() } pub(crate) fn discover_mut(&mut self) -> &mut D { &mut self.discover } } impl<D, Req> Balance<D, Req> where D: Discover, D::Key: Clone, D::Error: Into<error::Error>, D::Service: Service<Req> + Load, <D::Service as Load>::Metric: std::fmt::Debug, <D::Service as Service<Req>>::Error: Into<error::Error>, { fn update_pending_from_discover(&mut self) -> Result<(), error::Discover> { debug!("updating from discover"); loop { match self .discover .poll() .map_err(|e| error::Discover(e.into()))? { Async::NotReady => return Ok(()), Async::Ready(Change::Remove(key)) => { trace!("remove"); self.services.evict(&key); } Async::Ready(Change::Insert(key, svc)) => { trace!("insert"); self.services.push(key, svc); } } } } fn promote_pending_to_ready(&mut self) { loop { match self.services.poll_pending() { Ok(Async::Ready(())) => { debug_assert_eq!(self.services.pending_len(), 0); break; } Ok(Async::NotReady) => { debug_assert!(self.services.pending_len() > 0); break; } Err(error) => { debug!(%error, "dropping failed endpoint"); } } } trace!( ready = %self.services.ready_len(), pending = %self.services.pending_len(), "poll_unready" ); } fn p2c_ready_index(&mut self) -> Option<usize> { match self.services.ready_len() { 0 => None, 1 => Some(0), len => { let idxs = rand::seq::index::sample(&mut self.rng, len, 2); let aidx = idxs.index(0); let bidx = idxs.index(1); debug_assert_ne!(aidx, bidx, "random indices must be distinct"); let aload = self.ready_index_load(aidx); let bload = self.ready_index_load(bidx); let chosen = if aload <= bload { aidx } else { bidx }; trace!( a.index = aidx, a.load = ?aload, b.index = bidx, b.load = ?bload, chosen = if chosen == aidx { "a" } else { "b" }, "p2c", ); Some(chosen) } } } fn ready_index_load(&self, index: usize) -> <D::Service as Load>::Metric { let (_, svc) = self.services.get_ready_index(index).expect("invalid index"); svc.load() } } impl<D, Req> Service<Req> for Balance<D, Req> where D: Discover, D::Key: Clone, D::Error: Into<error::Error>, D::Service: Service<Req> + Load, <D::Service as Load>::Metric: std::fmt::Debug, <D::Service as Service<Req>>::Error: Into<error::Error>, { type Response = <D::Service as Service<Req>>::Response; type Error = error::Error; type Future = future::MapErr< <D::Service as Service<Req>>::Future, fn(<D::Service as Service<Req>>::Error) -> error::Error, >; fn poll_ready(&mut self) -> Poll<(), Self::Error> { self.update_pending_from_discover()?; self.promote_pending_to_ready(); loop { if let Some(index) = self.ready_index.take() { match self.services.check_ready_index(index) { Ok(true) => { self.ready_index = Some(index); return Ok(Async::Ready(())); } Ok(false) => { trace!("ready service became unavailable"); } 
Err(Failed(_, error)) => { debug!(%error, "endpoint failed"); } } } self.ready_index = self.p2c_ready_index(); if self.ready_index.is_none() { debug_assert_eq!(self.services.ready_len(), 0); return Ok(Async::NotReady); } } } fn call(&mut self, request: Req) -> Self::Future { let index = self.ready_index.take().expect("called before ready"); self.services .call_ready_index(index, request) .map_err(Into::into) } }
use crate::error; use futures::{future, Async, Future, Poll}; use rand::{rngs::SmallRng, SeedableRng}; use tower_discover::{Change, Discover}; use tower_load::Load; use tower_ready_cache::{error::Failed, ReadyCache}; use tower_service::Service; use tracing::{debug, trace}; #[derive(Debug)] pub struct Balance<D: Discover, Req> { discover: D, services: ReadyCache<D::Key, D::Service, Req>, ready_index: Option<usize>, rng: SmallRng, } impl<D, Req> Balance<D, Req> where D: Discover, D::Service: Service<Req>, <D::Service as Service<Req>>::Error: Into<error::Error>, { pub fn new(discover: D, rng: SmallRng) -> Self { Self { rng, discover, ready_index: None, services: ReadyCache::default(), } } pub fn from_entropy(discover: D) -> Self { Self::new(discover, SmallRng::from_entropy()) } pub fn len(&self) -> usize { self.services.len() } pub(crate) fn discover_mut(&mut self) -> &mut D { &mut self.discover } } impl<D, Req> Balance<D, Req> where D: Discover, D::Key: Clone, D::Error: Into<error::Error>, D::Service: Service<Req> + Load, <D::Service as Load>::Metric: std::fmt::Debug, <D::Service as Service<Req>>::Error: Into<error::Error>, { fn update_pending_from_discover(&mut self) -> Result<(), error::Discover> { debug!("updating from discover"); loop { match self .discover .poll() .map_err(|e| error::Discover(e.into()))? { Async::NotReady => return Ok(()), Async::Ready(Change::Remove(key)) => { trace!("remove"); self.services.evict(&key); } Async::Ready(Change::Insert(key, svc)) => { trace!("insert"); self.services.push(key, svc); } } } } fn promote_pending_to_ready(&mut self) { loop { match self.services.poll_pending() { Ok(Async::Ready(())) => { debug_assert_eq!(self.services.pending_len(), 0); break; } Ok(Async::NotReady) => { debug_assert!(self.services.pending_len() > 0); break; } Err(error) => { debug!(%error, "dropping failed endpoint"); } } } trace!( ready = %self.services.ready_len(), pending = %self.services.pending_len(), "poll_unready" ); } fn p2c_ready_index(&mut self) -> Option<usize> { match self.services.ready_len() { 0 => None, 1 => Some(0), len => { let idxs = rand::seq::index::sample(&mut self.rng, len, 2); let aidx = idxs.index(0); let bidx = idxs.index(1); debug_assert_ne!(aidx, bidx, "random indices must be distinct"); let aload = self.ready_index_load(aidx); let bload = self.ready_index_load(bidx); let chosen = if aload <= bload { aidx } else { bidx }; trace!( a.index = aidx, a.load = ?aload, b.index = bidx, b.load = ?bload, chosen = if chosen == aidx { "a" } else { "b" }, "p2c", ); Some(chosen) } } } fn ready_index_load(&self, index: usize) -> <D::Service as Load>::Metric { let (_, svc) = self.services.get_ready_index(index).expect("invalid index"); svc.load() } } impl<D, Req> Service<Req> for Balance<D, Req> where D: Discover, D::Key: Clone, D::Error: Into<error::Error>, D::Service: Service<Req> + Load, <D::Service as Load>::Metric: std::fmt::Debug, <D::Service as Service<Req>>::Error: Into<error::Error>, { type Response = <D::Service as Service<Req>>::Response; type Error = error::Error; type Future = future::MapErr< <D::Service as Service<Req>>::Future, fn(<D::Service as Service<Req>>::Error) -> error::Error, >; fn poll_ready(&mut self) -> Poll<(), Self::Error> { self.update_pending_from_discover()?; self.promote_pending_to_ready(); loop { if let Some(index) = self.ready_index.take() { match self.services.check_ready_index(index) { Ok(true) => {
} } self.ready_index = self.p2c_ready_index(); if self.ready_index.is_none() { debug_assert_eq!(self.services.ready_len(), 0); return Ok(Async::NotReady); } } } fn call(&mut self, request: Req) -> Self::Future { let index = self.ready_index.take().expect("called before ready"); self.services .call_ready_index(index, request) .map_err(Into::into) } }
self.ready_index = Some(index); return Ok(Async::Ready(())); } Ok(false) => { trace!("ready service became unavailable"); } Err(Failed(_, error)) => { debug!(%error, "endpoint failed"); }
function_block-random_span
[ { "content": "fn new_service<P: Policy<Req, Res, Error> + Clone>(\n\n policy: P,\n\n) -> (tower_retry::Retry<P, Mock>, Handle) {\n\n let (service, handle) = mock::pair();\n\n let service = tower_retry::Retry::new(policy, service);\n\n (service, handle)\n\n}\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 0, "score": 276938.2486159946 }, { "content": "fn run<D>(name: &'static str, lb: lb::p2c::Balance<D, Req>) -> impl Future<Item = (), Error = ()>\n\nwhere\n\n D: Discover + Send + 'static,\n\n D::Error: Into<Error>,\n\n D::Key: Clone + Send,\n\n D::Service: Service<Req, Response = Rsp> + load::Load + Send,\n\n <D::Service as Service<Req>>::Error: Into<Error>,\n\n <D::Service as Service<Req>>::Future: Send,\n\n <D::Service as load::Load>::Metric: std::fmt::Debug,\n\n{\n\n println!(\"{}\", name);\n\n\n\n let requests = stream::repeat::<_, Error>(Req).take(REQUESTS as u64);\n\n let service = ConcurrencyLimit::new(lb, CONCURRENCY);\n\n let responses = service.call_all(requests).unordered();\n\n\n\n compute_histo(responses).map(|s| s.report()).map_err(|_| {})\n\n}\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 1, "score": 271564.79826530634 }, { "content": "#[test]\n\nfn two_endpoints_with_equal_load() {\n\n let (mock_a, mut handle_a) = mock::pair();\n\n let (mock_b, mut handle_b) = mock::pair();\n\n let mock_a = load::Constant::new(mock_a, 1);\n\n let mock_b = load::Constant::new(mock_b, 1);\n\n\n\n let disco = ServiceList::new(vec![mock_a, mock_b].into_iter());\n\n let mut svc = Balance::from_entropy(disco);\n\n\n\n with_task(|| {\n\n handle_a.allow(0);\n\n handle_b.allow(0);\n\n assert_not_ready!(svc);\n\n assert_eq!(svc.len(), 2, \"balancer must have discovered both endpoints\");\n\n\n\n handle_a.allow(1);\n\n handle_b.allow(0);\n\n assert_ready!(svc, \"must be ready when one of two services is ready\");\n\n {\n\n let fut = svc.call(());\n", "file_path": "tower-balance/src/p2c/test.rs", "rank": 2, "score": 222418.81813094197 }, { "content": "type Rx<T> = oneshot::Receiver<Result<T, Error>>;\n\n\n\nimpl<T> ResponseFuture<T> {\n\n pub(crate) fn new(rx: Rx<T>) -> ResponseFuture<T> {\n\n ResponseFuture { rx: Some(rx) }\n\n }\n\n\n\n pub(crate) fn closed() -> ResponseFuture<T> {\n\n ResponseFuture { rx: None }\n\n }\n\n}\n\n\n\nimpl<T> Future for ResponseFuture<T> {\n\n type Item = T;\n\n type Error = Error;\n\n\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n match self.rx {\n\n Some(ref mut rx) => match rx.poll() {\n\n Ok(Async::Ready(Ok(v))) => Ok(v.into()),\n\n Ok(Async::Ready(Err(e))) => Err(e),\n\n Ok(Async::NotReady) => Ok(Async::NotReady),\n\n Err(_) => Err(error::Closed::new().into()),\n\n },\n\n None => Err(error::Closed::new().into()),\n\n }\n\n }\n\n}\n", "file_path": "tower-test/src/mock/future.rs", "rank": 3, "score": 217968.11304593654 }, { "content": "#[derive(Debug)]\n\nstruct Pending<K, S, Req> {\n\n key: Option<K>,\n\n cancel: Option<CancelRx>,\n\n ready: tower_util::Ready<S, Req>,\n\n}\n\n\n\n// === ReadyCache ===\n\n\n\nimpl<K, S, Req> Default for ReadyCache<K, S, Req>\n\nwhere\n\n K: Eq + Hash,\n\n S: Service<Req>,\n\n{\n\n fn default() -> Self {\n\n Self {\n\n ready: IndexMap::default(),\n\n pending: stream::FuturesUnordered::new(),\n\n pending_cancel_txs: IndexMap::default(),\n\n }\n\n }\n", "file_path": "tower-ready-cache/src/cache.rs", "rank": 4, "score": 210404.61904319507 }, { "content": "fn assert_not_ready<F: Future>(f: &mut F)\n\nwhere\n\n F::Error: ::std::fmt::Debug,\n\n{\n\n future::poll_fn(|| {\n\n 
assert!(f.poll().unwrap().is_not_ready());\n\n Ok::<_, ()>(().into())\n\n })\n\n .wait()\n\n .unwrap();\n\n}\n", "file_path": "tower-retry/tests/retry.rs", "rank": 5, "score": 199544.40832350866 }, { "content": "fn new_service<P: Policy<Req> + Clone>(policy: P) -> (Hedge<Mock, P>, Handle) {\n\n let (service, handle) = tower_test::mock::pair();\n\n\n\n let mock_latencies: [u64; 10] = [1, 1, 1, 1, 1, 1, 1, 1, 10, 10];\n\n\n\n let service = Hedge::new_with_mock_latencies(\n\n service,\n\n policy,\n\n 10,\n\n 0.9,\n\n Duration::from_secs(60),\n\n &mock_latencies,\n\n );\n\n (service, handle)\n\n}\n", "file_path": "tower-hedge/tests/hedge.rs", "rank": 6, "score": 194562.6811230501 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Req;\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 7, "score": 193103.4017275871 }, { "content": "fn compute_histo<S>(times: S) -> impl Future<Item = Summary, Error = Error> + 'static\n\nwhere\n\n S: Stream<Item = Rsp, Error = Error> + 'static,\n\n{\n\n times.fold(Summary::new(), |mut summary, rsp| {\n\n summary.count(rsp);\n\n Ok(summary) as Result<_, Error>\n\n })\n\n}\n\n\n\nimpl Summary {\n\n fn new() -> Self {\n\n Self {\n\n // The max delay is 2000ms. At 3 significant figures.\n\n latencies: Histogram::<u64>::new_with_max(3_000, 3).unwrap(),\n\n start: Instant::now(),\n\n count_by_instance: [0; 10],\n\n }\n\n }\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 8, "score": 192460.0724618008 }, { "content": "type Mock = mock::Mock<Req, Req>;\n\n\n", "file_path": "tower-ready-cache/tests/ready_cache.rs", "rank": 9, "score": 191735.96830537985 }, { "content": "#[test]\n\nfn single_endpoint() {\n\n let (mock, mut handle) = mock::pair();\n\n let mock = load::Constant::new(mock, 0);\n\n\n\n let disco = ServiceList::new(vec![mock].into_iter());\n\n let mut svc = Balance::from_entropy(disco);\n\n\n\n with_task(|| {\n\n handle.allow(0);\n\n assert_not_ready!(svc);\n\n assert_eq!(svc.len(), 1, \"balancer must have discovered endpoint\");\n\n\n\n handle.allow(1);\n\n assert_ready!(svc);\n\n\n\n let fut = svc.call(());\n\n\n\n let ((), rsp) = handle.next_request().unwrap();\n\n rsp.send_response(1);\n\n\n\n assert_eq!(fut.wait().expect(\"call must complete\"), 1);\n\n handle.allow(1);\n\n assert_ready!(svc);\n\n\n\n handle.send_error(\"endpoint lost\");\n\n assert_not_ready!(svc);\n\n assert!(svc.len() == 0, \"balancer must drop failed endpoints\");\n\n });\n\n}\n\n\n", "file_path": "tower-balance/src/p2c/test.rs", "rank": 10, "score": 190685.93399962244 }, { "content": "#[test]\n\nfn failing_service() {\n\n // start the pool\n\n let (mock, mut handle) =\n\n mock::pair::<(), load::Constant<mock::Mock<(), &'static str>, usize>>();\n\n let mut pool = Builder::new()\n\n .urgency(1.0) // so _any_ NotReady will add a service\n\n .underutilized_below(0.0) // so no Ready will remove a service\n\n .build(mock, ());\n\n with_task(|| {\n\n assert_not_ready!(pool);\n\n });\n\n\n\n // give the pool a backing service\n\n let (svc1_m, mut svc1) = mock::pair();\n\n svc1.allow(1);\n\n handle\n\n .next_request()\n\n .unwrap()\n\n .1\n\n .send_response(load::Constant::new(svc1_m, 0));\n", "file_path": "tower-balance/src/pool/test.rs", "rank": 11, "score": 190141.97958989616 }, { "content": "/// Returns a new `ServiceFn` with the given closure.\n\npub fn service_fn<T>(f: T) -> ServiceFn<T> {\n\n ServiceFn { f }\n\n}\n\n\n\n/// A `Service` implemented by a closure.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct ServiceFn<T> {\n\n f: T,\n\n}\n\n\n\nimpl<T, F, 
Request> Service<Request> for ServiceFn<T>\n\nwhere\n\n T: FnMut(Request) -> F,\n\n F: IntoFuture,\n\n{\n\n type Response = F::Item;\n\n type Error = F::Error;\n\n type Future = F::Future;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), F::Error> {\n\n Ok(().into())\n\n }\n\n\n\n fn call(&mut self, req: Request) -> Self::Future {\n\n (self.f)(req).into_future()\n\n }\n\n}\n", "file_path": "tower-util/src/service_fn.rs", "rank": 12, "score": 184859.77389950553 }, { "content": "#[test]\n\nfn poll_ready_when_worker_is_dropped_early() {\n\n let (service, _handle) = mock::pair::<(), ()>();\n\n\n\n // drop that worker right on the floor!\n\n let mut exec = ExecFn(|fut| {\n\n drop(fut);\n\n Ok(())\n\n });\n\n\n\n let mut service = Buffer::with_executor(service, 1, &mut exec);\n\n\n\n let err = with_task(|| {\n\n service\n\n .poll_ready()\n\n .expect_err(\"buffer poll_ready should error\")\n\n });\n\n\n\n assert!(err.is::<error::Closed>(), \"should be a Closed: {:?}\", err);\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 13, "score": 184582.00805008685 }, { "content": "type Req = &'static str;\n", "file_path": "tower-ready-cache/tests/ready_cache.rs", "rank": 14, "score": 183730.7532666014 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 15, "score": 182043.78261373958 }, { "content": "#[test]\n\nfn basic_service_limit_functionality_with_poll_ready() {\n\n let mut task = MockTask::new();\n\n\n\n let (mut service, mut handle) = new_service(2);\n\n\n\n poll_fn(|| service.poll_ready()).wait().unwrap();\n\n let r1 = service.call(\"hello 1\");\n\n\n\n poll_fn(|| service.poll_ready()).wait().unwrap();\n\n let r2 = service.call(\"hello 2\");\n\n\n\n task.enter(|| {\n\n assert!(service.poll_ready().unwrap().is_not_ready());\n\n });\n\n\n\n assert!(!task.is_notified());\n\n\n\n // The request gets passed through\n\n assert_request_eq!(handle, \"hello 1\").send_response(\"world 1\");\n\n\n", "file_path": "tower-limit/tests/concurrency.rs", "rank": 16, "score": 180875.18420185297 }, { "content": "#[test]\n\nfn basic_service_limit_functionality_without_poll_ready() {\n\n let mut task = MockTask::new();\n\n\n\n let (mut service, mut handle) = new_service(2);\n\n\n\n assert_ready!(service.poll_ready());\n\n let r1 = service.call(\"hello 1\");\n\n\n\n assert_ready!(service.poll_ready());\n\n let r2 = service.call(\"hello 2\");\n\n\n\n task.enter(|| {\n\n assert_not_ready!(service.poll_ready());\n\n });\n\n\n\n // The request gets passed through\n\n assert_request_eq!(handle, \"hello 1\").send_response(\"world 1\");\n\n\n\n assert!(!task.is_notified());\n\n\n", "file_path": "tower-limit/tests/concurrency.rs", "rank": 17, "score": 177999.39850511367 }, { "content": "/// Advance the timer the specified amount\n\npub fn advance(timer: &mut Timer<MockPark>, duration: Duration) {\n\n let inner = timer.get_park().inner.clone();\n\n let deadline = inner.lock().unwrap().now() + duration;\n\n\n\n while inner.lock().unwrap().now() < deadline {\n\n let dur = deadline - inner.lock().unwrap().now();\n\n turn(timer, dur);\n\n }\n\n}\n\n\n", "file_path": "tower-hedge/tests/support/mod.rs", "rank": 18, "score": 172838.29434996005 }, { "content": "/// This trait allows you to use either Tokio's threaded runtime's executor or\n\n/// the `current_thread` runtime's executor depending on if `T` is `Send` or\n\n/// `!Send`.\n\npub trait BackgroundReadyExecutor<T, Request>: TypedExecutor<BackgroundReady<T, Request>>\n\nwhere\n\n T: 
Service<Request>,\n\n T::Error: Into<Error>,\n\n{\n\n}\n\n\n\nimpl<T, Request, E> BackgroundReadyExecutor<T, Request> for E\n\nwhere\n\n E: TypedExecutor<BackgroundReady<T, Request>>,\n\n T: Service<Request>,\n\n T::Error: Into<Error>,\n\n{\n\n}\n\n\n\npub(crate) fn background_ready<T, Request>(\n\n service: T,\n\n) -> (\n\n BackgroundReady<T, Request>,\n\n oneshot::Receiver<Result<T, Error>>,\n", "file_path": "tower-spawn-ready/src/future.rs", "rank": 19, "score": 170692.09090663912 }, { "content": "/// Attaches `I`-typed instruments to `V` typed values.\n\n///\n\n/// This utility allows load metrics to have a protocol-agnostic means to track streams\n\n/// past their initial response future. For example, if `V` represents an HTTP response\n\n/// type, an implementation could add `H`-typed handles to each response's extensions to\n\n/// detect when the response is dropped.\n\n///\n\n/// Handles are intended to be RAII guards that primarily implement `Drop` and update load\n\n/// metric state as they are dropped.\n\n///\n\n/// A base `impl<H, V> Instrument<H, V> for NoInstrument` is provided to drop the handle\n\n/// immediately. This is appropriate when a response is discrete and cannot comprise\n\n/// multiple messages.\n\n///\n\n/// In many cases, the `Output` type is simply `V`. However, `Instrument` may alter the\n\n/// type in order to instrument it appropriately. For example, an HTTP Instrument may\n\n/// modify the body type: so an `Instrument` that takes values of type `http::Response<A>`\n\n/// may output values of type `http::Response<B>`.\n\npub trait Instrument<H, V>: Clone {\n\n /// The instrumented value type.\n\n type Output;\n\n\n\n /// Attaches an `H`-typed handle to a `V`-typed value.\n\n fn instrument(&self, handle: H, value: V) -> Self::Output;\n\n}\n\n\n\n/// A `Instrument` implementation that drops each instrument immediately.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct NoInstrument;\n\n\n\n/// Attaches a `I`-typed instruments to the result of an `F`-typed `Future`.\n\n#[derive(Debug)]\n\npub struct InstrumentFuture<F, I, H>\n\nwhere\n\n F: Future,\n\n I: Instrument<H, F::Item>,\n\n{\n\n future: F,\n", "file_path": "tower-load/src/instrument.rs", "rank": 20, "score": 169611.71285163704 }, { "content": "/// Turn the timer state once\n\npub fn turn<T: IntoTimeout>(timer: &mut Timer<MockPark>, duration: T) {\n\n timer.turn(duration.into_timeout()).unwrap();\n\n}\n\n\n", "file_path": "tower-hedge/tests/support/mod.rs", "rank": 21, "score": 163467.14514730385 }, { "content": "#[test]\n\nfn poll_ready_not_ready() {\n\n let mut cache = ReadyCache::<usize, Mock, Req>::default();\n\n\n\n let (service0, mut handle0) = mock::pair::<Req, Req>();\n\n handle0.allow(0);\n\n cache.push(0, service0);\n\n\n\n let (service1, mut handle1) = mock::pair::<Req, Req>();\n\n handle1.allow(0);\n\n cache.push(1, service1);\n\n\n\n with_task(|| {\n\n assert!(cache.poll_pending().expect(\"must succeed\").is_not_ready());\n\n });\n\n\n\n assert_eq!(cache.ready_len(), 0);\n\n assert_eq!(cache.pending_len(), 2);\n\n assert_eq!(cache.len(), 2);\n\n}\n\n\n", "file_path": "tower-ready-cache/tests/ready_cache.rs", "rank": 22, "score": 162171.01844014812 }, { "content": "#[test]\n\nfn when_ready() {\n\n let (mut service, mut handle) = new_service();\n\n\n\n with_task(|| {\n\n assert!(\n\n service.poll_ready().unwrap().is_ready(),\n\n \"overload always reports ready\",\n\n );\n\n });\n\n\n\n let response = service.call(\"hello\");\n\n\n\n assert_request_eq!(handle, 
\"hello\").send_response(\"world\");\n\n assert_eq!(response.wait().unwrap(), \"world\");\n\n}\n\n\n", "file_path": "tower-load-shed/tests/load-shed.rs", "rank": 23, "score": 159225.48377457217 }, { "content": "#[test]\n\nfn when_not_ready() {\n\n let (mut service, mut handle) = new_service();\n\n\n\n handle.allow(0);\n\n\n\n with_task(|| {\n\n assert!(\n\n service.poll_ready().unwrap().is_ready(),\n\n \"overload always reports ready\",\n\n );\n\n });\n\n\n\n let fut = service.call(\"hello\");\n\n\n\n let err = fut.wait().unwrap_err();\n\n assert!(err.is::<tower_load_shed::error::Overloaded>());\n\n}\n\n\n", "file_path": "tower-load-shed/tests/load-shed.rs", "rank": 24, "score": 159225.48377457217 }, { "content": "#[test]\n\nfn poll_ready_inner_failure() {\n\n let mut cache = ReadyCache::<usize, Mock, Req>::default();\n\n\n\n let (service0, mut handle0) = mock::pair::<Req, Req>();\n\n handle0.send_error(\"doom\");\n\n cache.push(0, service0);\n\n\n\n let (service1, mut handle1) = mock::pair::<Req, Req>();\n\n handle1.allow(1);\n\n cache.push(1, service1);\n\n\n\n with_task(|| {\n\n let error::Failed(key, err) = cache\n\n .poll_pending()\n\n .err()\n\n .expect(\"poll_ready should fail when exhausted\");\n\n assert_eq!(key, 0);\n\n assert_eq!(format!(\"{}\", err), \"doom\");\n\n });\n\n\n\n assert_eq!(cache.len(), 1);\n\n}\n\n\n", "file_path": "tower-ready-cache/tests/ready_cache.rs", "rank": 25, "score": 157735.72398392454 }, { "content": "#[test]\n\nfn poll_ready_promotes_inner() {\n\n let mut cache = ReadyCache::<usize, Mock, Req>::default();\n\n\n\n let (service0, mut handle0) = mock::pair::<Req, Req>();\n\n handle0.allow(1);\n\n cache.push(0, service0);\n\n\n\n let (service1, mut handle1) = mock::pair::<Req, Req>();\n\n handle1.allow(1);\n\n cache.push(1, service1);\n\n\n\n assert_eq!(cache.ready_len(), 0);\n\n assert_eq!(cache.pending_len(), 2);\n\n assert_eq!(cache.len(), 2);\n\n\n\n with_task(|| {\n\n assert!(cache.poll_pending().expect(\"must succeed\").is_ready());\n\n });\n\n\n\n assert_eq!(cache.ready_len(), 2);\n\n assert_eq!(cache.pending_len(), 0);\n\n assert_eq!(cache.len(), 2);\n\n}\n", "file_path": "tower-ready-cache/tests/ready_cache.rs", "rank": 26, "score": 157735.72398392454 }, { "content": "#[test]\n\nfn when_inner_fails() {\n\n //use std::error::Error as StdError;\n\n\n\n let (mut service, mut handle) = new_service();\n\n\n\n // Make the service NotReady\n\n handle.allow(0);\n\n handle.send_error(\"foobar\");\n\n\n\n with_task(|| {\n\n let e = service.poll_ready().unwrap_err();\n\n assert_eq!(e.to_string(), \"foobar\");\n\n });\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 27, "score": 157559.21474228386 }, { "content": "#[test]\n\nfn when_spawn_fails() {\n\n let (service, mut handle) = mock::pair::<(), ()>();\n\n\n\n let exec = ExecFn(|_| Err(()));\n\n let mut service = SpawnReady::with_executor(service, exec);\n\n\n\n // Make the service NotReady so a background task is spawned.\n\n handle.allow(0);\n\n\n\n let err = with_task(|| service.poll_ready().expect_err(\"poll_ready should error\"));\n\n\n\n assert!(\n\n err.is::<error::SpawnError>(),\n\n \"should be a SpawnError: {:?}\",\n\n err\n\n );\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 28, "score": 157559.21474228386 }, { "content": "/// Exposes a load metric.\n\npub trait Load {\n\n /// A comparable load metric. 
Lesser values are \"preferable\" to greater values.\n\n type Metric: PartialOrd;\n\n\n\n /// Obtains a service's load.\n\n fn load(&self) -> Self::Metric;\n\n}\n", "file_path": "tower-load/src/lib.rs", "rank": 29, "score": 155062.25256521394 }, { "content": "type Key = usize;\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 30, "score": 153081.5524087536 }, { "content": "#[test]\n\nfn empty() {\n\n let empty: Vec<load::Constant<mock::Mock<(), &'static str>, usize>> = vec![];\n\n let disco = ServiceList::new(empty);\n\n let mut svc = Balance::from_entropy(disco);\n\n with_task(|| {\n\n assert_not_ready!(svc);\n\n })\n\n}\n\n\n", "file_path": "tower-balance/src/p2c/test.rs", "rank": 31, "score": 152889.78808144465 }, { "content": "/// A boxed `Future` trait object.\n\n///\n\n/// This type alias represents a boxed future that is *not* `Send` and must\n\n/// remain on the current thread.\n\ntype UnsyncBoxFuture<T, E> = Box<dyn Future<Item = T, Error = E>>;\n\n\n", "file_path": "tower-util/src/boxed/unsync.rs", "rank": 32, "score": 151679.39496036957 }, { "content": "struct ExecFn<Func>(Func);\n\n\n\nimpl<Func, F> TypedExecutor<F> for ExecFn<Func>\n\nwhere\n\n Func: Fn(F) -> Result<(), ()>,\n\n F: Future<Item = (), Error = ()> + Send + 'static,\n\n{\n\n fn spawn(&mut self, fut: F) -> Result<(), SpawnError> {\n\n (self.0)(fut).map_err(|()| SpawnError::shutdown())\n\n }\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 33, "score": 151525.164053604 }, { "content": "/// A boxed `Future + Send` trait object.\n\n///\n\n/// This type alias represents a boxed future that is `Send` and can be moved\n\n/// across threads.\n\ntype BoxFuture<T, E> = Box<dyn Future<Item = T, Error = E> + Send>;\n\n\n", "file_path": "tower-util/src/boxed/sync.rs", "rank": 34, "score": 149989.01076710594 }, { "content": "#[test]\n\nfn high_load() {\n\n // start the pool\n\n let (mock, mut handle) =\n\n mock::pair::<(), load::Constant<mock::Mock<(), &'static str>, usize>>();\n\n let mut pool = Builder::new()\n\n .urgency(1.0) // so _any_ NotReady will add a service\n\n .underutilized_below(0.0) // so no Ready will remove a service\n\n .max_services(Some(2))\n\n .build(mock, ());\n\n with_task(|| {\n\n assert_not_ready!(pool);\n\n });\n\n\n\n // give the pool a backing service\n\n let (svc1_m, mut svc1) = mock::pair();\n\n svc1.allow(1);\n\n handle\n\n .next_request()\n\n .unwrap()\n\n .1\n", "file_path": "tower-balance/src/pool/test.rs", "rank": 35, "score": 149976.3922045676 }, { "content": "#[test]\n\nfn low_load() {\n\n // start the pool\n\n let (mock, mut handle) =\n\n mock::pair::<(), load::Constant<mock::Mock<(), &'static str>, usize>>();\n\n let mut pool = Builder::new()\n\n .urgency(1.0) // so any event will change the service count\n\n .build(mock, ());\n\n with_task(|| {\n\n assert_not_ready!(pool);\n\n });\n\n\n\n // give the pool a backing service\n\n let (svc1_m, mut svc1) = mock::pair();\n\n svc1.allow(1);\n\n handle\n\n .next_request()\n\n .unwrap()\n\n .1\n\n .send_response(load::Constant::new(svc1_m, 0));\n\n with_task(|| {\n", "file_path": "tower-balance/src/pool/test.rs", "rank": 36, "score": 149976.3922045676 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct RefCount(Arc<()>);\n\n\n\n/// Wraps `inner`'s services with `PendingRequests`.\n\n#[derive(Debug)]\n\npub struct PendingRequestsDiscover<D, I = NoInstrument> {\n\n discover: D,\n\n instrument: I,\n\n}\n\n\n\n/// Represents the number of currently-pending requests to a given service.\n\n#[derive(Clone, 
Copy, Debug, Default, PartialOrd, PartialEq, Ord, Eq)]\n\npub struct Count(usize);\n\n\n\n/// Tracks an in-flight request by reference count.\n\n#[derive(Debug)]\n\npub struct Handle(RefCount);\n\n\n\n// ===== impl PendingRequests =====\n\n\n\nimpl<S, I> PendingRequests<S, I> {\n", "file_path": "tower-load/src/pending_requests.rs", "rank": 37, "score": 149162.68022735868 }, { "content": "fn new_service() -> (LoadShed<Mock>, Handle) {\n\n let (service, handle) = mock::pair();\n\n let service = LoadShed::new(service);\n\n (service, handle)\n\n}\n\n\n", "file_path": "tower-load-shed/tests/load-shed.rs", "rank": 38, "score": 148238.4015465026 }, { "content": "#[test]\n\nfn service_drop_frees_capacity() {\n\n let mut task = MockTask::new();\n\n\n\n let (mut s1, _handle) = new_service(1);\n\n\n\n let mut s2 = s1.clone();\n\n\n\n // Reserve capacity in s1\n\n assert_ready!(s1.poll_ready());\n\n\n\n // Service 2 cannot get capacity\n\n task.enter(|| {\n\n assert_not_ready!(s2.poll_ready());\n\n });\n\n\n\n drop(s1);\n\n\n\n assert!(task.is_notified());\n\n assert_ready!(s2.poll_ready());\n\n}\n\n\n", "file_path": "tower-limit/tests/concurrency.rs", "rank": 39, "score": 148221.93878983037 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "tower/src/util.rs", "rank": 40, "score": 147422.32390753535 }, { "content": "#[test]\n\nfn response_future_drop_releases_capacity() {\n\n let mut task = MockTask::new();\n\n\n\n let (mut s1, _handle) = new_service(1);\n\n\n\n let mut s2 = s1.clone();\n\n\n\n // Reserve capacity in s1\n\n task.enter(|| {\n\n assert_ready!(s1.poll_ready());\n\n });\n\n\n\n // s1 sends the request, then s2 is able to get capacity\n\n let r1 = s1.call(\"hello\");\n\n\n\n task.enter(|| {\n\n assert_not_ready!(s2.poll_ready());\n\n });\n\n\n\n drop(r1);\n\n\n\n task.enter(|| {\n\n assert!(s2.poll_ready().unwrap().is_ready());\n\n });\n\n}\n\n\n", "file_path": "tower-limit/tests/concurrency.rs", "rank": 41, "score": 146277.43517080872 }, { "content": "#[test]\n\nfn response_future_when_worker_is_dropped_early() {\n\n let (service, mut handle) = mock::pair::<_, ()>();\n\n\n\n // hold the worker in a cell until we want to drop it later\n\n let cell = RefCell::new(None);\n\n let mut exec = ExecFn(|fut| {\n\n *cell.borrow_mut() = Some(fut);\n\n Ok(())\n\n });\n\n\n\n let mut service = Buffer::with_executor(service, 1, &mut exec);\n\n\n\n // keep the request in the worker\n\n handle.allow(0);\n\n let response = service.call(\"hello\");\n\n\n\n // drop the worker (like an executor closing up)\n\n cell.borrow_mut().take();\n\n\n\n let err = response.wait().expect_err(\"res.wait\");\n\n assert!(err.is::<error::Closed>(), \"should be a Closed: {:?}\", err);\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 42, "score": 146277.43517080872 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "tower-hedge/src/lib.rs", "rank": 43, "score": 146274.3230062187 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n", "file_path": "tower-retry/tests/retry.rs", "rank": 44, "score": 146274.3230062187 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "tower-util/tests/call_all.rs", "rank": 45, "score": 146274.3230062187 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n\nimpl<A, B, Request> Service<Request> for Either<A, B>\n\nwhere\n\n A: Service<Request>,\n\n A::Error: Into<Error>,\n\n B: Service<Request, Response = 
A::Response>,\n\n B::Error: Into<Error>,\n\n{\n\n type Response = A::Response;\n\n type Error = Error;\n\n type Future = Either<A::Future, B::Future>;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n use self::Either::*;\n\n\n\n match self {\n\n A(service) => service.poll_ready().map_err(Into::into),\n\n B(service) => service.poll_ready().map_err(Into::into),\n\n }\n", "file_path": "tower-util/src/either.rs", "rank": 46, "score": 146274.3230062187 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "tower-reconnect/src/lib.rs", "rank": 47, "score": 146274.3230062187 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n", "file_path": "tower-util/src/call_all/mod.rs", "rank": 48, "score": 145159.9768163321 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n", "file_path": "tower-limit/src/concurrency/mod.rs", "rank": 49, "score": 145159.9768163321 }, { "content": "fn new_service() -> (SpawnReady<Mock, Exec>, Handle) {\n\n let (service, handle) = mock::pair();\n\n let service = SpawnReady::with_executor(service, Exec);\n\n (service, handle)\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 50, "score": 144952.98182026838 }, { "content": "fn gen_disco() -> impl Discover<\n\n Key = Key,\n\n Error = Error,\n\n Service = ConcurrencyLimit<\n\n impl Service<Req, Response = Rsp, Error = Error, Future = impl Send> + Send,\n\n >,\n\n> + Send {\n\n Disco(\n\n MAX_ENDPOINT_LATENCIES\n\n .iter()\n\n .enumerate()\n\n .map(|(instance, latency)| {\n\n let svc = tower::service_fn(move |_| {\n\n let start = Instant::now();\n\n\n\n let maxms = u64::from(latency.subsec_nanos() / 1_000 / 1_000)\n\n .saturating_add(latency.as_secs().saturating_mul(1_000));\n\n let latency = Duration::from_millis(rand::thread_rng().gen_range(0, maxms));\n\n\n\n timer::Delay::new(start + latency)\n", "file_path": "tower-balance/examples/demo.rs", "rank": 51, "score": 144378.36843136174 }, { "content": "type Req = &'static str;\n", "file_path": "tower-hedge/tests/hedge.rs", "rank": 52, "score": 139429.9402223703 }, { "content": "type Req = &'static str;\n", "file_path": "tower-retry/tests/retry.rs", "rank": 53, "score": 139429.9402223703 }, { "content": "fn with_task<F: FnOnce() -> U, U>(f: F) -> U {\n\n use futures::future::lazy;\n\n lazy(|| Ok::<_, ()>(f())).wait().unwrap()\n\n}\n", "file_path": "tower-balance/src/p2c/test.rs", "rank": 54, "score": 139038.91725262045 }, { "content": "#[cfg(test)]\n\n#[allow(dead_code)]\n\ntype ListVecTest<T> = ServiceList<Vec<T>>;\n\n\n", "file_path": "tower-discover/src/list.rs", "rank": 55, "score": 134333.7099914683 }, { "content": "fn new_service(max: usize) -> (ConcurrencyLimit<Mock>, Handle) {\n\n let (service, handle) = mock::pair();\n\n let service = ConcurrencyLimit::new(service, max);\n\n (service, handle)\n\n}\n", "file_path": "tower-limit/tests/concurrency.rs", "rank": 56, "score": 131553.42626349683 }, { "content": "type Mock = mock::Mock<Req, Res>;\n", "file_path": "tower-retry/tests/retry.rs", "rank": 57, "score": 129952.60993687999 }, { "content": "type Handle = mock::Handle<Req, Res>;\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 58, "score": 129952.60993687999 }, { "content": "/// A \"retry policy\" to classify if a request should be retried.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// extern crate futures;\n\n/// extern crate tower_retry;\n\n///\n\n/// use tower_retry::Policy;\n\n///\n\n/// type Req = String;\n\n/// type Res = 
String;\n\n///\n\n/// struct Attempts(usize);\n\n///\n\n/// impl<E> Policy<Req, Res, E> for Attempts {\n\n/// type Future = futures::future::FutureResult<Self, ()>;\n\n///\n\n/// fn retry(&self, req: &Req, result: Result<&Res, &E>) -> Option<Self::Future> {\n\n/// match result {\n\n/// Ok(_) => {\n\n/// // Treat all `Response`s as success,\n\n/// // so don't retry...\n\n/// None\n\n/// },\n\n/// Err(_) => {\n\n/// // Treat all errors as failures...\n\n/// // But we limit the number of attempts...\n\n/// if self.0 > 0 {\n\n/// // Try again!\n\n/// Some(futures::future::ok(Attempts(self.0 - 1)))\n\n/// } else {\n\n/// // Used all our attempts, no retry...\n\n/// None\n\n/// }\n\n/// }\n\n/// }\n\n/// }\n\n///\n\n/// fn clone_request(&self, req: &Req) -> Option<Req> {\n\n/// Some(req.clone())\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Policy<Req, Res, E>: Sized {\n\n /// The `Future` type returned by `Policy::retry()`.\n\n type Future: Future<Item = Self, Error = ()>;\n\n /// Check the policy if a certain request should be retried.\n\n ///\n\n /// This method is passed a reference to the original request, and either\n\n /// the `Service::Response` or `Service::Error` from the inner service.\n\n ///\n\n /// If the request should **not** be retried, return `None`.\n\n ///\n\n /// If the request *should* be retried, return `Some` future of a new\n\n /// policy that would apply for the next request attempt.\n\n ///\n\n /// If the returned `Future` errors, the request will **not** be retried\n\n /// after all.\n\n fn retry(&self, req: &Req, result: Result<&Res, &E>) -> Option<Self::Future>;\n\n /// Tries to clone a request before being passed to the inner service.\n\n ///\n\n /// If the request cannot be cloned, return `None`.\n\n fn clone_request(&self, req: &Req) -> Option<Req>;\n\n}\n", "file_path": "tower-retry/src/policy.rs", "rank": 59, "score": 126870.1120429037 }, { "content": "pub fn ms(num: u64) -> Duration {\n\n Duration::from_millis(num)\n\n}\n\n\n", "file_path": "tower-hedge/tests/support/mod.rs", "rank": 60, "score": 126415.50278212721 }, { "content": "#[cfg(test)]\n\n#[allow(dead_code)]\n\ntype ListVecIterTest<T> = ServiceList<::std::vec::IntoIter<T>>;\n", "file_path": "tower-discover/src/list.rs", "rank": 61, "score": 126046.38019033284 }, { "content": "type Mock = tower_test::mock::Mock<Req, Res>;\n", "file_path": "tower-hedge/tests/hedge.rs", "rank": 74, "score": 124332.48216420726 }, { "content": "type Handle = tower_test::mock::Handle<Req, Res>;\n\n\n\nstatic NOT_RETRYABLE: &'static str = \"NOT_RETRYABLE\";\n\nstatic NOT_CLONABLE: &'static str = \"NOT_CLONABLE\";\n\n\n", "file_path": "tower-hedge/tests/hedge.rs", "rank": 75, "score": 124332.48216420726 }, { "content": "/// Provide a uniform set of services able to satisfy a request.\n\n///\n\n/// This set of services may be updated over time. 
On each change to the set, a\n\n/// new `NewServiceSet` is yielded by `Discover`.\n\n///\n\n/// See crate documentation for more details.\n\npub trait Discover {\n\n /// NewService key\n\n type Key: Hash + Eq;\n\n\n\n type Service;\n\n\n\n /// Error produced during discovery\n\n type Error;\n\n\n\n /// Yields the next discovery change set.\n\n fn poll(&mut self) -> Poll<Change<Self::Key, Self::Service>, Self::Error>;\n\n}\n\n\n\n/// A change in the service set\n\npub enum Change<K, V> {\n\n Insert(K, V),\n\n Remove(K),\n\n}\n", "file_path": "tower-discover/src/lib.rs", "rank": 76, "score": 122991.45737650158 }, { "content": "struct Exec;\n\n\n\nimpl<F> TypedExecutor<F> for Exec\n\nwhere\n\n F: Future<Item = (), Error = ()> + Send + 'static,\n\n{\n\n fn spawn(&mut self, fut: F) -> Result<(), SpawnError> {\n\n thread::spawn(move || {\n\n fut.wait().unwrap();\n\n });\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 77, "score": 122522.1773046134 }, { "content": "#[test]\n\nfn when_inner_is_not_ready() {\n\n let (mut service, mut handle) = new_service();\n\n\n\n // Make the service NotReady\n\n handle.allow(0);\n\n\n\n with_task(|| {\n\n let poll = service.poll_ready();\n\n assert!(poll.expect(\"poll_ready\").is_not_ready());\n\n });\n\n\n\n // Make the service is Ready\n\n handle.allow(1);\n\n thread::sleep(Duration::from_millis(100));\n\n with_task(|| {\n\n let poll = service.poll_ready();\n\n assert!(poll.expect(\"poll_ready\").is_ready());\n\n });\n\n}\n\n\n", "file_path": "tower-spawn-ready/tests/spawn_ready.rs", "rank": 78, "score": 122275.68082446248 }, { "content": "enum State<S: Service<Req>, Req> {\n\n NotReady(S, Req),\n\n Called(S::Future),\n\n Tmp,\n\n}\n\n\n\nimpl<S, Req> Oneshot<S, Req>\n\nwhere\n\n S: Service<Req>,\n\n{\n\n pub fn new(svc: S, req: Req) -> Self {\n\n Oneshot {\n\n state: State::NotReady(svc, req),\n\n }\n\n }\n\n}\n\n\n\nimpl<S, Req> Future for Oneshot<S, Req>\n\nwhere\n\n S: Service<Req>,\n", "file_path": "tower-util/src/oneshot.rs", "rank": 79, "score": 121785.60776173804 }, { "content": "pub fn mocked<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(&mut Timer<MockPark>, &mut MockTime) -> R,\n\n{\n\n mocked_with_now(Instant::now(), f)\n\n}\n\n\n", "file_path": "tower-hedge/tests/support/mod.rs", "rank": 80, "score": 120052.05812020655 }, { "content": "/// An asynchronous function from a `Request` to a `Response`.\n\n///\n\n/// The `Service` trait is a simplified interface making it easy to write\n\n/// network applications in a modular and reusable way, decoupled from the\n\n/// underlying protocol. It is one of Tower's fundamental abstractions.\n\n///\n\n/// # Functional\n\n///\n\n/// A `Service` is a function of a `Request`. It immediately returns a\n\n/// `Future` representing the eventual completion of processing the\n\n/// request. The actual request processing may happen at any time in the\n\n/// future, on any thread or executor. The processing may depend on calling\n\n/// other services. At some point in the future, the processing will complete,\n\n/// and the `Future` will resolve to a response or error.\n\n///\n\n/// At a high level, the `Service::call` function represents an RPC request. The\n\n/// `Service` value can be a server or a client.\n\n///\n\n/// # Server\n\n///\n\n/// An RPC server *implements* the `Service` trait. Requests received by the\n\n/// server over the network are deserialized and then passed as an argument to the\n\n/// server value. 
The returned response is sent back over the network.\n\n///\n\n/// As an example, here is how an HTTP request is processed by a server:\n\n///\n\n/// ```rust,ignore\n\n/// impl Service<http::Request> for HelloWorld {\n\n/// type Response = http::Response;\n\n/// type Error = http::Error;\n\n/// type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;\n\n///\n\n/// fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n/// Ok(Async::Ready(()))\n\n/// }\n\n///\n\n/// fn call(&mut self, req: http::Request) -> Self::Future {\n\n/// // Create the HTTP response\n\n/// let resp = http::Response::ok()\n\n/// .with_body(b\"hello world\\n\");\n\n///\n\n/// // Return the response as an immediate future\n\n/// Box::new(futures::finished(resp))\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// # Client\n\n///\n\n/// A client consumes a service by using a `Service` value. The client may\n\n/// issue requests by invoking `call` and passing the request as an argument.\n\n/// It then receives the response by waiting for the returned future.\n\n///\n\n/// As an example, here is how a Redis request would be issued:\n\n///\n\n/// ```rust,ignore\n\n/// let client = redis::Client::new()\n\n/// .connect(\"127.0.0.1:6379\".parse().unwrap())\n\n/// .unwrap();\n\n///\n\n/// let resp = client.call(Cmd::set(\"foo\", \"this is the value of foo\"));\n\n///\n\n/// // Wait for the future to resolve\n\n/// println!(\"Redis response: {:?}\", await(resp));\n\n/// ```\n\n///\n\n/// # Middleware / Layer\n\n///\n\n/// More often than not, all the pieces needed for writing robust, scalable\n\n/// network applications are the same no matter the underlying protocol. By\n\n/// unifying the API for both clients and servers in a protocol agnostic way,\n\n/// it is possible to write middleware that provide these pieces in a\n\n/// reusable way.\n\n///\n\n/// Take timeouts as an example:\n\n///\n\n/// ```rust,ignore\n\n/// use tower_service::Service;\n\n/// use tower_layer::Layer;\n\n/// use futures::Future;\n\n/// use std::time::Duration;\n\n///\n\n///\n\n/// pub struct Timeout<T> {\n\n/// inner: T,\n\n/// delay: Duration,\n\n/// timer: Timer,\n\n/// }\n\n///\n\n/// pub struct TimeoutLayer(Duration);\n\n///\n\n/// pub struct Expired;\n\n///\n\n/// impl<T> Timeout<T> {\n\n/// pub fn new(inner: T, timeout: Duration) -> Timeout<T> {\n\n/// Timeout {\n\n/// inner,\n\n/// timeout\n\n/// }\n\n/// }\n\n/// }\n\n///\n\n/// impl<T, Request> Service<Request> for Timeout<T>\n\n/// where\n\n/// T: Service<Request>,\n\n/// T::Future: 'static,\n\n/// T::Error: From<Expired> + 'static,\n\n/// T::Response: 'static\n\n/// {\n\n/// type Response = T::Response;\n\n/// type Error = T::Error;\n\n/// type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;\n\n///\n\n/// fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n/// self.inner.poll_ready().map_err(Into::into)\n\n/// }\n\n///\n\n/// fn call(&mut self, req: Request) -> Self::Future {\n\n/// let timeout = tokio_timer::sleep(self.timeout)\n\n/// .then(|_| Err(Self::Error::from(Expired)));\n\n///\n\n/// let f = self.inner.call(req).select(timeout)\n\n/// .map(|(v, _)| v)\n\n/// .map_err(|(e, _)| e);\n\n///\n\n/// Box::new(f)\n\n/// }\n\n/// }\n\n///\n\n/// impl TimeoutLayer {\n\n/// pub fn new(delay: Duration) -> Self {\n\n/// TimeoutLayer(delay)\n\n/// }\n\n/// }\n\n///\n\n/// impl<S, Request> Layer<S, Request> for TimeoutLayer\n\n/// where\n\n/// S: Service<Request>,\n\n/// {\n\n/// type Response = S::Response;\n\n/// type Error = S::Error;\n\n/// type Service = 
Timeout<S>;\n\n///\n\n/// fn layer(&self, service: S) -> Timeout<S> {\n\n/// Timeout::new(service, self.0)\n\n/// }\n\n/// }\n\n///\n\n/// ```\n\n///\n\n/// The above timeout implementation is decoupled from the underlying protocol\n\n/// and is also decoupled from client or server concerns. In other words, the\n\n/// same timeout middleware could be used in either a client or a server.\n\n///\n\n/// # Backpressure\n\n///\n\n/// Calling a `Service` which is at capacity (i.e., it is temporarily unable to process a\n\n/// request) should result in an error. The caller is responsible for ensuring\n\n/// that the service is ready to receive the request before calling it.\n\n///\n\n/// `Service` provides a mechanism by which the caller is able to coordinate\n\n/// readiness. `Service::poll_ready` returns `Ready` if the service expects that\n\n/// it is able to process a request.\n\npub trait Service<Request> {\n\n /// Responses given by the service.\n\n type Response;\n\n\n\n /// Errors produced by the service.\n\n type Error;\n\n\n\n /// The future response value.\n\n type Future: Future<Item = Self::Response, Error = Self::Error>;\n\n\n\n /// Returns `Ready` when the service is able to process requests.\n\n ///\n\n /// If the service is at capacity, then `NotReady` is returned and the task\n\n /// is notified when the service becomes ready again. This function is\n\n /// expected to be called while on a task.\n\n ///\n\n /// If `Err` is returned, the service is no longer able to service requests\n\n /// and the caller should discard the service instance.\n\n ///\n\n /// Once `poll_ready` returns `Ready`, a request may be dispatched to the\n", "file_path": "tower-service/src/lib.rs", "rank": 81, "score": 118832.00313972267 }, { "content": "#[derive(Debug)]\n\nstruct MockSvc;\n\nimpl Service<Request> for MockSvc {\n\n type Response = Response;\n\n type Error = Void;\n\n type Future = FutureResult<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n Ok(().into())\n\n }\n\n\n\n fn call(&mut self, _: Request) -> Self::Future {\n\n future::ok(Response)\n\n }\n\n}\n\n\n", "file_path": "tower/tests/builder.rs", "rank": 82, "score": 117637.324470411 }, { "content": "#[derive(Clone)]\n\nstruct CannotClone;\n\n\n\nimpl Policy<Req, Res, Error> for CannotClone {\n\n type Future = future::FutureResult<Self, ()>;\n\n fn retry(&self, _: &Req, _: Result<&Res, &Error>) -> Option<Self::Future> {\n\n unreachable!(\"retry cannot be called since request isn't cloned\");\n\n }\n\n\n\n fn clone_request(&self, _req: &Req) -> Option<Req> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 83, "score": 117587.72117699045 }, { "content": "#[derive(Debug)]\n\nstruct Rsp {\n\n latency: Duration,\n\n instance: usize,\n\n}\n", "file_path": "tower-balance/examples/demo.rs", "rank": 84, "score": 116755.75342781923 }, { "content": "struct Summary {\n\n latencies: Histogram<u64>,\n\n start: Instant,\n\n count_by_instance: [usize; 10],\n\n}\n\n\n", "file_path": "tower-balance/examples/demo.rs", "rank": 85, "score": 116755.75342781923 }, { "content": "#[derive(Clone)]\n\nstruct RetryErrors;\n\n\n\nimpl Policy<Req, Res, Error> for RetryErrors {\n\n type Future = future::FutureResult<Self, ()>;\n\n fn retry(&self, _: &Req, result: Result<&Res, &Error>) -> Option<Self::Future> {\n\n if result.is_err() {\n\n Some(future::ok(RetryErrors))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn clone_request(&self, req: &Req) -> Option<Req> {\n\n Some(*req)\n\n }\n\n}\n\n\n", 
"file_path": "tower-retry/tests/retry.rs", "rank": 86, "score": 115460.6334385869 }, { "content": "#[derive(Debug)]\n\nstruct Limit {\n\n semaphore: Arc<Semaphore>,\n\n permit: semaphore::Permit,\n\n}\n\n\n\nimpl<T> ConcurrencyLimit<T> {\n\n /// Create a new concurrency limiter.\n\n pub fn new(inner: T, max: usize) -> Self {\n\n ConcurrencyLimit {\n\n inner,\n\n limit: Limit {\n\n semaphore: Arc::new(Semaphore::new(max)),\n\n permit: semaphore::Permit::new(),\n\n },\n\n }\n\n }\n\n\n\n /// Get a reference to the inner service\n\n pub fn get_ref(&self) -> &T {\n\n &self.inner\n", "file_path": "tower-limit/src/concurrency/service.rs", "rank": 87, "score": 115058.98455746405 }, { "content": "#[derive(Debug)]\n\nenum PendingError<K, E> {\n\n Canceled(K),\n\n Inner(K, E),\n\n}\n\n\n\n/// A Future that becomes satisfied when an `S`-typed service is ready.\n\n///\n\n/// May fail due to cancelation, i.e. if the service is evicted from the balancer.\n", "file_path": "tower-ready-cache/src/cache.rs", "rank": 88, "score": 114973.09211347281 }, { "content": "#[test]\n\nfn builder_service() {\n\n tokio::run(future::lazy(|| {\n\n let policy = MockPolicy;\n\n let mut client = ServiceBuilder::new()\n\n .layer(BufferLayer::new(5))\n\n .layer(ConcurrencyLimitLayer::new(5))\n\n .layer(RateLimitLayer::new(5, Duration::from_secs(1)))\n\n .layer(RetryLayer::new(policy))\n\n .layer(BufferLayer::new(5))\n\n .service(MockSvc);\n\n\n\n client.poll_ready().unwrap();\n\n client\n\n .call(Request)\n\n .map(|_| ())\n\n .map_err(|_| panic!(\"this is bad\"))\n\n }));\n\n}\n\n\n", "file_path": "tower/tests/builder.rs", "rank": 89, "score": 113784.95226886864 }, { "content": "#[derive(Clone)]\n\nstruct Limit(usize);\n\n\n\nimpl Policy<Req, Res, Error> for Limit {\n\n type Future = future::FutureResult<Self, ()>;\n\n fn retry(&self, _: &Req, result: Result<&Res, &Error>) -> Option<Self::Future> {\n\n if result.is_err() && self.0 > 0 {\n\n Some(future::ok(Limit(self.0 - 1)))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn clone_request(&self, req: &Req) -> Option<Req> {\n\n Some(*req)\n\n }\n\n}\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 90, "score": 113715.99354922851 }, { "content": "fn main() {\n\n tracing::subscriber::set_global_default(tracing_subscriber::fmt::Subscriber::default())\n\n .unwrap();\n\n\n\n println!(\"REQUESTS={}\", REQUESTS);\n\n println!(\"CONCURRENCY={}\", CONCURRENCY);\n\n println!(\"ENDPOINT_CAPACITY={}\", ENDPOINT_CAPACITY);\n\n print!(\"MAX_ENDPOINT_LATENCIES=[\");\n\n for max in &MAX_ENDPOINT_LATENCIES {\n\n let l = max.as_secs() * 1_000 + u64::from(max.subsec_nanos() / 1_000 / 1_000);\n\n print!(\"{}ms, \", l);\n\n }\n\n println!(\"]\");\n\n\n\n let mut rt = runtime::Runtime::new().unwrap();\n\n\n\n let fut = future::lazy(move || {\n\n let decay = Duration::from_secs(10);\n\n let d = gen_disco();\n\n let pe = lb::p2c::Balance::from_entropy(load::PeakEwmaDiscover::new(\n", "file_path": "tower-balance/examples/demo.rs", "rank": 91, "score": 113714.8186021002 }, { "content": "/// Holds the current RTT estimate and the last time this value was updated.\n\nstruct RttEstimate {\n\n update_at: Instant,\n\n rtt_ns: f64,\n\n}\n\n\n\nconst NANOS_PER_MILLI: f64 = 1_000_000.0;\n\n\n\n// ===== impl PeakEwma =====\n\n\n\nimpl<D, I> PeakEwmaDiscover<D, I> {\n\n /// Wraps a `D`-typed `Discover` so that services have a `PeakEwma` load metric.\n\n ///\n\n /// The provided `default_rtt` is used as the default RTT estimate for newly\n\n /// added services.\n\n ///\n\n /// They `decay` value determines 
over what time period a RTT estimate should\n\n /// decay.\n\n pub fn new<Request>(discover: D, default_rtt: Duration, decay: Duration, instrument: I) -> Self\n\n where\n\n D: Discover,\n", "file_path": "tower-load/src/peak_ewma.rs", "rank": 92, "score": 113497.98900927571 }, { "content": "/// An extension trait for `Service`s that provides a variety of convenient\n\n/// adapters\n\npub trait ServiceExt<Request>: Service<Request> {\n\n /// A future yielding the service when it is ready to accept a request.\n\n fn ready(self) -> Ready<Self, Request>\n\n where\n\n Self: Sized,\n\n {\n\n Ready::new(self)\n\n }\n\n\n\n /// Consume this `Service`, calling with the providing request once it is ready.\n\n fn oneshot(self, req: Request) -> Oneshot<Self, Request>\n\n where\n\n Self: Sized,\n\n {\n\n Oneshot::new(self, req)\n\n }\n\n\n\n /// Process all requests from the given `Stream`, and produce a `Stream` of their responses.\n\n ///\n\n /// This is essentially `Stream<Item = Request>` + `Self` => `Stream<Item = Response>`. See the\n", "file_path": "tower/src/util.rs", "rank": 93, "score": 112799.28121900535 }, { "content": "#[test]\n\nfn when_inner_fails() {\n\n use std::error::Error as StdError;\n\n\n\n let (mut service, mut handle) = new_service();\n\n\n\n // Make the service NotReady\n\n handle.allow(0);\n\n handle.send_error(\"foobar\");\n\n\n\n let mut res1 = service.call(\"hello\");\n\n\n\n // Allow the Buffer's executor to do work\n\n ::std::thread::sleep(::std::time::Duration::from_millis(100));\n\n with_task(|| {\n\n let e = res1.poll().unwrap_err();\n\n if let Some(e) = e.downcast_ref::<error::ServiceError>() {\n\n let e = e.source().unwrap();\n\n\n\n assert_eq!(e.to_string(), \"foobar\");\n\n } else {\n\n panic!(\"unexpected error type: {:?}\", e);\n\n }\n\n });\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 94, "score": 112781.92969591971 }, { "content": "#[test]\n\nfn when_spawn_fails() {\n\n let (service, _handle) = mock::pair::<(), ()>();\n\n\n\n let mut exec = ExecFn(|_| Err(()));\n\n\n\n let mut service = Buffer::with_executor(service, 1, &mut exec);\n\n\n\n let err = with_task(|| {\n\n service\n\n .poll_ready()\n\n .expect_err(\"buffer poll_ready should error\")\n\n });\n\n\n\n assert!(\n\n err.is::<error::SpawnError>(),\n\n \"should be a SpawnError: {:?}\",\n\n err\n\n );\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 95, "score": 112781.92969591971 }, { "content": "#[test]\n\nfn success_with_cannot_clone() {\n\n // Even though the request couldn't be cloned, if the first request succeeds,\n\n // it should succeed overall.\n\n let (mut service, mut handle) = new_service(CannotClone);\n\n\n\n assert!(service.poll_ready().unwrap().is_ready());\n\n let fut = service.call(\"hello\");\n\n\n\n assert_request_eq!(handle, \"hello\").send_response(\"world\");\n\n assert_eq!(fut.wait().unwrap(), \"world\");\n\n}\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 96, "score": 112771.66548663519 }, { "content": "#[test]\n\nfn req_and_res() {\n\n let (mut service, mut handle) = new_service();\n\n\n\n let response = service.call(\"hello\");\n\n\n\n assert_request_eq!(handle, \"hello\").send_response(\"world\");\n\n\n\n assert_eq!(response.wait().unwrap(), \"world\");\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 97, "score": 112669.06524400847 }, { "content": "#[test]\n\nfn retry_errors() {\n\n let (mut service, mut handle) = new_service(RetryErrors);\n\n\n\n assert!(service.poll_ready().unwrap().is_ready());\n\n let mut fut 
= service.call(\"hello\");\n\n\n\n assert_request_eq!(handle, \"hello\").send_error(\"retry me\");\n\n\n\n assert_not_ready(&mut fut);\n\n\n\n assert_request_eq!(handle, \"hello\").send_response(\"world\");\n\n\n\n assert_eq!(fut.wait().unwrap(), \"world\");\n\n}\n\n\n", "file_path": "tower-retry/tests/retry.rs", "rank": 98, "score": 112427.58973538115 }, { "content": "#[test]\n\nfn when_inner_is_not_ready() {\n\n let (mut service, mut handle) = new_service();\n\n\n\n // Make the service NotReady\n\n handle.allow(0);\n\n\n\n let mut res1 = service.call(\"hello\");\n\n\n\n // Allow the Buffer's executor to do work\n\n ::std::thread::sleep(::std::time::Duration::from_millis(100));\n\n with_task(|| {\n\n assert!(res1.poll().expect(\"res1.poll\").is_not_ready());\n\n assert!(handle.poll_request().expect(\"poll_request\").is_not_ready());\n\n });\n\n\n\n handle.allow(1);\n\n\n\n assert_request_eq!(handle, \"hello\").send_response(\"world\");\n\n\n\n assert_eq!(res1.wait().expect(\"res1.wait\"), \"world\");\n\n}\n\n\n", "file_path": "tower-buffer/tests/buffer.rs", "rank": 99, "score": 112053.23855479511 } ]
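The context items above all exercise tower's readiness contract: poll_ready reports whether a service has capacity before call dispatches a request, and the tests drive that contract through mock services. The following dependency-free sketch illustrates only that poll_ready/call shape; the Poll enum, Service trait, and CountingService type below are simplified stand-ins invented here for illustration, not tower's real types.

// Simplified stand-ins for the poll_ready/call contract shown in the
// tower snippets above; not the real tower-service API.
#[derive(Debug, PartialEq)]
enum Poll<T> {
    Ready(T),
    NotReady,
}

trait Service<Request> {
    type Response;
    /// Report whether the service can accept a request right now.
    fn poll_ready(&mut self) -> Poll<()>;
    /// Dispatch a request; callers should only do this after a Ready poll.
    fn call(&mut self, req: Request) -> Self::Response;
}

/// Toy service with a fixed concurrency budget, mirroring the
/// ConcurrencyLimit-style tests in the snippets above.
struct CountingService {
    in_flight: usize,
    max: usize,
}

impl Service<&'static str> for CountingService {
    type Response = String;

    fn poll_ready(&mut self) -> Poll<()> {
        if self.in_flight < self.max {
            Poll::Ready(())
        } else {
            Poll::NotReady
        }
    }

    fn call(&mut self, req: &'static str) -> String {
        self.in_flight += 1;
        format!("handled: {}", req)
    }
}

fn main() {
    let mut svc = CountingService { in_flight: 0, max: 1 };
    assert_eq!(svc.poll_ready(), Poll::Ready(()));
    assert_eq!(svc.call("hello"), "handled: hello");
    // Capacity is now exhausted, so readiness is withheld.
    assert_eq!(svc.poll_ready(), Poll::NotReady);
}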
Rust
src/views/krate_publish.rs
vignesh-sankaran/crates.io
387ae4f2e9f6e653804bb063ae30cadd8c3382be
use std::collections::HashMap; use semver; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use models::krate::MAX_NAME_LENGTH; use models::Crate; use models::DependencyKind; use models::Keyword as CrateKeyword; #[derive(Deserialize, Serialize, Debug)] pub struct EncodableCrateUpload { pub name: EncodableCrateName, pub vers: EncodableCrateVersion, pub deps: Vec<EncodableCrateDependency>, pub features: HashMap<EncodableFeatureName, Vec<EncodableFeature>>, pub authors: Vec<String>, pub description: Option<String>, pub homepage: Option<String>, pub documentation: Option<String>, pub readme: Option<String>, pub readme_file: Option<String>, pub keywords: Option<EncodableKeywordList>, pub categories: Option<EncodableCategoryList>, pub license: Option<String>, pub license_file: Option<String>, pub repository: Option<String>, pub badges: Option<HashMap<String, HashMap<String, String>>>, #[serde(default)] pub links: Option<String>, } #[derive(PartialEq, Eq, Hash, Serialize, Debug, Deref)] pub struct EncodableCrateName(pub String); #[derive(Debug, Deref)] pub struct EncodableCrateVersion(pub semver::Version); #[derive(Debug, Deref)] pub struct EncodableCrateVersionReq(pub semver::VersionReq); #[derive(Serialize, Debug, Deref)] pub struct EncodableKeywordList(pub Vec<EncodableKeyword>); #[derive(Serialize, Debug, Deref)] pub struct EncodableKeyword(pub String); #[derive(Serialize, Debug, Deref)] pub struct EncodableCategoryList(pub Vec<EncodableCategory>); #[derive(Serialize, Deserialize, Debug, Deref)] pub struct EncodableCategory(pub String); #[derive(Serialize, Debug, Deref)] pub struct EncodableFeature(pub String); #[derive(PartialEq, Eq, Hash, Serialize, Debug, Deref)] pub struct EncodableFeatureName(pub String); #[derive(Serialize, Deserialize, Debug)] pub struct EncodableCrateDependency { pub optional: bool, pub default_features: bool, pub name: EncodableCrateName, pub features: Vec<EncodableFeature>, pub version_req: EncodableCrateVersionReq, pub target: Option<String>, pub kind: Option<DependencyKind>, pub explicit_name_in_toml: Option<EncodableCrateName>, } impl<'de> Deserialize<'de> for EncodableCrateName { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateName, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_name(&s) { let value = de::Unexpected::Str(&s); let expected = format!( "a valid crate name to start with a letter, contain only letters, \ numbers, hyphens, or underscores and have at most {} characters", MAX_NAME_LENGTH ); Err(de::Error::invalid_value(value, &expected.as_ref())) } else { Ok(EncodableCrateName(s)) } } } impl<T: ?Sized> PartialEq<T> for EncodableCrateName where String: PartialEq<T>, { fn eq(&self, rhs: &T) -> bool { self.0 == *rhs } } impl<'de> Deserialize<'de> for EncodableKeyword { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableKeyword, D::Error> { let s = String::deserialize(d)?; if !CrateKeyword::valid_name(&s) { let value = de::Unexpected::Str(&s); let expected = "a valid keyword specifier"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableKeyword(s)) } } } impl<'de> Deserialize<'de> for EncodableFeatureName { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_feature_name(&s) { let value = de::Unexpected::Str(&s); let expected = "a valid feature name containing only letters, \ numbers, hyphens, or underscores"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableFeatureName(s)) } } } impl<'de> 
Deserialize<'de> for EncodableFeature { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableFeature, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_feature(&s) { let value = de::Unexpected::Str(&s); let expected = "a valid feature name"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableFeature(s)) } } } impl<'de> Deserialize<'de> for EncodableCrateVersion { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateVersion, D::Error> { let s = String::deserialize(d)?; match semver::Version::parse(&s) { Ok(v) => Ok(EncodableCrateVersion(v)), Err(..) => { let value = de::Unexpected::Str(&s); let expected = "a valid semver"; Err(de::Error::invalid_value(value, &expected)) } } } } impl<'de> Deserialize<'de> for EncodableCrateVersionReq { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateVersionReq, D::Error> { let s = String::deserialize(d)?; match semver::VersionReq::parse(&s) { Ok(v) => Ok(EncodableCrateVersionReq(v)), Err(..) => { let value = de::Unexpected::Str(&s); let expected = "a valid version req"; Err(de::Error::invalid_value(value, &expected)) } } } } impl<T: ?Sized> PartialEq<T> for EncodableCrateVersionReq where semver::VersionReq: PartialEq<T>, { fn eq(&self, rhs: &T) -> bool { self.0 == *rhs } } impl<'de> Deserialize<'de> for EncodableKeywordList { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableKeywordList, D::Error> { let inner = <Vec<EncodableKeyword> as Deserialize<'de>>::deserialize(d)?; if inner.len() > 5 { let expected = "at most 5 keywords per crate"; return Err(de::Error::invalid_length(inner.len(), &expected)); } for val in &inner { if val.len() > 20 { let expected = "a keyword with less than 20 characters"; return Err(de::Error::invalid_length(val.len(), &expected)); } } Ok(EncodableKeywordList(inner)) } } impl<'de> Deserialize<'de> for EncodableCategoryList { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCategoryList, D::Error> { let inner = <Vec<EncodableCategory> as Deserialize<'de>>::deserialize(d)?; if inner.len() > 5 { let expected = "at most 5 categories per crate"; Err(de::Error::invalid_length(inner.len(), &expected)) } else { Ok(EncodableCategoryList(inner)) } } } impl Serialize for EncodableCrateVersion { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&(**self).to_string()) } } impl Serialize for EncodableCrateVersionReq { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&(**self).to_string()) } } use diesel::pg::Pg; use diesel::serialize::{self, Output, ToSql}; use diesel::sql_types::Text; use std::io::Write; impl ToSql<Text, Pg> for EncodableFeature { fn to_sql<W: Write>(&self, out: &mut Output<'_, W, Pg>) -> serialize::Result { ToSql::<Text, Pg>::to_sql(&**self, out) } } #[test] fn feature_deserializes_for_valid_features() { use serde_json as json; assert!(json::from_str::<EncodableFeature>("\"foo\"").is_ok()); assert!(json::from_str::<EncodableFeature>("\"\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"/\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"%/%\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"a/a\"").is_ok()); assert!(json::from_str::<EncodableFeature>("\"32-column-tables\"").is_ok()); }
use std::collections::HashMap; use semver; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use models::krate::MAX_NAME_LENGTH; use models::Crate; use models::DependencyKind; use models::Keyword as CrateKeyword; #[derive(Deserialize, Serialize, Debug)] pub struct EncodableCrateUpload { pub name: EncodableCrateName, pub vers: EncodableCrateVersion, pub deps: Vec<EncodableCrateDependency>, pub features: HashMap<EncodableFeatureName, Vec<EncodableFeature>>, pub authors: Vec<String>, pub description: Option<String>, pub homepage: Option<String>, pub documentation: Option<String>, pub readme: Option<String>, pub readme_file: Option<String>, pub keywords: Option<EncodableKeywordList>, pub categories: Option<EncodableCategoryList>, pub license: Option<String>, pub license_file: Option<String>, pub repository: Option<String>, pub badges: Option<HashMap<String, HashMap<String, String>>>, #[serde(default)] pub links: Option<String>, } #[derive(PartialEq, Eq, Hash, Serialize, Debug, Deref)] pub struct EncodableCrateName(pub String); #[derive(Debug, Deref)] pub struct EncodableCrateVersion(pub semver::Version); #[derive(Debug, Deref)] pub struct EncodableCrateVersionReq(pub semver::VersionReq); #[derive(Serialize, Debug, Deref)] pub struct EncodableKeywordList(pub Vec<EncodableKeyword>); #[derive(Serialize, Debug, Deref)] pub struct EncodableKeyword(pub String); #[derive(Serialize, Debug, Deref)] pub struct EncodableCategoryList(pub Vec<EncodableCategory>); #[derive(Serialize, Deserialize, Debug, Deref)] pub struct EncodableCategory(pub String); #[derive(Serialize, Debug, Deref)] pub struct EncodableFeature(pub String); #[derive(PartialEq, Eq, Hash, Serialize, Debug, Deref)] pub struct EncodableFeatureName(pub String); #[derive(Serialize, Deserialize, Debug)] pub struct EncodableCrateDependency { pub optional: bool, pub default_features: bool, pub name: EncodableCrateName, pub features: Vec<EncodableFeature>, pub version_req: EncodableCrateVersionReq, pub target: Option<String>, pub kind: Option<DependencyKind>, pub explicit_name_in_toml: Option<EncodableCrateName>, } impl<'de> Deserialize<'de> for EncodableCrateName { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateName, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_name(&s) { let value = de::Unexpected::Str(&s); let expected = format!( "a valid crate name to start with a letter, contain only letters, \ numbers, hyphens, or underscores and have at most {} characters", MAX_NAME_LENGTH ); Err(de::Error::invalid_value(value, &expected.as_ref())) } else { Ok(EncodableCrateName(s)) } } } impl<T: ?Sized> PartialEq<T> for EncodableCrateName where String: PartialEq<T>, { fn eq(&self, rhs: &T) -> bool { self.0 == *rhs } } impl<'de> Deserialize<'de> for EncodableKeyword { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableKeyword, D::Error> { let s = String::deserialize(d)?; if !CrateKeyword::valid_name(&s) { let value = de::Unexpected::Str(&s); let expected = "a valid keyword specifier"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableKeyword(s)) } } } impl<'de> Deserialize<'de> for EncodableFeatureName { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_feature_name(&s) { let value = de::Unexpected::Str(&s);
} impl<'de> Deserialize<'de> for EncodableFeature { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableFeature, D::Error> { let s = String::deserialize(d)?; if !Crate::valid_feature(&s) { let value = de::Unexpected::Str(&s); let expected = "a valid feature name"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableFeature(s)) } } } impl<'de> Deserialize<'de> for EncodableCrateVersion { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateVersion, D::Error> { let s = String::deserialize(d)?; match semver::Version::parse(&s) { Ok(v) => Ok(EncodableCrateVersion(v)), Err(..) => { let value = de::Unexpected::Str(&s); let expected = "a valid semver"; Err(de::Error::invalid_value(value, &expected)) } } } } impl<'de> Deserialize<'de> for EncodableCrateVersionReq { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCrateVersionReq, D::Error> { let s = String::deserialize(d)?; match semver::VersionReq::parse(&s) { Ok(v) => Ok(EncodableCrateVersionReq(v)), Err(..) => { let value = de::Unexpected::Str(&s); let expected = "a valid version req"; Err(de::Error::invalid_value(value, &expected)) } } } } impl<T: ?Sized> PartialEq<T> for EncodableCrateVersionReq where semver::VersionReq: PartialEq<T>, { fn eq(&self, rhs: &T) -> bool { self.0 == *rhs } } impl<'de> Deserialize<'de> for EncodableKeywordList { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableKeywordList, D::Error> { let inner = <Vec<EncodableKeyword> as Deserialize<'de>>::deserialize(d)?; if inner.len() > 5 { let expected = "at most 5 keywords per crate"; return Err(de::Error::invalid_length(inner.len(), &expected)); } for val in &inner { if val.len() > 20 { let expected = "a keyword with less than 20 characters"; return Err(de::Error::invalid_length(val.len(), &expected)); } } Ok(EncodableKeywordList(inner)) } } impl<'de> Deserialize<'de> for EncodableCategoryList { fn deserialize<D: Deserializer<'de>>(d: D) -> Result<EncodableCategoryList, D::Error> { let inner = <Vec<EncodableCategory> as Deserialize<'de>>::deserialize(d)?; if inner.len() > 5 { let expected = "at most 5 categories per crate"; Err(de::Error::invalid_length(inner.len(), &expected)) } else { Ok(EncodableCategoryList(inner)) } } } impl Serialize for EncodableCrateVersion { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&(**self).to_string()) } } impl Serialize for EncodableCrateVersionReq { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&(**self).to_string()) } } use diesel::pg::Pg; use diesel::serialize::{self, Output, ToSql}; use diesel::sql_types::Text; use std::io::Write; impl ToSql<Text, Pg> for EncodableFeature { fn to_sql<W: Write>(&self, out: &mut Output<'_, W, Pg>) -> serialize::Result { ToSql::<Text, Pg>::to_sql(&**self, out) } } #[test] fn feature_deserializes_for_valid_features() { use serde_json as json; assert!(json::from_str::<EncodableFeature>("\"foo\"").is_ok()); assert!(json::from_str::<EncodableFeature>("\"\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"/\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"%/%\"").is_err()); assert!(json::from_str::<EncodableFeature>("\"a/a\"").is_ok()); assert!(json::from_str::<EncodableFeature>("\"32-column-tables\"").is_ok()); }
let expected = "a valid feature name containing only letters, \ numbers, hyphens, or underscores"; Err(de::Error::invalid_value(value, &expected)) } else { Ok(EncodableFeatureName(s)) } }
function_block-function_prefix_line
[]
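The crates.io row above (the full file plus its prefix/suffix/middle split) repeats one serde pattern throughout: a newtype wrapper whose manual Deserialize impl validates the incoming string and rejects bad input with de::Error::invalid_value. A minimal standalone sketch of that pattern follows; it assumes the serde and serde_json crates, and the Tag newtype and valid_tag helper are hypothetical names introduced here, not part of the original file.

use serde::{de, Deserialize, Deserializer};

/// Hypothetical newtype mirroring the validating-Deserialize style of the
/// crates.io Encodable* wrappers above.
#[derive(Debug)]
struct Tag(String);

fn valid_tag(s: &str) -> bool {
    !s.is_empty()
        && s.chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

impl<'de> Deserialize<'de> for Tag {
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        let s = String::deserialize(d)?;
        if !valid_tag(&s) {
            // Same error-reporting shape as the crates.io newtypes above.
            let value = de::Unexpected::Str(&s);
            let expected = "a tag containing only letters, numbers, hyphens, or underscores";
            Err(de::Error::invalid_value(value, &expected))
        } else {
            Ok(Tag(s))
        }
    }
}

fn main() {
    assert!(serde_json::from_str::<Tag>("\"serde-1_0\"").is_ok());
    assert!(serde_json::from_str::<Tag>("\"no spaces\"").is_err());
}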
Rust
programs/vote/src/vote_processor.rs
Flawm/solana
551c24da5792f4452c3c555e562809e8c9e742e5
use { crate::{id, vote_instruction::VoteInstruction, vote_state}, log::*, solana_metrics::inc_new_counter_info, solana_program_runtime::{ invoke_context::InvokeContext, sysvar_cache::get_sysvar_with_account_check, }, solana_sdk::{ feature_set, instruction::InstructionError, keyed_account::{get_signers, keyed_account_at_index, KeyedAccount}, program_utils::limited_deserialize, pubkey::Pubkey, sysvar::rent::Rent, }, std::collections::HashSet, }; pub fn process_instruction( first_instruction_account: usize, data: &[u8], invoke_context: &mut InvokeContext, ) -> Result<(), InstructionError> { let keyed_accounts = invoke_context.get_keyed_accounts()?; trace!("process_instruction: {:?}", data); trace!("keyed_accounts: {:?}", keyed_accounts); let me = &mut keyed_account_at_index(keyed_accounts, first_instruction_account)?; if me.owner()? != id() { return Err(InstructionError::InvalidAccountOwner); } let signers: HashSet<Pubkey> = get_signers(&keyed_accounts[first_instruction_account..]); match limited_deserialize(data)? { VoteInstruction::InitializeAccount(vote_init) => { let rent = get_sysvar_with_account_check::rent( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; verify_rent_exemption(me, &rent)?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 2)?, invoke_context, )?; vote_state::initialize_account(me, &vote_init, &signers, &clock) } VoteInstruction::Authorize(voter_pubkey, vote_authorize) => { let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; vote_state::authorize( me, &voter_pubkey, vote_authorize, &signers, &clock, &invoke_context.feature_set, ) } VoteInstruction::UpdateValidatorIdentity => vote_state::update_validator_identity( me, keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?.unsigned_key(), &signers, ), VoteInstruction::UpdateCommission(commission) => { vote_state::update_commission(me, commission, &signers) } VoteInstruction::Vote(vote) | VoteInstruction::VoteSwitch(vote, _) => { inc_new_counter_info!("vote-native", 1); let slot_hashes = get_sysvar_with_account_check::slot_hashes( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 2)?, invoke_context, )?; vote_state::process_vote( me, &slot_hashes, &clock, &vote, &signers, &invoke_context.feature_set, ) } VoteInstruction::UpdateVoteState(vote_state_update) | VoteInstruction::UpdateVoteStateSwitch(vote_state_update, _) => { if invoke_context .feature_set .is_active(&feature_set::allow_votes_to_directly_update_vote_state::id()) { inc_new_counter_info!("vote-state-native", 1); let sysvar_cache = invoke_context.get_sysvar_cache(); let slot_hashes = sysvar_cache.get_slot_hashes()?; let clock = sysvar_cache.get_clock()?; vote_state::process_vote_state_update( me, slot_hashes.slot_hashes(), &clock, vote_state_update, &signers, ) } else { Err(InstructionError::InvalidInstructionData) } } VoteInstruction::Withdraw(lamports) => { let to = keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?; let rent_sysvar = if invoke_context .feature_set .is_active(&feature_set::reject_non_rent_exempt_vote_withdraws::id()) { Some(invoke_context.get_sysvar_cache().get_rent()?) 
} else { None }; vote_state::withdraw(me, lamports, to, &signers, rent_sysvar.as_deref()) } VoteInstruction::AuthorizeChecked(vote_authorize) => { if invoke_context .feature_set .is_active(&feature_set::vote_stake_checked_instructions::id()) { let voter_pubkey = &keyed_account_at_index(keyed_accounts, first_instruction_account + 3)? .signer_key() .ok_or(InstructionError::MissingRequiredSignature)?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; vote_state::authorize( me, voter_pubkey, vote_authorize, &signers, &clock, &invoke_context.feature_set, ) } else { Err(InstructionError::InvalidInstructionData) } } } } fn verify_rent_exemption( keyed_account: &KeyedAccount, rent: &Rent, ) -> Result<(), InstructionError> { if !rent.is_exempt(keyed_account.lamports()?, keyed_account.data_len()?) { Err(InstructionError::InsufficientFunds) } else { Ok(()) } } #[cfg(test)] mod tests { use { super::*, crate::{ vote_instruction::{ authorize, authorize_checked, create_account, update_commission, update_validator_identity, update_vote_state, update_vote_state_switch, vote, vote_switch, withdraw, VoteInstruction, }, vote_state::{Vote, VoteAuthorize, VoteInit, VoteState, VoteStateUpdate}, }, bincode::serialize, solana_program_runtime::invoke_context::mock_process_instruction, solana_sdk::{ account::{self, Account, AccountSharedData}, hash::Hash, instruction::{AccountMeta, Instruction}, sysvar::{self, clock::Clock, slot_hashes::SlotHashes}, }, std::str::FromStr, }; fn create_default_account() -> AccountSharedData { AccountSharedData::new(0, 0, &Pubkey::new_unique()) } fn process_instruction( instruction_data: &[u8], transaction_accounts: Vec<(Pubkey, AccountSharedData)>, instruction_accounts: Vec<AccountMeta>, expected_result: Result<(), InstructionError>, ) -> Vec<AccountSharedData> { mock_process_instruction( &id(), Vec::new(), instruction_data, transaction_accounts, instruction_accounts, expected_result, super::process_instruction, ) } fn process_instruction_as_one_arg( instruction: &Instruction, expected_result: Result<(), InstructionError>, ) -> Vec<AccountSharedData> { let mut pubkeys: HashSet<Pubkey> = instruction .accounts .iter() .map(|meta| meta.pubkey) .collect(); pubkeys.insert(sysvar::clock::id()); pubkeys.insert(sysvar::rent::id()); pubkeys.insert(sysvar::slot_hashes::id()); let transaction_accounts: Vec<_> = pubkeys .iter() .map(|pubkey| { ( *pubkey, if sysvar::clock::check_id(pubkey) { account::create_account_shared_data_for_test(&Clock::default()) } else if sysvar::slot_hashes::check_id(pubkey) { account::create_account_shared_data_for_test(&SlotHashes::default()) } else if sysvar::rent::check_id(pubkey) { account::create_account_shared_data_for_test(&Rent::free()) } else if *pubkey == invalid_vote_state_pubkey() { AccountSharedData::from(Account { owner: invalid_vote_state_pubkey(), ..Account::default() }) } else { AccountSharedData::from(Account { owner: id(), ..Account::default() }) }, ) }) .collect(); process_instruction( &instruction.data, transaction_accounts, instruction.accounts.clone(), expected_result, ) } fn invalid_vote_state_pubkey() -> Pubkey { Pubkey::from_str("BadVote111111111111111111111111111111111111").unwrap() } #[test] fn test_vote_process_instruction_decode_bail() { process_instruction( &[], Vec::new(), Vec::new(), Err(InstructionError::NotEnoughAccountKeys), ); } #[test] fn test_spoofed_vote() { process_instruction_as_one_arg( &vote( &invalid_vote_state_pubkey(), 
&Pubkey::new_unique(), Vote::default(), ), Err(InstructionError::InvalidAccountOwner), ); process_instruction_as_one_arg( &update_vote_state( &invalid_vote_state_pubkey(), &Pubkey::default(), VoteStateUpdate::default(), ), Err(InstructionError::InvalidAccountOwner), ); } #[test] fn test_vote_process_instruction() { solana_logger::setup(); let instructions = create_account( &Pubkey::new_unique(), &Pubkey::new_unique(), &VoteInit::default(), 101, ); process_instruction_as_one_arg(&instructions[1], Err(InstructionError::InvalidAccountData)); process_instruction_as_one_arg( &vote( &Pubkey::new_unique(), &Pubkey::new_unique(), Vote::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &vote_switch( &Pubkey::new_unique(), &Pubkey::new_unique(), Vote::default(), Hash::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &authorize( &Pubkey::new_unique(), &Pubkey::new_unique(), &Pubkey::new_unique(), VoteAuthorize::Voter, ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_vote_state( &Pubkey::default(), &Pubkey::default(), VoteStateUpdate::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_vote_state_switch( &Pubkey::default(), &Pubkey::default(), VoteStateUpdate::default(), Hash::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_validator_identity( &Pubkey::new_unique(), &Pubkey::new_unique(), &Pubkey::new_unique(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_commission(&Pubkey::new_unique(), &Pubkey::new_unique(), 0), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &withdraw( &Pubkey::new_unique(), &Pubkey::new_unique(), 0, &Pubkey::new_unique(), ), Err(InstructionError::InvalidAccountData), ); } #[test] fn test_vote_authorize_checked() { let vote_pubkey = Pubkey::new_unique(); let authorized_pubkey = Pubkey::new_unique(); let new_authorized_pubkey = Pubkey::new_unique(); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Voter, ); instruction.accounts = instruction.accounts[0..2].to_vec(); process_instruction_as_one_arg(&instruction, Err(InstructionError::NotEnoughAccountKeys)); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Withdrawer, ); instruction.accounts = instruction.accounts[0..2].to_vec(); process_instruction_as_one_arg(&instruction, Err(InstructionError::NotEnoughAccountKeys)); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Voter, ); instruction.accounts[3] = AccountMeta::new_readonly(new_authorized_pubkey, false); process_instruction_as_one_arg( &instruction, Err(InstructionError::MissingRequiredSignature), ); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Withdrawer, ); instruction.accounts[3] = AccountMeta::new_readonly(new_authorized_pubkey, false); process_instruction_as_one_arg( &instruction, Err(InstructionError::MissingRequiredSignature), ); let vote_account = AccountSharedData::new(100, VoteState::size_of(), &id()); let clock_address = sysvar::clock::id(); let clock_account = account::create_account_shared_data_for_test(&Clock::default()); let default_authorized_pubkey = Pubkey::default(); let authorized_account = 
create_default_account(); let new_authorized_account = create_default_account(); let transaction_accounts = vec![ (vote_pubkey, vote_account), (clock_address, clock_account), (default_authorized_pubkey, authorized_account), (new_authorized_pubkey, new_authorized_account), ]; let instruction_accounts = vec![ AccountMeta { pubkey: vote_pubkey, is_signer: false, is_writable: false, }, AccountMeta { pubkey: clock_address, is_signer: false, is_writable: false, }, AccountMeta { pubkey: default_authorized_pubkey, is_signer: true, is_writable: false, }, AccountMeta { pubkey: new_authorized_pubkey, is_signer: true, is_writable: false, }, ]; process_instruction( &serialize(&VoteInstruction::AuthorizeChecked(VoteAuthorize::Voter)).unwrap(), transaction_accounts.clone(), instruction_accounts.clone(), Ok(()), ); process_instruction( &serialize(&VoteInstruction::AuthorizeChecked( VoteAuthorize::Withdrawer, )) .unwrap(), transaction_accounts, instruction_accounts, Ok(()), ); } }
use { crate::{id, vote_instruction::VoteInstruction, vote_state}, log::*, solana_metrics::inc_new_counter_info, solana_program_runtime::{ invoke_context::InvokeContext, sysvar_cache::get_sysvar_with_account_check, }, solana_sdk::{ feature_set, instruction::InstructionError, keyed_account::{get_signers, keyed_account_at_index, KeyedAccount}, program_utils::limited_deserialize, pubkey::Pubkey, sysvar::rent::Rent, }, std::collections::HashSet, }; pub fn process_instruction( first_instruction_account: usize, data: &[u8], invoke_context: &mut InvokeContext, ) -> Result<(), InstructionError> { let keyed_accounts = invoke_context.get_keyed_accounts()?; trace!("process_instruction: {:?}", data); trace!("keyed_accounts: {:?}", keyed_accounts); let me = &mut keyed_account_at_index(keyed_accounts, first_instruction_account)?; if me.owner()? != id() { return Err(InstructionError::InvalidAccountOwner); } let signers: HashSet<Pubkey> = get_signers(&keyed_accounts[first_instruction_account..]); match limited_deserialize(data)? { VoteInstruction::InitializeAccount(vote_init) => { let rent = get_sysvar_with_account_check::rent( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; verify_rent_exemption(me, &rent)?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 2)?, invoke_context, )?; vote_state::initialize_account(me, &vote_init, &signers, &clock) } VoteInstruction::Authorize(voter_pubkey, vote_authorize) => { let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; vote_state::authorize( me, &voter_pubkey, vote_authorize, &signers, &clock, &invoke_context.feature_set, ) } VoteInstruction::UpdateValidatorIdentity => vote_state::update_validator_identity( me, keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?.unsigned_key(), &signers, ), VoteInstruction::UpdateCommission(commission) => { vote_state::update_commission(me, commission, &signers) } VoteInstruction::Vote(vote) | VoteInstruction::VoteSwitch(vote, _) => { inc_new_counter_info!("vote-native", 1); let slot_hashes = get_sysvar_with_account_check::slot_hashes( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 2)?, invoke_context, )?; vote_state::process_vote( me, &slot_hashes, &clock, &vote, &signers, &invoke_context.feature_set, ) } VoteInstruction::UpdateVoteState(vote_state_update) | VoteInstruction::UpdateVoteStateSwitch(vote_state_update, _) => { if invoke_context .feature_set .is_active(&feature_set::allow_votes_to_directly_update_vote_state::id()) { inc_new_counter_info!("vote-state-native", 1); let sysvar_cache = invoke_context.get_sysvar_cache(); let slot_hashes = sysvar_cache.get_slot_hashes()?; let clock = sysvar_cache.get_clock()?; vote_state::process_vote_state_update( me, slot_hashes.slot_hashes(), &clock, vote_state_update, &signers, ) } else { Err(InstructionError::InvalidInstructionData) } } VoteInstruction::Withdraw(lamports) => { let to = keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?; let rent_sysvar = if invoke_context .feature_set .is_active(&feature_set::reject_non_rent_exempt_vote_withdraws::id()) { Some(invoke_context.get_sysvar_cache().get_rent()?) 
} else { None }; vote_state::withdraw(me, lamports, to, &signers, rent_sysvar.as_deref()) } VoteInstruction::AuthorizeChecked(vote_authorize) => { if invoke_context .feature_set .is_active(&feature_set::vote_stake_checked_instructions::id()) { let voter_pubkey = &keyed_account_at_index(keyed_accounts, first_instruction_account + 3)? .signer_key() .ok_or(InstructionError::MissingRequiredSignature)?; let clock = get_sysvar_with_account_check::clock( keyed_account_at_index(keyed_accounts, first_instruction_account + 1)?, invoke_context, )?; vote_state::authorize( me, voter_pubkey, vote_authorize, &signers, &clock, &invoke_context.feature_set, ) } else { Err(InstructionError::InvalidInstructionData) } } } } fn verify_rent_exemption( keyed_account: &KeyedAccount, rent: &Rent, ) -> Result<(), InstructionError> { if !rent.is_exempt(keyed_account.lamports()?, keyed_account.data_len()?) { Err(InstructionError::InsufficientFunds) } else { Ok(()) } } #[cfg(test)] mod tests { use { super::*, crate::{ vote_instruction::{ authorize, authorize_checked, create_account, update_commission, update_validator_identity, update_vote_state, update_vote_state_switch, vote, vote_switch, withdraw, VoteInstruction, }, vote_state::{Vote, VoteAuthorize, VoteInit, VoteState, VoteStateUpdate}, }, bincode::serialize, solana_program_runtime::invoke_context::mock_process_instruction, solana_sdk::{ account::{self, Account, AccountSharedData}, hash::Hash, instruction::{AccountMeta, Instruction}, sysvar::{self, clock::Clock, slot_hashes::SlotHashes}, }, std::str::FromStr, }; fn create_default_account() -> AccountSharedData { AccountSharedData::new(0, 0, &Pubkey::new_unique()) } fn process_instruction( instruction_data: &[u8], transaction_accounts: Vec<(Pubkey, AccountSharedData)>, instruction_accounts: Vec<AccountMeta>, expected_result: Result<(), InstructionError>, ) -> Vec<AccountSharedData> { mock_process_instruction( &id(), Vec::new(), instruction_data, transaction_accounts, instruction_accounts, expected_result, super::process_instruction, ) } fn process_instruction_as_one_arg( instruction: &Instruction, expected_result: Result<(), InstructionError>, ) -> Vec<AccountSharedData> { let mut pubkeys: HashSet<Pubkey> = instruction .accounts .iter() .map(|meta| meta.pubkey) .collect(); pubkeys.insert(sysvar::clock::id()); pubkeys.insert(sysvar::rent::id()); pubkeys.insert(sysvar::slot_hashes::id()); let transaction_accounts: Vec<_> = pubkeys .iter() .map(|pubkey| { ( *pubkey, if sysvar::clock::check_id(pubkey) { account::create_account_shared_data_for_test(&Clock::default()) } else if sysvar::slot_hashes::check_id(pubkey) { account::create_account_shared_data_for_test(&SlotHashes::default()) } else if sysvar::rent::check_id(pubkey) { account::create_account_shared_data_for_test(&Rent::free()) } else if *pubkey == invalid_vote_state_pubkey() { AccountSharedData::from(Account { owner: invalid_vote_state_pubkey(), ..Account::default() }) } else { AccountSharedData::from(Account { owner: id(), ..Account::default() }) }, ) }) .collect(); process_instruction( &instruction.data, transaction_accounts, instruction.accounts.clone(), expected_result, ) } fn invalid_vote_state_pubkey() -> Pubkey { Pubkey::from_str("BadVote111111111111111111111111111111111111").unwrap() } #[test]
#[test] fn test_spoofed_vote() { process_instruction_as_one_arg( &vote( &invalid_vote_state_pubkey(), &Pubkey::new_unique(), Vote::default(), ), Err(InstructionError::InvalidAccountOwner), ); process_instruction_as_one_arg( &update_vote_state( &invalid_vote_state_pubkey(), &Pubkey::default(), VoteStateUpdate::default(), ), Err(InstructionError::InvalidAccountOwner), ); } #[test] fn test_vote_process_instruction() { solana_logger::setup(); let instructions = create_account( &Pubkey::new_unique(), &Pubkey::new_unique(), &VoteInit::default(), 101, ); process_instruction_as_one_arg(&instructions[1], Err(InstructionError::InvalidAccountData)); process_instruction_as_one_arg( &vote( &Pubkey::new_unique(), &Pubkey::new_unique(), Vote::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &vote_switch( &Pubkey::new_unique(), &Pubkey::new_unique(), Vote::default(), Hash::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &authorize( &Pubkey::new_unique(), &Pubkey::new_unique(), &Pubkey::new_unique(), VoteAuthorize::Voter, ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_vote_state( &Pubkey::default(), &Pubkey::default(), VoteStateUpdate::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_vote_state_switch( &Pubkey::default(), &Pubkey::default(), VoteStateUpdate::default(), Hash::default(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_validator_identity( &Pubkey::new_unique(), &Pubkey::new_unique(), &Pubkey::new_unique(), ), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &update_commission(&Pubkey::new_unique(), &Pubkey::new_unique(), 0), Err(InstructionError::InvalidAccountData), ); process_instruction_as_one_arg( &withdraw( &Pubkey::new_unique(), &Pubkey::new_unique(), 0, &Pubkey::new_unique(), ), Err(InstructionError::InvalidAccountData), ); } #[test] fn test_vote_authorize_checked() { let vote_pubkey = Pubkey::new_unique(); let authorized_pubkey = Pubkey::new_unique(); let new_authorized_pubkey = Pubkey::new_unique(); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Voter, ); instruction.accounts = instruction.accounts[0..2].to_vec(); process_instruction_as_one_arg(&instruction, Err(InstructionError::NotEnoughAccountKeys)); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Withdrawer, ); instruction.accounts = instruction.accounts[0..2].to_vec(); process_instruction_as_one_arg(&instruction, Err(InstructionError::NotEnoughAccountKeys)); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Voter, ); instruction.accounts[3] = AccountMeta::new_readonly(new_authorized_pubkey, false); process_instruction_as_one_arg( &instruction, Err(InstructionError::MissingRequiredSignature), ); let mut instruction = authorize_checked( &vote_pubkey, &authorized_pubkey, &new_authorized_pubkey, VoteAuthorize::Withdrawer, ); instruction.accounts[3] = AccountMeta::new_readonly(new_authorized_pubkey, false); process_instruction_as_one_arg( &instruction, Err(InstructionError::MissingRequiredSignature), ); let vote_account = AccountSharedData::new(100, VoteState::size_of(), &id()); let clock_address = sysvar::clock::id(); let clock_account = 
account::create_account_shared_data_for_test(&Clock::default()); let default_authorized_pubkey = Pubkey::default(); let authorized_account = create_default_account(); let new_authorized_account = create_default_account(); let transaction_accounts = vec![ (vote_pubkey, vote_account), (clock_address, clock_account), (default_authorized_pubkey, authorized_account), (new_authorized_pubkey, new_authorized_account), ]; let instruction_accounts = vec![ AccountMeta { pubkey: vote_pubkey, is_signer: false, is_writable: false, }, AccountMeta { pubkey: clock_address, is_signer: false, is_writable: false, }, AccountMeta { pubkey: default_authorized_pubkey, is_signer: true, is_writable: false, }, AccountMeta { pubkey: new_authorized_pubkey, is_signer: true, is_writable: false, }, ]; process_instruction( &serialize(&VoteInstruction::AuthorizeChecked(VoteAuthorize::Voter)).unwrap(), transaction_accounts.clone(), instruction_accounts.clone(), Ok(()), ); process_instruction( &serialize(&VoteInstruction::AuthorizeChecked( VoteAuthorize::Withdrawer, )) .unwrap(), transaction_accounts, instruction_accounts, Ok(()), ); } }
fn test_vote_process_instruction_decode_bail() {
    process_instruction(
        &[],
        Vec::new(),
        Vec::new(),
        Err(InstructionError::NotEnoughAccountKeys),
    );
}
function_block-full_function
[ { "content": "pub fn read_pubkey(current: &mut usize, data: &[u8]) -> Result<Pubkey, SanitizeError> {\n\n let len = std::mem::size_of::<Pubkey>();\n\n if data.len() < *current + len {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = Pubkey::new(&data[*current..*current + len]);\n\n *current += len;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 0, "score": 523196.66507640725 }, { "content": "pub fn spl_programs(rent: &Rent) -> Vec<(Pubkey, AccountSharedData)> {\n\n SPL_PROGRAMS\n\n .iter()\n\n .map(|(program_id, elf)| {\n\n (\n\n *program_id,\n\n AccountSharedData::from(Account {\n\n lamports: rent.minimum_balance(elf.len()).min(1),\n\n data: elf.to_vec(),\n\n owner: solana_sdk::bpf_loader::id(),\n\n executable: true,\n\n rent_epoch: 0,\n\n }),\n\n )\n\n })\n\n .collect()\n\n}\n", "file_path": "program-test/src/programs.rs", "rank": 1, "score": 491543.6484946439 }, { "content": "pub fn parse_sysvar(data: &[u8], pubkey: &Pubkey) -> Result<SysvarAccountType, ParseAccountError> {\n\n #[allow(deprecated)]\n\n let parsed_account = {\n\n if pubkey == &sysvar::clock::id() {\n\n deserialize::<Clock>(data)\n\n .ok()\n\n .map(|clock| SysvarAccountType::Clock(clock.into()))\n\n } else if pubkey == &sysvar::epoch_schedule::id() {\n\n deserialize(data).ok().map(SysvarAccountType::EpochSchedule)\n\n } else if pubkey == &sysvar::fees::id() {\n\n deserialize::<Fees>(data)\n\n .ok()\n\n .map(|fees| SysvarAccountType::Fees(fees.into()))\n\n } else if pubkey == &sysvar::recent_blockhashes::id() {\n\n deserialize::<RecentBlockhashes>(data)\n\n .ok()\n\n .map(|recent_blockhashes| {\n\n let recent_blockhashes = recent_blockhashes\n\n .iter()\n\n .map(|entry| UiRecentBlockhashesEntry {\n", "file_path": "account-decoder/src/parse_sysvar.rs", "rank": 2, "score": 491240.3400765221 }, { "content": "pub fn parse_config(data: &[u8], pubkey: &Pubkey) -> Result<ConfigAccountType, ParseAccountError> {\n\n let parsed_account = if pubkey == &stake_config::id() {\n\n get_config_data(data)\n\n .ok()\n\n .and_then(|data| deserialize::<StakeConfig>(data).ok())\n\n .map(|config| ConfigAccountType::StakeConfig(config.into()))\n\n } else {\n\n deserialize::<ConfigKeys>(data).ok().and_then(|key_list| {\n\n if !key_list.keys.is_empty() && key_list.keys[0].0 == validator_info::id() {\n\n parse_config_data::<String>(data, key_list.keys).and_then(|validator_info| {\n\n Some(ConfigAccountType::ValidatorInfo(UiConfig {\n\n keys: validator_info.keys,\n\n config_data: serde_json::from_str(&validator_info.config_data).ok()?,\n\n }))\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n };\n\n parsed_account.ok_or(ParseAccountError::AccountNotParsable(\n\n ParsableAccount::Config,\n\n ))\n\n}\n\n\n", "file_path": "account-decoder/src/parse_config.rs", "rank": 3, "score": 491240.3400765222 }, { "content": "pub fn parse_vote(data: &[u8]) -> Result<VoteAccountType, ParseAccountError> {\n\n let mut vote_state = VoteState::deserialize(data).map_err(ParseAccountError::from)?;\n\n let epoch_credits = vote_state\n\n .epoch_credits()\n\n .iter()\n\n .map(|(epoch, credits, previous_credits)| UiEpochCredits {\n\n epoch: *epoch,\n\n credits: credits.to_string(),\n\n previous_credits: previous_credits.to_string(),\n\n })\n\n .collect();\n\n let votes = vote_state\n\n .votes\n\n .iter()\n\n .map(|lockout| UiLockout {\n\n slot: lockout.slot,\n\n confirmation_count: lockout.confirmation_count,\n\n })\n\n .collect();\n\n let authorized_voters = vote_state\n", "file_path": "account-decoder/src/parse_vote.rs", 
"rank": 4, "score": 490027.35674022755 }, { "content": "pub fn read_u8(current: &mut usize, data: &[u8]) -> Result<u8, SanitizeError> {\n\n if data.len() < *current + 1 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = data[*current];\n\n *current += 1;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 5, "score": 479129.44715166825 }, { "content": "pub fn vote(vote_pubkey: &Pubkey, authorized_voter_pubkey: &Pubkey, vote: Vote) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::slot_hashes::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*authorized_voter_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(id(), &VoteInstruction::Vote(vote), account_metas)\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 6, "score": 476596.41145080177 }, { "content": "pub fn realloc(program_id: &Pubkey, address: &Pubkey, size: usize, bump: &mut u8) -> Instruction {\n\n let mut instruction_data = vec![REALLOC, *bump];\n\n instruction_data.extend_from_slice(&size.to_le_bytes());\n\n\n\n *bump += 1;\n\n\n\n Instruction::new_with_bytes(\n\n *program_id,\n\n &instruction_data,\n\n vec![AccountMeta::new(*address, false)],\n\n )\n\n}\n\n\n", "file_path": "programs/bpf/rust/realloc/src/instructions.rs", "rank": 7, "score": 474845.6043782558 }, { "content": "pub fn debug_account_data(data: &[u8], f: &mut fmt::DebugStruct<'_, '_>) {\n\n let data_len = cmp::min(MAX_DEBUG_ACCOUNT_DATA, data.len());\n\n if data_len > 0 {\n\n f.field(\"data\", &Hex(&data[..data_len]));\n\n }\n\n}\n\n\n\npub(crate) struct Hex<'a>(pub(crate) &'a [u8]);\n\nimpl fmt::Debug for Hex<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for &byte in self.0 {\n\n write!(f, \"{:02x}\", byte)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "sdk/program/src/debug_account_data.rs", "rank": 8, "score": 460051.68920617487 }, { "content": "pub fn load_instruction_at(index: usize, data: &[u8]) -> Result<Instruction, SanitizeError> {\n\n deserialize_instruction(index, data)\n\n}\n\n\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 9, "score": 454253.65128897474 }, { "content": "/// Deserialize with a limit based the maximum amount of data a program can expect to get.\n\n/// This function should be used in place of direct deserialization to help prevent OOM errors\n\npub fn limited_deserialize<T>(instruction_data: &[u8]) -> Result<T, InstructionError>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n solana_program::program_utils::limited_deserialize(\n\n instruction_data,\n\n crate::packet::PACKET_DATA_SIZE as u64,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_limited_deserialize() {\n\n #[derive(Deserialize, Serialize)]\n\n enum Foo {\n\n Bar(Vec<u8>),\n\n }\n", "file_path": "sdk/src/program_utils.rs", "rank": 10, "score": 445777.2903842712 }, { "content": "pub fn get_token_account_mint(data: &[u8]) -> Option<Pubkey> {\n\n if data.len() == Account::get_packed_len() {\n\n Some(Pubkey::new(&data[0..32]))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_token() {\n\n let mint_pubkey = SplTokenPubkey::new(&[2; 32]);\n\n let owner_pubkey = SplTokenPubkey::new(&[3; 32]);\n\n let mut account_data = vec![0; Account::get_packed_len()];\n\n let mut account = 
Account::unpack_unchecked(&account_data).unwrap();\n\n account.mint = mint_pubkey;\n\n account.owner = owner_pubkey;\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 11, "score": 443108.405902763 }, { "content": "pub fn read_u16(current: &mut usize, data: &[u8]) -> Result<u16, SanitizeError> {\n\n if data.len() < *current + 2 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let mut fixed_data = [0u8; 2];\n\n fixed_data.copy_from_slice(&data[*current..*current + 2]);\n\n let e = u16::from_le_bytes(fixed_data);\n\n *current += 2;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 12, "score": 439355.009370326 }, { "content": "/// Analyze a mint Pubkey that may be the native_mint and get the mint-account owner (token\n\n/// program_id) and decimals\n\npub fn get_mint_owner_and_decimals(bank: &Arc<Bank>, mint: &Pubkey) -> Result<(Pubkey, u8)> {\n\n if mint == &spl_token_native_mint() {\n\n Ok((spl_token_id(), spl_token::native_mint::DECIMALS))\n\n } else {\n\n let mint_account = bank.get_account(mint).ok_or_else(|| {\n\n Error::invalid_params(\"Invalid param: could not find mint\".to_string())\n\n })?;\n\n let decimals = get_mint_decimals(mint_account.data())?;\n\n Ok((*mint_account.owner(), decimals))\n\n }\n\n}\n\n\n", "file_path": "rpc/src/parsed_token_accounts.rs", "rank": 13, "score": 437306.57086203573 }, { "content": "#[allow(dead_code)]\n\npub fn sol_log_params(accounts: &[AccountInfo], data: &[u8]) {\n\n for (i, account) in accounts.iter().enumerate() {\n\n msg!(\"AccountInfo\");\n\n sol_log_64(0, 0, 0, 0, i as u64);\n\n msg!(\"- Is signer\");\n\n sol_log_64(0, 0, 0, 0, account.is_signer as u64);\n\n msg!(\"- Key\");\n\n account.key.log();\n\n msg!(\"- Lamports\");\n\n sol_log_64(0, 0, 0, 0, account.lamports());\n\n msg!(\"- Account data length\");\n\n sol_log_64(0, 0, 0, 0, account.data_len() as u64);\n\n msg!(\"- Owner\");\n\n account.owner.log();\n\n }\n\n msg!(\"Instruction data\");\n\n sol_log_slice(data);\n\n}\n\n\n\n/// Print the remaining compute units the program may consume\n", "file_path": "sdk/program/src/log.rs", "rank": 14, "score": 434431.21709057677 }, { "content": "/// Print some slices as base64\n\n///\n\n/// @param data - The slices to print\n\npub fn sol_log_data(data: &[&[u8]]) {\n\n #[cfg(target_arch = \"bpf\")]\n\n {\n\n extern \"C\" {\n\n fn sol_log_data(data: *const u8, data_len: u64);\n\n }\n\n\n\n unsafe { sol_log_data(data as *const _ as *const u8, data.len() as u64) };\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_log_data(data);\n\n}\n\n\n\n/// Print the hexadecimal representation of a slice\n\n///\n\n/// @param slice - The array to print\n", "file_path": "sdk/program/src/log.rs", "rank": 15, "score": 429143.36887710204 }, { "content": "pub fn from_keyed_account(account: &KeyedAccount) -> Result<Config, InstructionError> {\n\n if !config::check_id(account.unsigned_key()) {\n\n return Err(InstructionError::InvalidArgument);\n\n }\n\n from(&*account.try_account_ref()?).ok_or(InstructionError::InvalidArgument)\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 16, "score": 427008.6742412091 }, { "content": "pub fn parse_memo_data(data: &[u8]) -> Result<String, Utf8Error> {\n\n from_utf8(data).map(|s| s.to_string())\n\n}\n\n\n\npub(crate) fn check_num_accounts(\n\n accounts: &[u8],\n\n num: usize,\n\n parsable_program: ParsableProgram,\n\n) -> Result<(), ParseInstructionError> {\n\n if accounts.len() < num {\n\n 
Err(ParseInstructionError::InstructionKeyMismatch(\n\n parsable_program,\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "transaction-status/src/parse_instruction.rs", "rank": 17, "score": 426376.1695341333 }, { "content": "/// Store the current `Instruction`'s index in the Instructions Sysvar data\n\npub fn store_current_index(data: &mut [u8], instruction_index: u16) {\n\n let last_index = data.len() - 2;\n\n data[last_index..last_index + 2].copy_from_slice(&instruction_index.to_le_bytes());\n\n}\n\n\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 18, "score": 426376.0744382971 }, { "content": "pub fn parse_stake(data: &[u8]) -> Result<StakeAccountType, ParseAccountError> {\n\n let stake_state: StakeState = deserialize(data)\n\n .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::Stake))?;\n\n let parsed_account = match stake_state {\n\n StakeState::Uninitialized => StakeAccountType::Uninitialized,\n\n StakeState::Initialized(meta) => StakeAccountType::Initialized(UiStakeAccount {\n\n meta: meta.into(),\n\n stake: None,\n\n }),\n\n StakeState::Stake(meta, stake) => StakeAccountType::Delegated(UiStakeAccount {\n\n meta: meta.into(),\n\n stake: Some(stake.into()),\n\n }),\n\n StakeState::RewardsPool => StakeAccountType::RewardsPool,\n\n };\n\n Ok(parsed_account)\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\", tag = \"type\", content = \"info\")]\n", "file_path": "account-decoder/src/parse_stake.rs", "rank": 19, "score": 422521.0938721789 }, { "content": "pub fn is_set_authority_instruction(instruction_data: &[u8]) -> bool {\n\n !instruction_data.is_empty() && 4 == instruction_data[0]\n\n}\n\n\n", "file_path": "sdk/program/src/bpf_loader_upgradeable.rs", "rank": 20, "score": 420746.58300823264 }, { "content": "pub fn withdraw(\n\n vote_pubkey: &Pubkey,\n\n authorized_withdrawer_pubkey: &Pubkey,\n\n lamports: u64,\n\n to_pubkey: &Pubkey,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new(*to_pubkey, false),\n\n AccountMeta::new_readonly(*authorized_withdrawer_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(id(), &VoteInstruction::Withdraw(lamports), account_metas)\n\n}\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 21, "score": 418818.6182002791 }, { "content": "pub fn assign(pubkey: &Pubkey, owner: &Pubkey) -> Instruction {\n\n let account_metas = vec![AccountMeta::new(*pubkey, true)];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n &SystemInstruction::Assign { owner: *owner },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 22, "score": 417726.12902963546 }, { "content": "/// Activate a feature\n\npub fn activate(feature_id: &Pubkey, funding_address: &Pubkey, rent: &Rent) -> Vec<Instruction> {\n\n activate_with_lamports(\n\n feature_id,\n\n funding_address,\n\n rent.minimum_balance(Feature::size_of()),\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/feature.rs", "rank": 23, "score": 414360.73368772626 }, { "content": "fn initialize_account(vote_pubkey: &Pubkey, vote_init: &VoteInit) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(vote_init.node_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n 
id(),\n\n &VoteInstruction::InitializeAccount(*vote_init),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 24, "score": 413650.1225285565 }, { "content": "pub fn parse_nonce(data: &[u8]) -> Result<UiNonceState, ParseAccountError> {\n\n let nonce_state: Versions = bincode::deserialize(data)\n\n .map_err(|_| ParseAccountError::from(InstructionError::InvalidAccountData))?;\n\n let nonce_state = nonce_state.convert_to_current();\n\n match nonce_state {\n\n // This prevents parsing an allocated System-owned account with empty data of any non-zero\n\n // length as `uninitialized` nonce. An empty account of the wrong length can never be\n\n // initialized as a nonce account, and an empty account of the correct length may not be an\n\n // uninitialized nonce account, since it can be assigned to another program.\n\n State::Uninitialized => Err(ParseAccountError::from(\n\n InstructionError::InvalidAccountData,\n\n )),\n\n State::Initialized(data) => Ok(UiNonceState::Initialized(UiNonceData {\n\n authority: data.authority.to_string(),\n\n blockhash: data.blockhash.to_string(),\n\n fee_calculator: data.fee_calculator.into(),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "account-decoder/src/parse_nonce.rs", "rank": 25, "score": 413387.21512334223 }, { "content": "/// Set a program's return data\n\npub fn set_return_data(data: &[u8]) {\n\n #[cfg(target_arch = \"bpf\")]\n\n {\n\n extern \"C\" {\n\n fn sol_set_return_data(data: *const u8, length: u64);\n\n }\n\n\n\n unsafe { sol_set_return_data(data.as_ptr(), data.len() as u64) };\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_set_return_data(data)\n\n}\n\n\n", "file_path": "sdk/program/src/program.rs", "rank": 26, "score": 412691.22509461315 }, { "content": "fn check_num_vote_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::Vote)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{hash::Hash, message::Message, pubkey::Pubkey},\n\n solana_vote_program::{\n\n vote_instruction,\n\n vote_state::{Vote, VoteAuthorize, VoteInit},\n\n },\n\n };\n\n\n\n #[test]\n\n #[allow(clippy::same_item_push)]\n\n fn test_parse_vote_instruction() {\n\n let mut keys: Vec<Pubkey> = vec![];\n\n for _ in 0..5 {\n", "file_path": "transaction-status/src/parse_vote.rs", "rank": 27, "score": 411757.1018811194 }, { "content": "pub fn finalize(account_pubkey: &Pubkey, program_id: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*account_pubkey, true),\n\n AccountMeta::new_readonly(rent::id(), false),\n\n ];\n\n Instruction::new_with_bincode(*program_id, &LoaderInstruction::Finalize, account_metas)\n\n}\n", "file_path": "sdk/program/src/loader_instruction.rs", "rank": 28, "score": 411692.67247353715 }, { "content": "pub fn create_account(\n\n from_pubkey: &Pubkey,\n\n vote_pubkey: &Pubkey,\n\n vote_init: &VoteInit,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n let space = VoteState::size_of() as u64;\n\n let create_ix =\n\n system_instruction::create_account(from_pubkey, vote_pubkey, lamports, space, &id());\n\n let init_ix = initialize_account(vote_pubkey, vote_init);\n\n vec![create_ix, init_ix]\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 29, "score": 410115.3684684355 }, { "content": "/// Deserialize with a limit based the maximum amount of data a program can expect to get.\n\n/// This function should be used in place of direct 
deserialization to help prevent OOM errors\n\npub fn limited_deserialize<T>(instruction_data: &[u8], limit: u64) -> Result<T, InstructionError>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n bincode::options()\n\n .with_limit(limit)\n\n .with_fixint_encoding() // As per https://github.com/servo/bincode/issues/333, these two options are needed\n\n .allow_trailing_bytes() // to retain the behavior of bincode::deserialize with the new `options()` method\n\n .deserialize_from(instruction_data)\n\n .map_err(|_| InstructionError::InvalidInstructionData)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use {super::*, solana_program::system_instruction::SystemInstruction};\n\n\n\n #[test]\n\n fn test_limited_deserialize_advance_nonce_account() {\n\n let item = SystemInstruction::AdvanceNonceAccount;\n\n let serialized = bincode::serialize(&item).unwrap();\n", "file_path": "sdk/program/src/program_utils.rs", "rank": 30, "score": 409029.78165020735 }, { "content": "/// Get the return data from invoked program\n\npub fn get_return_data() -> Option<(Pubkey, Vec<u8>)> {\n\n #[cfg(target_arch = \"bpf\")]\n\n {\n\n use std::cmp::min;\n\n\n\n extern \"C\" {\n\n fn sol_get_return_data(data: *mut u8, length: u64, program_id: *mut Pubkey) -> u64;\n\n }\n\n\n\n let mut buf = [0u8; MAX_RETURN_DATA];\n\n let mut program_id = Pubkey::default();\n\n\n\n let size =\n\n unsafe { sol_get_return_data(buf.as_mut_ptr(), buf.len() as u64, &mut program_id) };\n\n\n\n if size == 0 {\n\n None\n\n } else {\n\n let size = min(size as usize, MAX_RETURN_DATA);\n\n Some((program_id, buf[..size as usize].to_vec()))\n\n }\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_get_return_data()\n\n}\n", "file_path": "sdk/program/src/program.rs", "rank": 31, "score": 408950.72433154134 }, { "content": "fn deposit_many(bank: &Bank, pubkeys: &mut Vec<Pubkey>, num: usize) -> Result<(), LamportsError> {\n\n for t in 0..num {\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n let account =\n\n AccountSharedData::new((t + 1) as u64, 0, AccountSharedData::default().owner());\n\n pubkeys.push(pubkey);\n\n assert!(bank.get_account(&pubkey).is_none());\n\n bank.deposit(&pubkey, (t + 1) as u64)?;\n\n assert_eq!(bank.get_account(&pubkey).unwrap(), account);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "runtime/benches/accounts.rs", "rank": 32, "score": 407861.03459435044 }, { "content": "#[inline(never)]\n\npub fn recurse(data: &mut [u8]) {\n\n if data.len() <= 1 {\n\n return;\n\n }\n\n recurse(&mut data[1..]);\n\n sol_log_64(line!() as u64, 0, 0, 0, data[0] as u64);\n\n}\n\n\n\n/// # Safety\n\n#[inline(never)]\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn entrypoint(input: *mut u8) -> u64 {\n\n msg!(\"Call depth\");\n\n let depth = *(input.add(16) as *mut u8);\n\n sol_log_64(line!() as u64, 0, 0, 0, depth as u64);\n\n let mut data = Vec::with_capacity(depth as usize);\n\n for i in 0_u8..depth {\n\n data.push(i);\n\n }\n\n recurse(&mut data);\n\n SUCCESS\n\n}\n\n\n\ncustom_panic_default!();\n", "file_path": "programs/bpf/rust/call_depth/src/lib.rs", "rank": 33, "score": 406362.2234191039 }, { "content": "pub fn advance_nonce_account(nonce_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*nonce_pubkey, false),\n\n #[allow(deprecated)]\n\n AccountMeta::new_readonly(recent_blockhashes::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n 
&SystemInstruction::AdvanceNonceAccount,\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 34, "score": 404701.27070091455 }, { "content": "fn get_mint_decimals(data: &[u8]) -> Result<u8> {\n\n Mint::unpack(data)\n\n .map_err(|_| {\n\n Error::invalid_params(\"Invalid param: Token mint could not be unpacked\".to_string())\n\n })\n\n .map(|mint| mint.decimals)\n\n}\n", "file_path": "rpc/src/parsed_token_accounts.rs", "rank": 35, "score": 403018.8782478464 }, { "content": "// create_account() should be removed, use create_account_with_authorized() instead\n\npub fn create_account(\n\n vote_pubkey: &Pubkey,\n\n node_pubkey: &Pubkey,\n\n commission: u8,\n\n lamports: u64,\n\n) -> AccountSharedData {\n\n create_account_with_authorized(node_pubkey, vote_pubkey, vote_pubkey, commission, lamports)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n crate::vote_state,\n\n solana_sdk::{\n\n account::AccountSharedData,\n\n account_utils::StateMut,\n\n hash::hash,\n\n keyed_account::{get_signers, keyed_account_at_index},\n\n },\n", "file_path": "programs/vote/src/vote_state/mod.rs", "rank": 36, "score": 402687.89395611256 }, { "content": "pub fn append_u8(buf: &mut Vec<u8>, data: u8) {\n\n let start = buf.len();\n\n buf.resize(buf.len() + 1, 0);\n\n buf[start] = data;\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 37, "score": 402252.1672356057 }, { "content": "pub fn create_account_with_authorized(\n\n node_pubkey: &Pubkey,\n\n authorized_voter: &Pubkey,\n\n authorized_withdrawer: &Pubkey,\n\n commission: u8,\n\n lamports: u64,\n\n) -> AccountSharedData {\n\n let mut vote_account = AccountSharedData::new(lamports, VoteState::size_of(), &id());\n\n\n\n let vote_state = VoteState::new(\n\n &VoteInit {\n\n node_pubkey: *node_pubkey,\n\n authorized_voter: *authorized_voter,\n\n authorized_withdrawer: *authorized_withdrawer,\n\n commission,\n\n },\n\n &Clock::default(),\n\n );\n\n\n\n let versioned = VoteStateVersions::new_current(vote_state);\n\n VoteState::to(&versioned, &mut vote_account).unwrap();\n\n\n\n vote_account\n\n}\n\n\n", "file_path": "programs/vote/src/vote_state/mod.rs", "rank": 38, "score": 399942.5073913904 }, { "content": "fn deserialize_instruction(index: usize, data: &[u8]) -> Result<Instruction, SanitizeError> {\n\n const IS_SIGNER_BIT: usize = 0;\n\n const IS_WRITABLE_BIT: usize = 1;\n\n\n\n let mut current = 0;\n\n let num_instructions = read_u16(&mut current, data)?;\n\n if index >= num_instructions as usize {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n\n\n // index into the instruction byte-offset table.\n\n current += index * 2;\n\n let start = read_u16(&mut current, data)?;\n\n\n\n current = start as usize;\n\n let num_accounts = read_u16(&mut current, data)?;\n\n let mut accounts = Vec::with_capacity(num_accounts as usize);\n\n for _ in 0..num_accounts {\n\n let meta_byte = read_u8(&mut current, data)?;\n\n let mut is_signer = false;\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 39, "score": 398618.9079778306 }, { "content": "/// Return true if the first keyed_account is executable, used to determine if\n\n/// the loader should call a program's 'main'\n\npub fn is_executable(keyed_accounts: &[KeyedAccount]) -> Result<bool, InstructionError> {\n\n Ok(!keyed_accounts.is_empty() && keyed_accounts[0].executable()?)\n\n}\n\n\n\nimpl<'a, T> State<T> for crate::keyed_account::KeyedAccount<'a>\n\nwhere\n\n T: serde::Serialize + 
serde::de::DeserializeOwned,\n\n{\n\n fn state(&self) -> Result<T, InstructionError> {\n\n self.try_account_ref()?.state()\n\n }\n\n fn set_state(&self, state: &T) -> Result<(), InstructionError> {\n\n self.try_account_ref_mut()?.set_state(state)\n\n }\n\n}\n\n\n", "file_path": "sdk/src/keyed_account.rs", "rank": 40, "score": 396556.2551656987 }, { "content": "pub fn is_sysvar_id(id: &Pubkey) -> bool {\n\n ALL_IDS.iter().any(|key| key == id)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! declare_sysvar_id(\n\n ($name:expr, $type:ty) => (\n\n $crate::declare_id!($name);\n\n\n\n impl $crate::sysvar::SysvarId for $type {\n\n fn id() -> $crate::pubkey::Pubkey {\n\n id()\n\n }\n\n\n\n fn check_id(pubkey: &$crate::pubkey::Pubkey) -> bool {\n\n check_id(pubkey)\n\n }\n\n }\n\n\n\n #[cfg(test)]\n", "file_path": "sdk/program/src/sysvar/mod.rs", "rank": 41, "score": 394651.7945393168 }, { "content": "pub fn append_slice(buf: &mut Vec<u8>, data: &[u8]) {\n\n let start = buf.len();\n\n buf.resize(buf.len() + data.len(), 0);\n\n let end = buf.len();\n\n buf[start..end].copy_from_slice(data);\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 42, "score": 393371.2355909633 }, { "content": "pub fn to_account(feature: &Feature, account: &mut AccountSharedData) -> Option<()> {\n\n bincode::serialize_into(account.data_as_mut_slice(), feature).ok()\n\n}\n\n\n", "file_path": "sdk/src/feature.rs", "rank": 43, "score": 393183.1849822006 }, { "content": "pub fn decompress(data: &[u8]) -> Result<Vec<u8>, io::Error> {\n\n let method_size = bincode::serialized_size(&CompressionMethod::NoCompression).unwrap();\n\n if (data.len() as u64) < method_size {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"data len too small: {}\", data.len()),\n\n ));\n\n }\n\n let method = bincode::deserialize(&data[..method_size as usize]).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"method deserialize failed: {}\", err),\n\n )\n\n })?;\n\n\n\n let mut reader = decompress_reader(method, &data[method_size as usize..])?;\n\n let mut uncompressed_data = vec![];\n\n reader.read_to_end(&mut uncompressed_data)?;\n\n Ok(uncompressed_data)\n\n}\n\n\n", "file_path": "storage-bigtable/src/compression.rs", "rank": 44, "score": 390838.90848316206 }, { "content": "pub fn authorized_from(account: &AccountSharedData) -> Option<Authorized> {\n\n from(account).and_then(|state: StakeState| state.authorized())\n\n}\n\n\n", "file_path": "programs/stake/src/stake_state.rs", "rank": 45, "score": 388643.12299703946 }, { "content": "pub fn compress_best(data: &[u8]) -> Result<Vec<u8>, io::Error> {\n\n let mut candidates = vec![];\n\n for method in CompressionMethod::into_enum_iter() {\n\n candidates.push(compress(method, data)?);\n\n }\n\n\n\n Ok(candidates\n\n .into_iter()\n\n .min_by(|a, b| a.len().cmp(&b.len()))\n\n .unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_compress_uncompress() {\n\n let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];\n\n assert_eq!(\n", "file_path": "storage-bigtable/src/compression.rs", "rank": 46, "score": 385001.8253466621 }, { "content": "#[allow(deprecated)]\n\npub fn create_account_with_data_for_test<'a, I>(recent_blockhash_iter: I) -> AccountSharedData\n\nwhere\n\n I: IntoIterator<Item = IterItem<'a>>,\n\n{\n\n create_account_with_data_and_fields(recent_blockhash_iter, DUMMY_INHERITABLE_ACCOUNT_FIELDS)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(deprecated)]\n\n use {\n\n super::*,\n\n 
crate::account::from_account,\n\n rand::{seq::SliceRandom, thread_rng},\n\n solana_program::{\n\n hash::{Hash, HASH_BYTES},\n\n sysvar::recent_blockhashes::Entry,\n\n },\n\n };\n\n\n", "file_path": "sdk/src/recent_blockhashes_account.rs", "rank": 47, "score": 384534.5027463379 }, { "content": "pub fn is_upgrade_instruction(instruction_data: &[u8]) -> bool {\n\n !instruction_data.is_empty() && 3 == instruction_data[0]\n\n}\n\n\n", "file_path": "sdk/program/src/bpf_loader_upgradeable.rs", "rank": 48, "score": 381876.1814244379 }, { "content": "pub fn is_close_instruction(instruction_data: &[u8]) -> bool {\n\n !instruction_data.is_empty() && 5 == instruction_data[0]\n\n}\n\n\n", "file_path": "sdk/program/src/bpf_loader_upgradeable.rs", "rank": 49, "score": 381876.1814244379 }, { "content": "pub fn create_account(lamports: u64) -> RefCell<AccountSharedData> {\n\n RefCell::new(\n\n AccountSharedData::new_data_with_space(\n\n lamports,\n\n &Versions::new_current(State::Uninitialized),\n\n State::size(),\n\n &crate::system_program::id(),\n\n )\n\n .expect(\"nonce_account\"),\n\n )\n\n}\n\n\n", "file_path": "sdk/src/nonce_account.rs", "rank": 50, "score": 374779.80289703136 }, { "content": "pub fn lamports_per_signature_of(account: &AccountSharedData) -> Option<u64> {\n\n let state = StateMut::<Versions>::state(account)\n\n .ok()?\n\n .convert_to_current();\n\n match state {\n\n State::Initialized(data) => Some(data.fee_calculator.lamports_per_signature),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {super::*, crate::pubkey::Pubkey};\n\n\n\n #[test]\n\n fn test_verify_bad_account_owner_fails() {\n\n let program_id = Pubkey::new_unique();\n\n assert_ne!(program_id, crate::system_program::id());\n\n let account = AccountSharedData::new_data_with_space(\n\n 42,\n\n &Versions::new_current(State::Uninitialized),\n\n State::size(),\n\n &program_id,\n\n )\n\n .expect(\"nonce_account\");\n\n assert!(!verify_nonce_account(&account, &Hash::default()));\n\n }\n\n}\n", "file_path": "sdk/src/nonce_account.rs", "rank": 51, "score": 374770.6684579104 }, { "content": "pub fn limited_deserialize<T>(data: &[u8]) -> bincode::Result<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n bincode::options()\n\n .with_limit(PACKET_DATA_SIZE as u64)\n\n .with_fixint_encoding()\n\n .allow_trailing_bytes()\n\n .deserialize_from(data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n solana_sdk::{\n\n hash::Hash,\n\n signature::{Keypair, Signer},\n\n system_transaction,\n\n },\n", "file_path": "perf/src/packet.rs", "rank": 52, "score": 374582.43937055825 }, { "content": "pub fn authorize(\n\n vote_pubkey: &Pubkey,\n\n authorized_pubkey: &Pubkey, // currently authorized\n\n new_authorized_pubkey: &Pubkey,\n\n vote_authorize: VoteAuthorize,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n id(),\n\n &VoteInstruction::Authorize(*new_authorized_pubkey, vote_authorize),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 53, "score": 373381.1094695481 }, { "content": "pub fn get_account(rpc_client: &RpcClient, nonce_pubkey: &Pubkey) -> Result<Account, Error> {\n\n get_account_with_commitment(rpc_client, nonce_pubkey, CommitmentConfig::default())\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 54, "score": 
371596.1452205375 }, { "content": "pub fn load_current_index(data: &[u8]) -> u16 {\n\n let mut instr_fixed_data = [0u8; 2];\n\n let len = data.len();\n\n instr_fixed_data.copy_from_slice(&data[len - 2..len]);\n\n u16::from_le_bytes(instr_fixed_data)\n\n}\n\n\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 55, "score": 371301.1237054889 }, { "content": "#[inline]\n\npub fn sol_memset(s: &mut [u8], c: u8, n: usize) {\n\n #[cfg(target_arch = \"bpf\")]\n\n {\n\n extern \"C\" {\n\n fn sol_memset_(s: *mut u8, c: u8, n: u64);\n\n }\n\n unsafe {\n\n sol_memset_(s.as_mut_ptr(), c, n as u64);\n\n }\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_memset(s.as_mut_ptr(), c, n);\n\n}\n", "file_path": "sdk/program/src/program_memory.rs", "rank": 56, "score": 370123.2944133586 }, { "content": "#[allow(deprecated)]\n\npub fn create_account_with_data<'a, I>(lamports: u64, recent_blockhash_iter: I) -> AccountSharedData\n\nwhere\n\n I: IntoIterator<Item = IterItem<'a>>,\n\n{\n\n #[allow(deprecated)]\n\n create_account_with_data_and_fields(recent_blockhash_iter, (lamports, INITIAL_RENT_EPOCH))\n\n}\n\n\n\n#[deprecated(\n\n since = \"1.9.0\",\n\n note = \"Please do not use, will no longer be available in the future\"\n\n)]\n", "file_path": "sdk/src/recent_blockhashes_account.rs", "rank": 57, "score": 368636.3103618147 }, { "content": "pub fn deactivate_stake(stake_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*stake_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n Instruction::new_with_bincode(id(), &StakeInstruction::Deactivate, account_metas)\n\n}\n\n\n", "file_path": "sdk/program/src/stake/instruction.rs", "rank": 58, "score": 367746.6653223587 }, { "content": "// A helper function to convert spl_token::id() as spl_sdk::pubkey::Pubkey to\n\n// solana_sdk::pubkey::Pubkey\n\npub fn spl_token_id() -> Pubkey {\n\n Pubkey::new_from_array(spl_token::id().to_bytes())\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 59, "score": 367480.3414837506 }, { "content": "pub fn vote_switch(\n\n vote_pubkey: &Pubkey,\n\n authorized_voter_pubkey: &Pubkey,\n\n vote: Vote,\n\n proof_hash: Hash,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::slot_hashes::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*authorized_voter_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n id(),\n\n &VoteInstruction::VoteSwitch(vote, proof_hash),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 60, "score": 365613.66564637586 }, { "content": "pub fn update_commission(\n\n vote_pubkey: &Pubkey,\n\n authorized_withdrawer_pubkey: &Pubkey,\n\n commission: u8,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(*authorized_withdrawer_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n id(),\n\n &VoteInstruction::UpdateCommission(commission),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 61, "score": 365362.4027245701 }, { "content": "pub fn authorize_checked(\n\n vote_pubkey: &Pubkey,\n\n authorized_pubkey: &Pubkey, // currently authorized\n\n new_authorized_pubkey: &Pubkey,\n\n 
vote_authorize: VoteAuthorize,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n AccountMeta::new_readonly(*new_authorized_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n id(),\n\n &VoteInstruction::AuthorizeChecked(vote_authorize),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 62, "score": 365343.21357594506 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_withdraw_from_vote_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n vote_account_pubkey: &Pubkey,\n\n withdraw_authority: SignerIndex,\n\n withdraw_amount: SpendAmount,\n\n destination_account_pubkey: &Pubkey,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<&Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n) -> ProcessResult {\n\n let withdraw_authority = config.signers[withdraw_authority];\n\n let recent_blockhash = blockhash_query.get_blockhash(rpc_client, config.commitment)?;\n\n\n\n let fee_payer = config.signers[fee_payer];\n\n let nonce_authority = config.signers[nonce_authority];\n", "file_path": "cli/src/vote.rs", "rank": 63, "score": 364913.5023669765 }, { "content": "pub fn parse_withdraw_from_vote_account(\n\n matches: &ArgMatches<'_>,\n\n default_signer: &DefaultSigner,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Result<CliCommandInfo, CliError> {\n\n let vote_account_pubkey =\n\n pubkey_of_signer(matches, \"vote_account_pubkey\", wallet_manager)?.unwrap();\n\n let destination_account_pubkey =\n\n pubkey_of_signer(matches, \"destination_account_pubkey\", wallet_manager)?.unwrap();\n\n let mut withdraw_amount = SpendAmount::new_from_matches(matches, \"amount\");\n\n // As a safeguard for vote accounts for running validators, `ALL` withdraws only the amount in\n\n // excess of the rent-exempt minimum. 
In order to close the account with this subcommand, a\n\n // validator must specify the withdrawal amount precisely.\n\n if withdraw_amount == SpendAmount::All {\n\n withdraw_amount = SpendAmount::RentExempt;\n\n }\n\n\n\n let (withdraw_authority, withdraw_authority_pubkey) =\n\n signer_of(matches, \"authorized_withdrawer\", wallet_manager)?;\n\n\n", "file_path": "cli/src/vote.rs", "rank": 64, "score": 364913.5023669765 }, { "content": "pub fn get_config_data(bytes: &[u8]) -> Result<&[u8], bincode::Error> {\n\n deserialize::<ConfigKeys>(bytes)\n\n .and_then(|keys| serialized_size(&keys))\n\n .map(|offset| &bytes[offset as usize..])\n\n}\n\n\n", "file_path": "programs/config/src/lib.rs", "rank": 65, "score": 364752.85931248526 }, { "content": "pub fn create_account(feature: &Feature, lamports: u64) -> AccountSharedData {\n\n let data_len = Feature::size_of().max(bincode::serialized_size(feature).unwrap() as usize);\n\n let mut account = AccountSharedData::new(lamports, data_len, &id());\n\n to_account(feature, &mut account).unwrap();\n\n account\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn feature_deserialize_none() {\n\n let just_initialized = AccountSharedData::new(42, Feature::size_of(), &id());\n\n assert_eq!(\n\n from_account(&just_initialized),\n\n Some(Feature { activated_at: None })\n\n );\n\n }\n\n}\n", "file_path": "sdk/src/feature.rs", "rank": 66, "score": 363476.8166636118 }, { "content": "pub fn append_u16(buf: &mut Vec<u8>, data: u16) {\n\n let start = buf.len();\n\n buf.resize(buf.len() + 2, 0);\n\n let end = buf.len();\n\n buf[start..end].copy_from_slice(&data.to_le_bytes());\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 67, "score": 362487.7619484757 }, { "content": "pub fn transfer(from_pubkey: &Pubkey, to_pubkey: &Pubkey, lamports: u64) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*from_pubkey, true),\n\n AccountMeta::new(*to_pubkey, false),\n\n ];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n &SystemInstruction::Transfer { lamports },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 68, "score": 362385.5530782786 }, { "content": "pub fn initialize_checked(stake_pubkey: &Pubkey, authorized: &Authorized) -> Instruction {\n\n Instruction::new_with_bincode(\n\n id(),\n\n &StakeInstruction::InitializeChecked,\n\n vec![\n\n AccountMeta::new(*stake_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n AccountMeta::new_readonly(authorized.staker, false),\n\n AccountMeta::new_readonly(authorized.withdrawer, true),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/stake/instruction.rs", "rank": 69, "score": 361392.5557862566 }, { "content": "pub fn get_system_account_kind(account: &AccountSharedData) -> Option<SystemAccountKind> {\n\n if system_program::check_id(account.owner()) {\n\n if account.data().is_empty() {\n\n Some(SystemAccountKind::System)\n\n } else if account.data().len() == nonce::State::size() {\n\n match account.state().ok()? 
{\n\n nonce::state::Versions::Current(state) => match *state {\n\n nonce::State::Initialized(_) => Some(SystemAccountKind::Nonce),\n\n _ => None,\n\n },\n\n }\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "runtime/src/system_instruction_processor.rs", "rank": 70, "score": 360181.027519381 }, { "content": "pub fn compress(method: CompressionMethod, data: &[u8]) -> Result<Vec<u8>, io::Error> {\n\n let mut compressed_data = bincode::serialize(&method).unwrap();\n\n compressed_data.extend(\n\n match method {\n\n CompressionMethod::Bzip2 => {\n\n let mut e = bzip2::write::BzEncoder::new(Vec::new(), bzip2::Compression::best());\n\n e.write_all(data)?;\n\n e.finish()?\n\n }\n\n CompressionMethod::Gzip => {\n\n let mut e =\n\n flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());\n\n e.write_all(data)?;\n\n e.finish()?\n\n }\n\n CompressionMethod::Zstd => {\n\n let mut e = zstd::stream::write::Encoder::new(Vec::new(), 0).unwrap();\n\n e.write_all(data)?;\n\n e.finish()?\n\n }\n\n CompressionMethod::NoCompression => data.to_vec(),\n\n }\n\n .into_iter(),\n\n );\n\n\n\n Ok(compressed_data)\n\n}\n\n\n", "file_path": "storage-bigtable/src/compression.rs", "rank": 71, "score": 359949.9918753644 }, { "content": "#[inline]\n\npub fn sol_memcpy(dst: &mut [u8], src: &[u8], n: usize) {\n\n #[cfg(target_arch = \"bpf\")]\n\n {\n\n extern \"C\" {\n\n fn sol_memcpy_(dst: *mut u8, src: *const u8, n: u64);\n\n }\n\n unsafe {\n\n sol_memcpy_(dst.as_mut_ptr(), src.as_ptr(), n as u64);\n\n }\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_memcpy(dst.as_mut_ptr(), src.as_ptr(), n);\n\n}\n\n\n\n/// Memmove\n\n///\n\n/// @param dst - Destination\n\n/// @param src - Source\n\n/// @param n - Number of bytes to copy\n", "file_path": "sdk/program/src/program_memory.rs", "rank": 72, "score": 358147.9738141801 }, { "content": "#[allow(clippy::result_unit_err)]\n\npub fn decode_shortu16_len(bytes: &[u8]) -> Result<(usize, usize), ()> {\n\n let mut val = 0;\n\n for (nth_byte, byte) in bytes.iter().take(MAX_ENCODING_LENGTH).enumerate() {\n\n match visit_byte(*byte, val, nth_byte).map_err(|_| ())? 
{\n\n VisitStatus::More(new_val) => val = new_val,\n\n VisitStatus::Done(new_val) => {\n\n return Ok((usize::from(new_val), nth_byte.saturating_add(1)));\n\n }\n\n }\n\n }\n\n Err(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n assert_matches::assert_matches,\n\n bincode::{deserialize, serialize},\n\n };\n", "file_path": "sdk/program/src/short_vec.rs", "rank": 73, "score": 358108.7574209826 }, { "content": "pub fn create_account(lamports: u64, config: &Config) -> AccountSharedData {\n\n create_config_account(vec![], config, lamports)\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 74, "score": 357651.4159966882 }, { "content": "pub fn create_account_with_seed(\n\n from_pubkey: &Pubkey,\n\n vote_pubkey: &Pubkey,\n\n base: &Pubkey,\n\n seed: &str,\n\n vote_init: &VoteInit,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n let space = VoteState::size_of() as u64;\n\n let create_ix = system_instruction::create_account_with_seed(\n\n from_pubkey,\n\n vote_pubkey,\n\n base,\n\n seed,\n\n lamports,\n\n space,\n\n &id(),\n\n );\n\n let init_ix = initialize_account(vote_pubkey, vote_init);\n\n vec![create_ix, init_ix]\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 75, "score": 357129.4800874105 }, { "content": "pub fn update_vote_state_switch(\n\n vote_pubkey: &Pubkey,\n\n authorized_voter_pubkey: &Pubkey,\n\n vote_state_update: VoteStateUpdate,\n\n proof_hash: Hash,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(*authorized_voter_pubkey, true),\n\n ];\n\n\n\n Instruction::new_with_bincode(\n\n id(),\n\n &VoteInstruction::UpdateVoteStateSwitch(vote_state_update, proof_hash),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 76, "score": 354332.3754117263 }, { "content": "pub fn account_identity_ok<T: ReadableAccount>(account: &T) -> Result<(), Error> {\n\n if account.owner() != &system_program::id() {\n\n Err(Error::InvalidAccountOwner)\n\n } else if account.data().is_empty() {\n\n Err(Error::UnexpectedDataSize)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 77, "score": 353495.2959382156 }, { "content": "fn check_num_system_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::System)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{message::Message, pubkey::Pubkey, system_instruction},\n\n };\n\n\n\n #[test]\n\n #[allow(clippy::same_item_push)]\n\n fn test_parse_system_instruction() {\n\n let mut keys: Vec<Pubkey> = vec![];\n\n for _ in 0..6 {\n\n keys.push(solana_sdk::pubkey::new_rand());\n\n }\n\n\n\n let lamports = 55;\n", "file_path": "transaction-status/src/parse_system.rs", "rank": 78, "score": 353466.8067225936 }, { "content": "fn check_num_stake_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::Stake)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{\n\n message::Message,\n\n pubkey::Pubkey,\n\n stake::{\n\n instruction::{self, LockupArgs},\n\n state::{Authorized, Lockup, StakeAuthorize},\n\n },\n\n },\n\n };\n\n\n\n #[test]\n\n #[allow(clippy::same_item_push)]\n", "file_path": "transaction-status/src/parse_stake.rs", "rank": 79, "score": 353466.80672259367 }, { "content": "fn check_num_token_accounts(accounts: &[u8], num: usize) 
-> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::SplToken)\n\n}\n\n\n", "file_path": "transaction-status/src/parse_token.rs", "rank": 80, "score": 353466.80672259355 }, { "content": "fn shared_new<T: WritableAccount>(lamports: u64, space: usize, owner: &Pubkey) -> T {\n\n T::create(\n\n lamports,\n\n vec![0u8; space],\n\n *owner,\n\n bool::default(),\n\n Epoch::default(),\n\n )\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 81, "score": 351636.41457751114 }, { "content": "/// Create and sign new SystemInstruction::Transfer transaction to many destinations\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: &[(Pubkey, u64)]) -> Vec<Instruction> {\n\n to_lamports\n\n .iter()\n\n .map(|(to_pubkey, lamports)| transfer(from_pubkey, to_pubkey, *lamports))\n\n .collect()\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 82, "score": 351543.0326208861 }, { "content": "/// Emit a program data.\n\n///\n\n/// The general form is:\n\n///\n\n/// ```notrust\n\n/// \"Program data: <binary-data-in-base64>*\"\n\n/// ```\n\n///\n\n/// That is, any program-generated output is guaranteed to be prefixed by \"Program data: \"\n\npub fn program_data(log_collector: &Option<Rc<RefCell<LogCollector>>>, data: &[&[u8]]) {\n\n ic_logger_msg!(\n\n log_collector,\n\n \"Program data: {}\",\n\n data.iter().map(base64::encode).join(\" \")\n\n );\n\n}\n\n\n", "file_path": "program-runtime/src/stable_log.rs", "rank": 83, "score": 350067.5641374654 }, { "content": "#[test]\n\nfn test_vote_authorize_and_withdraw() {\n\n let mint_keypair = Keypair::new();\n\n let mint_pubkey = mint_keypair.pubkey();\n\n let faucet_addr = run_local_faucet(mint_keypair, None);\n\n let test_validator =\n\n TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);\n\n\n\n let rpc_client =\n\n RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());\n\n let default_signer = Keypair::new();\n\n\n\n let mut config = CliConfig::recent_for_tests();\n\n config.json_rpc_url = test_validator.rpc_url();\n\n config.signers = vec![&default_signer];\n\n\n\n request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)\n\n .unwrap();\n\n\n\n // Create vote account\n\n let vote_account_keypair = Keypair::new();\n", "file_path": "cli/tests/vote.rs", "rank": 84, "score": 349755.7784350868 }, { "content": "pub fn withdraw(\n\n stake_pubkey: &Pubkey,\n\n withdrawer_pubkey: &Pubkey,\n\n to_pubkey: &Pubkey,\n\n lamports: u64,\n\n custodian_pubkey: Option<&Pubkey>,\n\n) -> Instruction {\n\n let mut account_metas = vec![\n\n AccountMeta::new(*stake_pubkey, false),\n\n AccountMeta::new(*to_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(sysvar::stake_history::id(), false),\n\n AccountMeta::new_readonly(*withdrawer_pubkey, true),\n\n ];\n\n\n\n if let Some(custodian_pubkey) = custodian_pubkey {\n\n account_metas.push(AccountMeta::new_readonly(*custodian_pubkey, true));\n\n }\n\n\n\n Instruction::new_with_bincode(id(), &StakeInstruction::Withdraw(lamports), account_metas)\n\n}\n\n\n", "file_path": "sdk/program/src/stake/instruction.rs", "rank": 85, "score": 348678.53872266226 }, { "content": "#[cfg(not(target_arch = \"bpf\"))]\n\npub fn construct_instructions_data(instructions: &[BorrowedInstruction]) -> Vec<u8> {\n\n let mut data = serialize_instructions(instructions);\n\n // add room for current instruction index.\n\n data.resize(data.len() 
+ 2, 0);\n\n\n\n data\n\n}\n\n\n\n/// Borrowed version of AccountMeta\n\npub struct BorrowedAccountMeta<'a> {\n\n pub pubkey: &'a Pubkey,\n\n pub is_signer: bool,\n\n pub is_writable: bool,\n\n}\n\n\n\n/// Borrowed version of Instruction\n\npub struct BorrowedInstruction<'a> {\n\n pub program_id: &'a Pubkey,\n\n pub accounts: Vec<BorrowedAccountMeta<'a>>,\n\n pub data: &'a [u8],\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 86, "score": 348097.15615386027 }, { "content": "/// Given `data` with `len >= 32`, return the first 32 bytes.\n\npub fn read32(data: &[u8]) -> [u8; 32] {\n\n let mut buf32 = [0u8; 32];\n\n buf32[..].copy_from_slice(&data[..32]);\n\n buf32\n\n}\n\n\n", "file_path": "zk-token-sdk/src/range_proof/util.rs", "rank": 87, "score": 345756.6216045262 }, { "content": "pub fn initialize(stake_pubkey: &Pubkey, authorized: &Authorized, lockup: &Lockup) -> Instruction {\n\n Instruction::new_with_bincode(\n\n id(),\n\n &StakeInstruction::Initialize(*authorized, *lockup),\n\n vec![\n\n AccountMeta::new(*stake_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/stake/instruction.rs", "rank": 88, "score": 345688.2148756957 }, { "content": "// Process instruction to get return data from another program\n\nfn get_return_data_process_instruction(\n\n _program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n input: &[u8],\n\n) -> ProgramResult {\n\n msg!(\"Processing get_return_data instruction before CPI\");\n\n let account_info_iter = &mut accounts.iter();\n\n let invoked_program_info = next_account_info(account_info_iter)?;\n\n invoke(\n\n &Instruction {\n\n program_id: *invoked_program_info.key,\n\n accounts: vec![],\n\n data: input.to_vec(),\n\n },\n\n &[invoked_program_info.clone()],\n\n )?;\n\n let return_data = get_return_data().unwrap();\n\n msg!(\"Processing get_return_data instruction after CPI\");\n\n msg!(\"{}\", from_utf8(&return_data.1).unwrap());\n\n assert_eq!(return_data.1, input.to_vec());\n\n Ok(())\n\n}\n\n\n\n// Process instruction to echo input back to another program\n", "file_path": "program-test/tests/return_data.rs", "rank": 89, "score": 344082.78629843093 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn set_return_data_process_instruction(\n\n _program_id: &Pubkey,\n\n _accounts: &[AccountInfo],\n\n input: &[u8],\n\n) -> ProgramResult {\n\n msg!(\"Processing invoked instruction before set_return_data\");\n\n set_return_data(input);\n\n msg!(\"Processing invoked instruction after set_return_data\");\n\n Ok(())\n\n}\n\n\n\n#[tokio::test]\n\nasync fn return_data() {\n\n let get_return_data_program_id = Pubkey::new_unique();\n\n let mut program_test = ProgramTest::new(\n\n \"get_return_data\",\n\n get_return_data_program_id,\n\n processor!(get_return_data_process_instruction),\n\n );\n\n let set_return_data_program_id = Pubkey::new_unique();\n", "file_path": "program-test/tests/return_data.rs", "rank": 90, "score": 344061.8094779912 }, { "content": "/// Deserializes without checking that the entire slice has been consumed\n\n///\n\n/// Normally, `try_from_slice` checks the length of the final slice to ensure\n\n/// that the deserialization uses up all of the bytes in the slice.\n\n///\n\n/// Note that there is a potential issue with this function. Any buffer greater than\n\n/// or equal to the expected size will properly deserialize. 
For example, if the\n\n/// user passes a buffer destined for a different type, the error won't get caught\n\n/// as easily.\n\npub fn try_from_slice_unchecked<T: BorshDeserialize>(data: &[u8]) -> Result<T, Error> {\n\n let mut data_mut = data;\n\n let result = T::deserialize(&mut data_mut)?;\n\n Ok(result)\n\n}\n\n\n\n/// Helper struct which to count how much data would be written during serialization\n", "file_path": "sdk/program/src/borsh.rs", "rank": 91, "score": 343392.6523271941 }, { "content": "pub fn create_account_shared_data_for_test<S: Sysvar>(sysvar: &S) -> AccountSharedData {\n\n AccountSharedData::from(create_account_with_fields(\n\n sysvar,\n\n DUMMY_INHERITABLE_ACCOUNT_FIELDS,\n\n ))\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 92, "score": 342019.7380315432 }, { "content": "#[test]\n\nfn test_offline_vote_authorize_and_withdraw() {\n\n let mint_keypair = Keypair::new();\n\n let mint_pubkey = mint_keypair.pubkey();\n\n let faucet_addr = run_local_faucet(mint_keypair, None);\n\n let test_validator =\n\n TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);\n\n\n\n let rpc_client =\n\n RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());\n\n let default_signer = Keypair::new();\n\n\n\n let mut config_payer = CliConfig::recent_for_tests();\n\n config_payer.json_rpc_url = test_validator.rpc_url();\n\n config_payer.signers = vec![&default_signer];\n\n\n\n let mut config_offline = CliConfig::recent_for_tests();\n\n config_offline.json_rpc_url = String::default();\n\n config_offline.command = CliCommand::ClusterVersion;\n\n let offline_keypair = Keypair::new();\n\n config_offline.signers = vec![&offline_keypair];\n", "file_path": "cli/tests/vote.rs", "rank": 93, "score": 341978.8037968295 }, { "content": "/// Create a date account. 
The date is set to the Unix epoch.\n\npub fn create_account(\n\n payer_pubkey: &Pubkey,\n\n date_pubkey: &Pubkey,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n config_instruction::create_account::<DateConfig>(payer_pubkey, date_pubkey, lamports, vec![])\n\n}\n\n\n", "file_path": "programs/config/src/date_instruction.rs", "rank": 94, "score": 341379.5155912363 }, { "content": "pub fn create_account(\n\n from_pubkey: &Pubkey,\n\n stake_pubkey: &Pubkey,\n\n authorized: &Authorized,\n\n lockup: &Lockup,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n vec![\n\n system_instruction::create_account(\n\n from_pubkey,\n\n stake_pubkey,\n\n lamports,\n\n std::mem::size_of::<StakeState>() as u64,\n\n &id(),\n\n ),\n\n initialize(stake_pubkey, authorized, lockup),\n\n ]\n\n}\n\n\n", "file_path": "sdk/program/src/stake/instruction.rs", "rank": 95, "score": 341372.7517745247 }, { "content": "pub fn create_account(\n\n from_pubkey: &Pubkey,\n\n to_pubkey: &Pubkey,\n\n lamports: u64,\n\n space: u64,\n\n owner: &Pubkey,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*from_pubkey, true),\n\n AccountMeta::new(*to_pubkey, true),\n\n ];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n &SystemInstruction::CreateAccount {\n\n lamports,\n\n space,\n\n owner: *owner,\n\n },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 96, "score": 341372.7517745247 }, { "content": "pub fn withdraw_nonce_account(\n\n nonce_pubkey: &Pubkey,\n\n authorized_pubkey: &Pubkey,\n\n to_pubkey: &Pubkey,\n\n lamports: u64,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*nonce_pubkey, false),\n\n AccountMeta::new(*to_pubkey, false),\n\n #[allow(deprecated)]\n\n AccountMeta::new_readonly(recent_blockhashes::id(), false),\n\n AccountMeta::new_readonly(rent::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n &SystemInstruction::WithdrawNonceAccount(lamports),\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 97, "score": 341073.6729644332 }, { "content": "pub fn authorize_nonce_account(\n\n nonce_pubkey: &Pubkey,\n\n authorized_pubkey: &Pubkey,\n\n new_authority: &Pubkey,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*nonce_pubkey, false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n Instruction::new_with_bincode(\n\n system_program::id(),\n\n &SystemInstruction::AuthorizeNonceAccount(*new_authority),\n\n account_metas,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 98, "score": 341063.84730626945 }, { "content": "pub fn create_loadable_account_for_test(name: &str) -> AccountSharedData {\n\n create_loadable_account_with_fields(name, DUMMY_INHERITABLE_ACCOUNT_FIELDS)\n\n}\n", "file_path": "sdk/src/native_loader.rs", "rank": 99, "score": 340208.93336621585 } ]
Rust
src/widgets/image_widget.rs
alexislozano/rust-pushrod
57f4129861a2e22608cbc9cb0aa151b5fb424c62
use crate::render::callbacks::CallbackRegistry; use crate::render::widget::*; use crate::render::widget_cache::WidgetContainer; use crate::render::widget_config::{ CompassPosition, Config, WidgetConfig, CONFIG_COLOR_BASE, CONFIG_IMAGE_POSITION, CONFIG_SIZE, }; use crate::render::Points; use sdl2::image::LoadTexture; use sdl2::rect::Rect; use sdl2::render::{Canvas, TextureQuery}; use sdl2::video::Window; use std::collections::HashMap; use std::path::Path; pub struct ImageWidget { config: WidgetConfig, system_properties: HashMap<i32, String>, callback_registry: CallbackRegistry, image_name: String, scaled: bool, } impl ImageWidget { pub fn new(image_name: String, x: i32, y: i32, w: u32, h: u32, scaled: bool) -> Self { Self { config: WidgetConfig::new(x, y, w, h), system_properties: HashMap::new(), callback_registry: CallbackRegistry::new(), image_name, scaled, } } } impl Widget for ImageWidget { fn draw(&mut self, c: &mut Canvas<Window>) { let base_color = self.get_color(CONFIG_COLOR_BASE); c.set_draw_color(base_color); c.fill_rect(self.get_drawing_area()).unwrap(); let texture_creator = c.texture_creator(); let texture = texture_creator .load_texture(Path::new(&self.image_name)) .unwrap(); let widget_w = self.get_size(CONFIG_SIZE)[0] as i32; let widget_h = self.get_size(CONFIG_SIZE)[1] as i32; let TextureQuery { width, height, .. } = texture.query(); let texture_x = match self.get_compass(CONFIG_IMAGE_POSITION) { CompassPosition::NW | CompassPosition::W | CompassPosition::SW => { self.get_config().to_x(0) } CompassPosition::N | CompassPosition::Center | CompassPosition::S => { self.get_config().to_x((widget_w - width as i32) / 2) } CompassPosition::NE | CompassPosition::E | CompassPosition::SE => { self.get_config().to_x(widget_w - width as i32) } }; let texture_y = match self.get_compass(CONFIG_IMAGE_POSITION) { CompassPosition::NW | CompassPosition::N | CompassPosition::NE => { self.get_config().to_y(0) } CompassPosition::W | CompassPosition::Center | CompassPosition::E => { self.get_config().to_y((widget_h - height as i32) / 2) } CompassPosition::SW | CompassPosition::S | CompassPosition::SE => { self.get_config().to_y(widget_h - height as i32) } }; if !self.scaled { c.copy( &texture, None, Rect::new(texture_x, texture_y, width, height), ) .unwrap(); } else { c.copy( &texture, None, Rect::new( self.get_config().to_x(0), self.get_config().to_y(0), widget_w as u32, widget_h as u32, ), ) .unwrap(); } } fn on_config_changed(&mut self, _k: u8, _v: Config) { if _k == CONFIG_IMAGE_POSITION { self.get_config().set_invalidate(true); } } default_widget_properties!(); default_widget_callbacks!(); }
use crate::render::callbacks::CallbackRegistry; use crate::render::widget::*; use crate::render::widget_cache::WidgetContainer; use crate::render::widget_config::{ CompassPosition, Config, WidgetConfig, CONFIG_COLOR_BASE, CONFIG_IMAGE_POSITION, CONFIG_SIZE, }; use crate::render::Points; use sdl2::image::LoadTexture; use sdl2::rect::Rect; use sdl2::render::{Canvas, TextureQuery}; use sdl2::video::Window; use std::collections::HashMap; use std::path::Path; pub struct ImageWidget { config: WidgetConfig, system_properties: HashMap<i32, String>, callback_registry: CallbackRegistry, image_name: String, scaled: bool, } impl ImageWidget { pub fn new(image_name: String, x: i32, y: i32, w: u32, h: u32, scaled: bool) -> Self { Self { config: WidgetConfig::new(x, y, w, h), system_properties: HashMap::new(), callback_registry: CallbackRegistry::new(), image_name, scaled, } } } impl Widget for ImageWidget { fn draw(&mut self, c: &mut Canvas<Window>) { let base_color = self.get_color(CONFIG_COLOR_BASE); c.set_draw_color(base_color); c.fill_rect(self.get_drawing_area()).unwrap(); let texture_creator = c.texture_creator(); let texture = texture_creator .load_texture(Path::new(&self.image_name)) .unwrap(); let widget_w = self.get_size(CONFIG_SIZE)[0] as i32; let widget_h = self.get_size(CONFIG_SIZE)[1] as i32; let TextureQuery { width, height, .. } = texture.query(); let texture_x =
; let texture_y = match self.get_compass(CONFIG_IMAGE_POSITION) { CompassPosition::NW | CompassPosition::N | CompassPosition::NE => { self.get_config().to_y(0) } CompassPosition::W | CompassPosition::Center | CompassPosition::E => { self.get_config().to_y((widget_h - height as i32) / 2) } CompassPosition::SW | CompassPosition::S | CompassPosition::SE => { self.get_config().to_y(widget_h - height as i32) } }; if !self.scaled { c.copy( &texture, None, Rect::new(texture_x, texture_y, width, height), ) .unwrap(); } else { c.copy( &texture, None, Rect::new( self.get_config().to_x(0), self.get_config().to_y(0), widget_w as u32, widget_h as u32, ), ) .unwrap(); } } fn on_config_changed(&mut self, _k: u8, _v: Config) { if _k == CONFIG_IMAGE_POSITION { self.get_config().set_invalidate(true); } } default_widget_properties!(); default_widget_callbacks!(); }
match self.get_compass(CONFIG_IMAGE_POSITION) { CompassPosition::NW | CompassPosition::W | CompassPosition::SW => { self.get_config().to_x(0) } CompassPosition::N | CompassPosition::Center | CompassPosition::S => { self.get_config().to_x((widget_w - width as i32) / 2) } CompassPosition::NE | CompassPosition::E | CompassPosition::SE => { self.get_config().to_x(widget_w - width as i32) } }
if_condition
[ { "content": "pub fn widget_id_for_name(widgets: &[WidgetContainer], name: String) -> usize {\n\n match widgets.iter().find(|x| x.get_widget_name() == name.clone()) {\n\n Some(x) => x.get_widget_id() as usize,\n\n None => 0 as usize,\n\n }\n\n}\n", "file_path": "src/render/callbacks.rs", "rank": 0, "score": 104114.82841385523 }, { "content": "pub trait ConfigConvenience {}\n\n\n", "file_path": "src/render/widget.rs", "rank": 1, "score": 78340.59394868134 }, { "content": "/// This trait is shared by all `Widget` objects that have a presence on the screen. Functions that\n\n/// must be implemented are documented in the trait.\n\n///\n\n/// ## Implementation Notes\n\n/// If no custom `get_config` function is defined, and no custom `get_system_properties` function\n\n/// is defined, you can omit the definition of both, and use the `default_widget_properties!()`\n\n/// macro to auto-generate this code in your `impl` of this `trait`. Keep in mind, however, that\n\n/// these automatically generated implementation details could change in future releases of this\n\n/// library, so it is best to use the default implementation if possible.\n\npub trait Widget {\n\n /// Draws the widget. If you wish to modify the canvas object, you must declare it as `mut` in\n\n /// your implementation (ie `fn draw(&mut self, mut canvas: Canvas<Window>)`). The `_canvas`\n\n /// is the currently active drawing canvas at the time this function is called. This called\n\n /// during the draw loop of the `Engine`.\n\n fn draw(&mut self, _c: &mut Canvas<Window>) {}\n\n\n\n /// Retrieves the `WidgetConfig` object for this `Widget`.\n\n fn get_config(&mut self) -> &mut WidgetConfig;\n\n\n\n /// Retrieves a `HashMap` containing system properties used by the `Pushrod` event engine.\n\n fn get_system_properties(&mut self) -> &mut HashMap<i32, String>;\n\n\n\n /// Retrieves a `Callback` registry for this `Widget`.\n\n fn get_callbacks(&mut self) -> &mut CallbackRegistry;\n\n\n\n /// When a mouse enters the bounds of the `Widget`, this function is triggered. 
This function\n\n /// implementation is **optional**.\n\n fn mouse_entered(&mut self, _widgets: &[WidgetContainer]) {\n\n self.mouse_entered_callback(_widgets);\n", "file_path": "src/render/widget.rs", "rank": 2, "score": 76640.06526104518 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render progress demo\", 400, 180)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut widget1 = ProgressWidget::new(20, 20, 360, 40);\n\n\n\n widget1.set_color(CONFIG_COLOR_SECONDARY, Color::RGB(255, 0, 0));\n\n widget1.set_numeric(CONFIG_PROGRESS, 25);\n\n\n\n let mut widget2 = ProgressWidget::new(20, 70, 360, 40);\n\n\n\n widget2.set_color(CONFIG_COLOR_SECONDARY, Color::RGB(255, 0, 0));\n\n widget2.set_numeric(CONFIG_PROGRESS, 50);\n\n\n", "file_path": "examples/progress.rs", "rank": 3, "score": 69476.94097661939 }, { "content": "pub fn main() {\n\n const WIDTH: u32 = 500;\n\n const HEIGHT: u32 = 270;\n\n\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render image demo\", WIDTH, HEIGHT)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut widget1 =\n\n ImageWidget::new(String::from(\"assets/rust-48x48.jpg\"), 20, 16, 60, 60, false);\n\n\n\n widget1.set_color(CONFIG_COLOR_BASE, Color::RGB(0, 0, 0));\n\n widget1.set_compass(CONFIG_IMAGE_POSITION, CompassPosition::NW);\n\n\n\n let mut widget2 =\n", "file_path": "examples/image.rs", "rank": 4, "score": 69476.94097661939 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render demo\", 800, 600)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut new_base_widget = BaseWidget::new(100, 100, 600, 400);\n\n\n\n new_base_widget\n\n .get_config()\n\n .set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n new_base_widget\n\n .get_config()\n\n .set_numeric(CONFIG_BORDER_WIDTH, 2);\n\n\n\n new_base_widget\n", "file_path": "examples/render.rs", "rank": 5, "score": 69476.94097661939 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render timer demo\", 400, 180)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut widget1 = ProgressWidget::new(20, 20, 360, 40);\n\n\n\n widget1.set_color(CONFIG_COLOR_SECONDARY, Color::RGB(255, 0, 0));\n\n widget1.set_numeric(CONFIG_PROGRESS, 25);\n\n\n\n let mut widget2 = ProgressWidget::new(20, 70, 360, 40);\n\n\n\n widget2.set_color(CONFIG_COLOR_SECONDARY, Color::RGB(255, 0, 0));\n\n widget2.set_numeric(CONFIG_PROGRESS, 50);\n\n\n", "file_path": "examples/timer.rs", "rank": 6, "score": 69476.94097661939 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod text widget demo\", 500, 200)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut widget1 = TextWidget::new(\n\n 
String::from(\"assets/OpenSans-Regular.ttf\"),\n\n sdl2::ttf::FontStyle::NORMAL,\n\n 28,\n\n TextJustify::Left,\n\n String::from(\"Left Justified\"),\n\n 20,\n\n 16,\n\n 460,\n\n 40,\n", "file_path": "examples/text.rs", "rank": 7, "score": 69476.94097661939 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render image button demo\", 400, 180)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let widget1 = CheckboxWidget::new(20, 20, 360, 30, String::from(\" Checkbox Item 1\"), 22, false);\n\n let widget2 = CheckboxWidget::new(20, 70, 360, 30, String::from(\" Checked Checkbox\"), 22, true);\n\n let widget3 = CheckboxWidget::new(\n\n 20,\n\n 120,\n\n 360,\n\n 30,\n\n String::from(\" Unchecked Checkbox\"),\n\n 22,\n\n false,\n", "file_path": "examples/checkbox_button.rs", "rank": 8, "score": 66746.08698959815 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render push button demo\", 400, 100)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut button1 = PushButtonWidget::new(20, 20, 360, 60, String::from(\"Click me!\"), 40);\n\n\n\n button1.set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n button1.set_numeric(CONFIG_BORDER_WIDTH, 2);\n\n button1.on_click(|x, _widgets| {\n\n eprintln!(\"Click me clicked!\");\n\n });\n\n\n\n engine.setup(400, 100);\n\n\n\n engine.add_widget(Box::new(button1), String::from(\"button1\"));\n\n\n\n engine.run(sdl_context, window);\n\n}\n", "file_path": "examples/push_button.rs", "rank": 9, "score": 66746.08698959815 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render toggle button demo\", 400, 100)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let mut button1 = ToggleButtonWidget::new(20, 20, 170, 60, String::from(\"1\"), 40, false);\n\n\n\n button1.set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n button1.set_numeric(CONFIG_BORDER_WIDTH, 2);\n\n button1.on_toggle(|x, _widgets, _state| {\n\n eprintln!(\"1 Toggled: {}\", _state);\n\n });\n\n\n\n let mut button2 = ToggleButtonWidget::new(210, 20, 170, 60, String::from(\"2\"), 40, true);\n\n\n", "file_path": "examples/toggle_button.rs", "rank": 10, "score": 66746.08698959815 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(\"pushrod-render image button demo\", 400, 180)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .unwrap();\n\n let mut engine = Engine::new();\n\n let widget1 = ImageButtonWidget::new(\n\n 20,\n\n 20,\n\n 360,\n\n 40,\n\n String::from(\" Rust Logo\"),\n\n 24,\n\n String::from(\"assets/rust-48x48.jpg\"),\n\n );\n\n let widget2 = ImageButtonWidget::new(\n", "file_path": "examples/image_button.rs", "rank": 11, "score": 66746.08698959815 }, { "content": "fn time_ms() -> u64 {\n\n let since_the_epoch = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n\n\n (since_the_epoch.as_secs() * 1_000) + u64::from(since_the_epoch.subsec_millis())\n\n}\n\n\n\n/// 
This is the storage object for the `TimerWidget`. It stores the config, properties, callback registry,\n\n/// an enabled flag, timeout, a last-time-triggered value, and a timeout callback store.\n\npub struct TimerWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n enabled: bool,\n\n timeout: u64,\n\n initiated: u64,\n\n on_timeout: TimerCallbackType,\n\n}\n\n\n\n/// Creates a new `TimerWidget`. This `Widget` will call a function defined in `on_timeout` when\n\n/// a specific number of milliseconds has elapsed.\n", "file_path": "src/widgets/timer_widget.rs", "rank": 12, "score": 59960.305440723314 }, { "content": " /// `Widget`'s hidden flag - any children that refer to this object as a `parent_id` will not\n\n /// be drawn, and their events will not be received.\n\n hidden: bool,\n\n\n\n /// `Widget`'s enabled flag - any mouse events are ignored, but drawing is still performed.\n\n enabled: bool,\n\n\n\n /// `Widget`'s redraw flag. Set `true` if the object needs to be redrawn, `false` otherwise.\n\n invalidated: bool,\n\n}\n\n\n\n/// This is the implementation of the `WidgetConfig`.\n\nimpl WidgetConfig {\n\n /// Constructor - takes the X, Y, W, and H coordinates of the `Widget`, physically in the\n\n /// main `Canvas`.\n\n pub fn new(x: i32, y: i32, w: u32, h: u32) -> Self {\n\n Self {\n\n config: [\n\n (CONFIG_ORIGIN, Config::Points(vec![x, y])),\n\n (CONFIG_SIZE, Config::Size(vec![w, h])),\n", "file_path": "src/render/widget_config.rs", "rank": 13, "score": 51789.868852533546 }, { "content": " /// This stores a numeric value in the form of an `i32` value.\n\n Numeric(i32),\n\n\n\n /// This stores a `String` of text.\n\n Text(String),\n\n\n\n /// This stores a `true`/`false` boolean flag.\n\n Toggle(bool),\n\n\n\n /// This stores a `ComapssPosition`.\n\n CompassPosition(CompassPosition),\n\n}\n\n\n\n/// This is the store for the `WidgetConfig`, which each `Widget` object needs. This stores\n\n/// information about the `Widget`. 
It currently contains the point of origin, size, a `HashMap` of\n\n/// different `Color`s, a border width, and an invalidation flag.\n\npub struct WidgetConfig {\n\n /// The `HashMap` store for configuration objects.\n\n pub config: HashMap<u8, Config>,\n\n\n", "file_path": "src/render/widget_config.rs", "rank": 14, "score": 51780.280285958266 }, { "content": " pub fn hide(&mut self) {\n\n self.hidden = true;\n\n self.invalidated = true;\n\n }\n\n\n\n /// Displays the `Widget` on the screen.\n\n pub fn show(&mut self) {\n\n self.hidden = false;\n\n self.invalidated = true;\n\n }\n\n\n\n /// Indicates whether or not this `Widget` has been hidden from view - `true` if this `Widget`\n\n /// is hidden, `false` otherwise.\n\n pub fn is_hidden(&self) -> bool {\n\n self.hidden\n\n }\n\n\n\n /// Sets a point for a configuration key.\n\n pub fn set_point(&mut self, config: u8, x: i32, y: i32) {\n\n self.config.insert(config, Config::Points(vec![x, y]));\n", "file_path": "src/render/widget_config.rs", "rank": 15, "score": 51779.55321262876 }, { "content": " }\n\n\n\n /// Sets a size for a configuration key.\n\n pub fn set_size(&mut self, config: u8, w: u32, h: u32) {\n\n self.config.insert(config, Config::Size(vec![w, h]));\n\n }\n\n\n\n /// Sets a color for a configuration key.\n\n pub fn set_color(&mut self, config: u8, color: Color) {\n\n self.config.insert(config, Config::Color(color));\n\n }\n\n\n\n /// Sets a numeric value for a configuration key.\n\n pub fn set_numeric(&mut self, config: u8, value: i32) {\n\n self.config.insert(config, Config::Numeric(value));\n\n }\n\n\n\n /// Sets a text value for a configuration key.\n\n pub fn set_text(&mut self, config: u8, text: String) {\n\n self.config.insert(config, Config::Text(text.clone()));\n", "file_path": "src/render/widget_config.rs", "rank": 16, "score": 51778.933597849355 }, { "content": " (CONFIG_COLOR_BASE, Config::Color(Color::RGB(255, 255, 255))),\n\n (CONFIG_BORDER_WIDTH, Config::Numeric(0)),\n\n ]\n\n .iter()\n\n .cloned()\n\n .collect(),\n\n hidden: false,\n\n enabled: true,\n\n invalidated: true,\n\n }\n\n }\n\n\n\n /// Converts an X point to the physical X point on the `Canvas` plus the point of origin.\n\n /// Returns `i32` containing the modified X coordinate. This is a convenience method for the\n\n /// `Widget` to draw based on a 0x0 point of origin.\n\n pub fn to_x(&self, x: i32) -> i32 {\n\n self.get_point(CONFIG_ORIGIN)[0] + x\n\n }\n\n\n\n /// Converts a Y point to the physical Y point on the `Canvas` plus the point of origin.\n", "file_path": "src/render/widget_config.rs", "rank": 17, "score": 51777.350098757895 }, { "content": " /// Returns `i32` containing the modified Y coordinate. This is a convenience method for the\n\n /// `Widget` to draw based on a 0x0 point of origin.\n\n pub fn to_y(&self, y: i32) -> i32 {\n\n self.get_point(CONFIG_ORIGIN)[1] + y\n\n }\n\n\n\n /// Sets the invalidation state of the `Widget`, telling the `Engine` that the `Widget`\n\n /// contents has changed, and must be redrawn. Setting the `flag` to `true` indicates that\n\n /// the `Widget` needs to be redrawn on the screen, `false` indicates that it its state has\n\n /// not changed, and its image can be pulled from a buffer if necessary, skipping the `draw`\n\n /// call.\n\n pub fn set_invalidate(&mut self, flag: bool) {\n\n self.invalidated = flag;\n\n }\n\n\n\n /// Returns the `invalidation` state. 
Returns a `bool` containing the state.\n\n pub fn invalidated(&self) -> bool {\n\n self.invalidated\n\n }\n\n\n", "file_path": "src/render/widget_config.rs", "rank": 18, "score": 51777.00304786948 }, { "content": " _ => 0,\n\n }\n\n }\n\n\n\n /// Retrieves text for a configuration key. Returns a blank string if not set.\n\n pub fn get_text(&self, k: u8) -> String {\n\n match self.config.get(&k) {\n\n Some(Config::Text(text)) => text.clone(),\n\n _ => String::from(\"\"),\n\n }\n\n }\n\n\n\n /// Retrieves a boolean toggle for a configuration key. Returns `false` if not set.\n\n pub fn get_toggle(&self, k: u8) -> bool {\n\n match self.config.get(&k) {\n\n Some(Config::Toggle(toggle)) => *toggle,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Retrieves a `CompassPosition` toggle for a configuration key. Returns `CompassPosition::W` if not set.\n\n pub fn get_compass(&self, k: u8) -> CompassPosition {\n\n match self.config.get(&k) {\n\n Some(Config::CompassPosition(position)) => position.clone(),\n\n _ => CompassPosition::W,\n\n }\n\n }\n\n}\n", "file_path": "src/render/widget_config.rs", "rank": 19, "score": 51776.40667716351 }, { "content": "\n\n/// `Widget` Secondary `Color` key for `colors` `HashMap`. This is the color the `Widget` should\n\n/// display for any secondary properties, such as a fill color for a progress widget, a spinner,\n\n/// etc. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_SECONDARY: u8 = 5;\n\n\n\n/// `Widget` configuration to store its origin on the screen. This is a `Config::Points` object in the\n\n/// config.\n\npub const CONFIG_ORIGIN: u8 = 6;\n\n\n\n/// `Widget` configuration that stores the size of the `Widget`. This is a `Config::Size` object\n\n/// in the config.\n\npub const CONFIG_SIZE: u8 = 7;\n\n\n\n/// `Widget` configuration that stores the display border in pixels. This is stored as a\n\n/// `Config::Numeric` value.\n\npub const CONFIG_BORDER_WIDTH: u8 = 8;\n\n\n\n/// `Widget` text store, used to display text on the screen. This is stored as a `Config::Text`\n\n/// value.\n", "file_path": "src/render/widget_config.rs", "rank": 20, "score": 51774.914221003586 }, { "content": " }\n\n\n\n /// Sets a toggle for a configuration key.\n\n pub fn set_toggle(&mut self, config: u8, flag: bool) {\n\n self.config.insert(config, Config::Toggle(flag));\n\n }\n\n\n\n /// Sets a compass position for a configuration key.\n\n pub fn set_compass(&mut self, config: u8, value: CompassPosition) {\n\n self.config.insert(config, Config::CompassPosition(value));\n\n }\n\n\n\n /// Retrieves a `Points` for a configuration key. Returns `Points::default` if not set.\n\n pub fn get_point(&self, k: u8) -> Points {\n\n match self.config.get(&k) {\n\n Some(Config::Points(point)) => point.clone(),\n\n _ => Points::default(),\n\n }\n\n }\n\n\n", "file_path": "src/render/widget_config.rs", "rank": 21, "score": 51773.39766155731 }, { "content": "pub const CONFIG_TEXT: u8 = 9;\n\n\n\n/// `Widget` progress value store. This is stored as a `Config::Numeric` value.\n\npub const CONFIG_PROGRESS: u8 = 10;\n\n\n\n/// `Widget` image position direction, controls the position of an `Image` within the bounds of a\n\n/// `Widget`. This is stored as a `Config::CompassPosition` value.\n\npub const CONFIG_IMAGE_POSITION: u8 = 11;\n\n\n\n/// `TextWidget` font size control. This is stored as a `Config::Numeric` value.\n\npub const CONFIG_FONT_SIZE: u8 = 12;\n\n\n\n/// `PushButtonWidget` selected state. 
This is stored as a `Config::Toggle` value.\n\npub const CONFIG_SELECTED_STATE: u8 = 13;\n\n\n\n/// This enum is used by the `ImageWidget`, which controls the positioning of the image being\n\n/// rendered within the bounds of the `Widget`.\n\n#[derive(Clone, Debug)]\n\npub enum CompassPosition {\n\n /// Upper left-hand corner of the bounds.\n", "file_path": "src/render/widget_config.rs", "rank": 22, "score": 51772.81954595402 }, { "content": " /// Enables the `Widget` for interaction.\n\n pub fn enable(&mut self) {\n\n self.enabled = true;\n\n self.invalidated = true;\n\n }\n\n\n\n /// Disables the `Widget`, preventing interaction.\n\n pub fn disable(&mut self) {\n\n self.enabled = false;\n\n self.invalidated = true;\n\n }\n\n\n\n /// Indicates whether or not this `Widget` is enabled or disabled - `true` if enabled,\n\n /// `false` otherwise.\n\n pub fn is_enabled(&self) -> bool {\n\n self.enabled\n\n }\n\n\n\n /// Prevents the `Widget` from being drawn on the screen, or being interacted with. No events\n\n /// are received by this `Widget` when hidden.\n", "file_path": "src/render/widget_config.rs", "rank": 23, "score": 51772.811437425524 }, { "content": "/// that is in an unselected state. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_BASE: u8 = 0;\n\n\n\n/// `Widget` Hover `Color` key for `colors` `HashMap`. This is the base fill color of a `Widget`\n\n/// that has a mouse hovering over the top of the `Widget`, or when a `mouse_entered` event is\n\n/// triggered. This is optional; the `Widget` does not need to honor this color if it does not\n\n/// support a hover state. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_HOVER: u8 = 1;\n\n\n\n/// `Widget` Border `Color` key for `colors` `HashMap`. This should be used for the color of the\n\n/// border, if the `Widget` draws a border. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_BORDER: u8 = 2;\n\n\n\n/// `Widget` Text `Color` key for `colors` `HashMap`. This should be the color for the text being\n\n/// displayed inside the `Widget`. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_TEXT: u8 = 3;\n\n\n\n/// `Widget` Selected `Color` key for `colors` `HashMap`. This is the color the `Widget` should\n\n/// display when in selected state. This stored as a `Config::Color`.\n\npub const CONFIG_COLOR_SELECTED: u8 = 4;\n", "file_path": "src/render/widget_config.rs", "rank": 24, "score": 51772.26303841476 }, { "content": " /// Retrieves a `Size` for a configuration key. Returns a `Size::default` if not set.\n\n pub fn get_size(&self, k: u8) -> Size {\n\n match self.config.get(&k) {\n\n Some(Config::Size(size)) => size.clone(),\n\n _ => Size::default(),\n\n }\n\n }\n\n\n\n /// Retrieves a `Color` for a configuration key. Returns white if not set.\n\n pub fn get_color(&self, k: u8) -> Color {\n\n match self.config.get(&k) {\n\n Some(Config::Color(color)) => *color,\n\n _ => Color::RGB(255, 255, 255),\n\n }\n\n }\n\n\n\n /// Retrieves a numeric value for a configuration key. 
Returns 0 if not set.\n\n pub fn get_numeric(&self, k: u8) -> i32 {\n\n match self.config.get(&k) {\n\n Some(Config::Numeric(numeric)) => *numeric,\n", "file_path": "src/render/widget_config.rs", "rank": 25, "score": 51770.261829523944 }, { "content": " /// Bottom center of the bounds.\n\n S,\n\n\n\n /// Lower right-hand corner of the bounds.\n\n SE,\n\n}\n\n\n\n/// Configuration object type - allows configurations to be set using `Piston`, `Pushrod`, or\n\n/// native types.\n\n#[derive(Clone, Debug)]\n\npub enum Config {\n\n /// This stores a `Points` type.\n\n Points(Points),\n\n\n\n /// This stores a `Size` type.\n\n Size(Size),\n\n\n\n /// This stores a `Color`.\n\n Color(Color),\n\n\n", "file_path": "src/render/widget_config.rs", "rank": 26, "score": 51768.65845056288 }, { "content": "// Pushrod Rendering Library\n\n// Widget Configuration Store\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::{Points, Size};\n\nuse sdl2::pixels::Color;\n\nuse std::collections::HashMap;\n\n\n\n/// `Widget` Base `Color` key for `colors` `HashMap`. This is the base fill color of a `Widget`\n", "file_path": "src/render/widget_config.rs", "rank": 27, "score": 51766.481649559515 }, { "content": " NW,\n\n\n\n /// Centered top of the bounds.\n\n N,\n\n\n\n /// Upper right-hand corner of the bounds.\n\n NE,\n\n\n\n /// Centered left side of the bounds.\n\n W,\n\n\n\n /// Center of the bounds.\n\n Center,\n\n\n\n /// Centered right side of the bounds.\n\n E,\n\n\n\n /// Lower left-hand corner of the bounds.\n\n SW,\n\n\n", "file_path": "src/render/widget_config.rs", "rank": 28, "score": 51764.04237620951 }, { "content": " msg: msg.clone(),\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `TextWidget`. Text is rendered onto a 3D texture, then\n\n/// copied to the canvas after rendering. 
It uses blended mode texture mapping, which may be slow (as\n\n/// described by the SDL2 documentation), so this might change later to use 8 bit color mapping.\n\nimpl Widget for TextWidget {\n\n fn draw(&mut self, c: &mut Canvas<Window>) {\n\n let base_color = self.get_color(CONFIG_COLOR_BASE);\n\n let text_max_width =\n\n self.get_size(CONFIG_SIZE)[0] - ((self.get_numeric(CONFIG_BORDER_WIDTH) * 2) as u32);\n\n\n\n let ttf_context = sdl2::ttf::init().map_err(|e| e.to_string()).unwrap();\n\n let texture_creator = c.texture_creator();\n\n let mut font = ttf_context\n\n .load_font(Path::new(&self.font_name), self.font_size as u16)\n\n .unwrap();\n\n let font_color = self.get_color(CONFIG_COLOR_TEXT);\n", "file_path": "src/widgets/text_widget.rs", "rank": 30, "score": 31030.452167466505 }, { "content": " TextJustify::Right => self.get_config().to_x(widget_w - width as i32),\n\n\n\n TextJustify::Center => self.get_config().to_x((widget_w - width as i32) / 2),\n\n };\n\n\n\n c.set_draw_color(base_color);\n\n c.fill_rect(self.get_drawing_area()).unwrap();\n\n\n\n c.copy(\n\n &texture,\n\n None,\n\n Rect::new(texture_x, texture_y, width, height),\n\n )\n\n .unwrap();\n\n }\n\n\n\n /// Monitors for changes in the text, color changes, or font sizes.\n\n fn on_config_changed(&mut self, _k: u8, _v: Config) {\n\n match _k {\n\n CONFIG_COLOR_TEXT => self.get_config().set_invalidate(true),\n", "file_path": "src/widgets/text_widget.rs", "rank": 32, "score": 31029.140065188134 }, { "content": "\n\n font.set_style(self.font_style);\n\n\n\n let surface = font\n\n .render(&self.msg)\n\n .blended_wrapped(font_color, text_max_width)\n\n .map_err(|e| e.to_string())\n\n .unwrap();\n\n let texture = texture_creator\n\n .create_texture_from_surface(&surface)\n\n .map_err(|e| e.to_string())\n\n .unwrap();\n\n\n\n let TextureQuery { width, height, .. } = texture.query();\n\n\n\n let texture_y = self.get_config().to_y(0);\n\n let widget_w = self.get_size(CONFIG_SIZE)[0] as i32;\n\n let texture_x = match self.justification {\n\n TextJustify::Left => self.get_config().to_x(0),\n\n\n", "file_path": "src/widgets/text_widget.rs", "rank": 34, "score": 31029.034190464212 }, { "content": "\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\n\n\nuse crate::render::widget_config::CompassPosition::Center;\n\nuse crate::widgets::image_widget::ImageWidget;\n\nuse crate::widgets::text_widget::{TextJustify, TextWidget};\n\nuse sdl2::pixels::Color;\n\nuse std::collections::HashMap;\n\n\n\n/// This is the callback type that is used when an `on_toggle` callback is triggered from this\n\n/// `Widget`.\n\npub type OnToggleCallbackType =\n\n Option<Box<dyn FnMut(&mut CheckboxWidget, &[WidgetContainer], bool)>>;\n\n\n\n/// This is the storage object for the `ToggleButtonWidget`. It stores the config, properties, callback registry.\n\npub struct CheckboxWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 35, "score": 31027.92266412705 }, { "content": " /// `Widget` given the `xywh` coordinates, and the `percentage` of fill from 0-100. The\n\n /// base color and border colors are set to white and black, respectively. 
Use the\n\n /// `COLOR_SECONDARY` setting to change the color of the fill for the progress bar.\n\n pub fn new(x: i32, y: i32, w: u32, h: u32) -> Self {\n\n let mut base_widget = BaseWidget::new(x, y, w, h);\n\n\n\n base_widget\n\n .get_config()\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n\n\n base_widget\n\n .get_config()\n\n .set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n\n\n base_widget.get_config().set_numeric(CONFIG_BORDER_WIDTH, 1);\n\n\n\n Self {\n\n config: WidgetConfig::new(x, y, w, h),\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n", "file_path": "src/widgets/progress_widget.rs", "rank": 37, "score": 31027.584377819523 }, { "content": " /// layout of the font, the message to display, and the x, y, w, h coordinates of the text.\n\n pub fn new(\n\n font_name: String,\n\n font_style: FontStyle,\n\n font_size: i32,\n\n justification: TextJustify,\n\n msg: String,\n\n x: i32,\n\n y: i32,\n\n w: u32,\n\n h: u32,\n\n ) -> Self {\n\n Self {\n\n config: WidgetConfig::new(x, y, w, h),\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n font_name,\n\n font_style,\n\n font_size,\n\n justification,\n", "file_path": "src/widgets/text_widget.rs", "rank": 38, "score": 31025.746582841282 }, { "content": "\n\nuse sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\nuse sdl2::video::Window;\n\n\n\nuse sdl2::render::Canvas;\n\nuse std::collections::HashMap;\n\n\n\n/// This is the storage object for the `ProgressWidget`. It stores the config, properties, callback registry,\n\n/// the base widget, and progress from 0 to 100.\n\npub struct ProgressWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n base_widget: BaseWidget,\n\n}\n\n\n\n/// Creates a new `ProgressWidget`, which draws a progress bar inside a `BaseWidget`.\n\nimpl ProgressWidget {\n\n /// Creates a new instance of the `ProgressWidget` object. 
It draws a progress bar-style\n", "file_path": "src/widgets/progress_widget.rs", "rank": 39, "score": 31022.89330342899 }, { "content": " /// Assigns the callback closure that will be used when the `Widget` toggles state.\n\n pub fn on_toggle<F>(&mut self, callback: F)\n\n where\n\n F: FnMut(&mut CheckboxWidget, &[WidgetContainer], bool) + 'static,\n\n {\n\n self.on_toggle = Some(Box::new(callback));\n\n }\n\n\n\n /// Internal function that triggers the `on_toggle` callback.\n\n fn call_toggle_callback(&mut self, widgets: &[WidgetContainer]) {\n\n if let Some(mut cb) = self.on_toggle.take() {\n\n cb(self, widgets, self.selected);\n\n self.on_toggle = Some(cb);\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `ToggleButtonWidget`.\n\nimpl Widget for CheckboxWidget {\n\n /// Draws the `CheckboxWidget` contents.\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 40, "score": 31022.831861026196 }, { "content": " text_widget: TextWidget,\n\n unchecked_widget: ImageWidget,\n\n checked_widget: ImageWidget,\n\n active: bool,\n\n selected: bool,\n\n in_bounds: bool,\n\n on_toggle: OnToggleCallbackType,\n\n}\n\n\n\n/// This is the implementation of the `ToggleButtonWidget` that draws a button on the screen that can be\n\n/// toggled on or off.\n\nimpl CheckboxWidget {\n\n /// Creates a new `ToggleButtonWidget` given the `x, y, w, h` coordinates, the `text` to display\n\n /// inside the button, `font_size` of the font to display, and the initial `selected` state: `true`\n\n /// being selected, `false` otherwise.\n\n pub fn new(\n\n x: i32,\n\n y: i32,\n\n w: u32,\n\n h: u32,\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 42, "score": 31022.272376133227 }, { "content": " }\n\n\n\n /// Disables the timer. Once disabled, the `on_timeout` callback will never be called.\n\n pub fn disable(&mut self) {\n\n self.enabled = false;\n\n }\n\n\n\n /// Returns the `enabled` state.\n\n pub fn is_enabled(&self) -> bool {\n\n self.enabled\n\n }\n\n\n\n /// Assigns the callback closure that will be used when a timer tick is triggered.\n\n pub fn on_timeout<F>(&mut self, callback: F)\n\n where\n\n F: FnMut(&mut TimerWidget, &[WidgetContainer]) + 'static,\n\n {\n\n self.on_timeout = Some(Box::new(callback));\n\n }\n\n\n", "file_path": "src/widgets/timer_widget.rs", "rank": 43, "score": 31022.12288754141 }, { "content": " text: String,\n\n font_size: i32,\n\n selected: bool,\n\n ) -> Self {\n\n let mut text_widget = TextWidget::new(\n\n String::from(\"assets/OpenSans-Regular.ttf\"),\n\n sdl2::ttf::FontStyle::NORMAL,\n\n font_size,\n\n TextJustify::Left,\n\n text.clone(),\n\n x + h as i32 + 6,\n\n y + 2,\n\n w - h - 10,\n\n h - 4,\n\n );\n\n\n\n let text_color = if selected {\n\n Color::RGB(255, 255, 255)\n\n } else {\n\n Color::RGB(0, 0, 0)\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 44, "score": 31022.051505933556 }, { "content": "impl TimerWidget {\n\n /// Creates a new `TimerWidget` object to call the `on_timeout` timeout callback every `timeout`\n\n /// milliseconds. Setting `enabled` to `true` will automatically enable the timer, where as\n\n /// `false` will add the timer, but it will not be enabled.\n\n pub fn new(timeout: u64, enabled: bool) -> Self {\n\n Self {\n\n config: WidgetConfig::new(0, 0, 0, 0),\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n enabled,\n\n timeout,\n\n initiated: time_ms(),\n\n on_timeout: None,\n\n }\n\n }\n\n\n\n /// Re-enables the timer. 
This will also reset the elapsed timer.\n\n pub fn enable(&mut self) {\n\n self.initiated = time_ms();\n\n self.enabled = true;\n", "file_path": "src/widgets/timer_widget.rs", "rank": 45, "score": 31022.031502043017 }, { "content": "}\n\n\n\n/// This is the storage object for the `TextWidget`. It stores the config, properties, callback registry,\n\n/// the font name, style, size, justification, and text message.\n\npub struct TextWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n font_name: String,\n\n font_style: FontStyle,\n\n font_size: i32,\n\n justification: TextJustify,\n\n msg: String,\n\n}\n\n\n\n/// Creates a new `TextWidget`, which draws a unit of text on the screen, given the specified font,\n\n/// size, justification, and layout coordinates.\n\nimpl TextWidget {\n\n /// Creates a new `TextWidget` object. Requires the name of the font (the path to the font file),\n\n /// the style of font (`sdl2::ttf::FontStyle`), the size in pixels of the font, the `TextJustify`\n", "file_path": "src/widgets/text_widget.rs", "rank": 47, "score": 31020.49741203624 }, { "content": "// Pushrod Widget Library\n\n// Timer Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::WidgetConfig;\n\n\n\nuse std::collections::HashMap;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\npub type TimerCallbackType = Option<Box<dyn FnMut(&mut TimerWidget, &[WidgetContainer])>>;\n\n\n", "file_path": "src/widgets/timer_widget.rs", "rank": 48, "score": 31019.82879815008 }, { "content": "\n\nuse sdl2::render::{Canvas, TextureQuery};\n\nuse sdl2::ttf::FontStyle;\n\nuse sdl2::video::Window;\n\n\n\nuse sdl2::rect::Rect;\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\n\n\n/// This enum is used by the `TextWidget`, which controls the justification of the text being\n\n/// rendered within the bounds of the `Widget`.\n\npub enum TextJustify {\n\n /// Left-justified text.\n\n Left,\n\n\n\n /// Center-justified text: `(total width - text width) / 2`\n\n Center,\n\n\n\n /// Right-justified text: `(total width - text width)`\n\n Right,\n", "file_path": "src/widgets/text_widget.rs", "rank": 49, "score": 31019.781408282222 }, { "content": " base_widget,\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `ProgressWidget`. 
It contains a `BaseWidget` within\n\n/// its bounds to draw the base background, then draws the progress fill over the top.\n\nimpl Widget for ProgressWidget {\n\n fn draw(&mut self, c: &mut Canvas<Window>) {\n\n self.base_widget.draw(c);\n\n\n\n let base_color = self.get_color(CONFIG_COLOR_SECONDARY);\n\n let progress = (f64::from(self.get_size(CONFIG_SIZE)[0])\n\n * (f64::from(self.get_numeric(CONFIG_PROGRESS)) / 100.0)) as u32;\n\n\n\n c.set_draw_color(base_color);\n\n c.fill_rect(Rect::new(\n\n self.config.to_x(1),\n\n self.config.to_y(1),\n\n progress,\n", "file_path": "src/widgets/progress_widget.rs", "rank": 50, "score": 31018.99538503591 }, { "content": " };\n\n\n\n let mut config = WidgetConfig::new(x, y, w, h);\n\n let mut unchecked_widget = ImageWidget::new(\n\n String::from(\"assets/checkbox_unselected.png\"),\n\n x + 2,\n\n y + 2,\n\n h - 4,\n\n h - 4,\n\n true,\n\n );\n\n let mut checked_widget = ImageWidget::new(\n\n String::from(\"assets/checkbox_selected.png\"),\n\n x + 2,\n\n y + 2,\n\n h - 4,\n\n h - 4,\n\n true,\n\n );\n\n\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 51, "score": 31018.686481636534 }, { "content": " self.get_size(CONFIG_SIZE)[1] - 2,\n\n ))\n\n .unwrap();\n\n }\n\n\n\n /// Responds to a screen redraw only if the `CONFIG_PROGRESS` key was changed.\n\n fn on_config_changed(&mut self, _k: u8, _v: Config) {\n\n if _k == CONFIG_PROGRESS {\n\n self.get_config().set_invalidate(true);\n\n }\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/progress_widget.rs", "rank": 52, "score": 31017.19129572874 }, { "content": " /// Internal function that triggers the `on_timeout` callback.\n\n fn call_timeout_callback(&mut self, widgets: &[WidgetContainer]) {\n\n if let Some(mut cb) = self.on_timeout.take() {\n\n cb(self, widgets);\n\n self.on_timeout = Some(cb);\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `TimerWidget`.\n\nimpl Widget for TimerWidget {\n\n /// The `TimerWidget` responds to the `tick` callback, which is used to determine the timer\n\n /// display ticks. 
This function is _only_ called when the timer tick occurs, so if there is a\n\n /// function inside the drawing loop that drops frames, this timer may not get called reliably.\n\n fn tick(&mut self, _widgets: &[WidgetContainer]) {\n\n if !self.enabled {\n\n return;\n\n }\n\n\n\n let elapsed = time_ms() - self.initiated;\n", "file_path": "src/widgets/timer_widget.rs", "rank": 53, "score": 31015.690736063232 }, { "content": " }\n\n\n\n /// Overrides the `button_clicked` callback to handle toggling.\n\n fn button_clicked(\n\n &mut self,\n\n _widgets: &[WidgetContainer],\n\n _button: u8,\n\n _clicks: u8,\n\n _state: bool,\n\n ) {\n\n if _button == 1 {\n\n if _state {\n\n self.active = true;\n\n } else {\n\n self.active = false;\n\n\n\n if self.in_bounds {\n\n self.selected = !self.selected;\n\n self.set_toggle(CONFIG_SELECTED_STATE, self.selected);\n\n self.call_toggle_callback(_widgets);\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 54, "score": 31015.362780696727 }, { "content": " text_widget.set_color(CONFIG_COLOR_TEXT, Color::RGB(0, 0, 0));\n\n unchecked_widget.set_compass(CONFIG_IMAGE_POSITION, Center);\n\n checked_widget.set_compass(CONFIG_IMAGE_POSITION, Center);\n\n\n\n config.set_toggle(CONFIG_SELECTED_STATE, selected);\n\n\n\n Self {\n\n config,\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n text_widget,\n\n unchecked_widget,\n\n checked_widget,\n\n active: false,\n\n selected,\n\n in_bounds: false,\n\n on_toggle: None,\n\n }\n\n }\n\n\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 56, "score": 31014.135699650637 }, { "content": " } else {\n\n self.unchecked_widget.draw(c);\n\n }\n\n }\n\n\n\n self.text_widget.draw(c);\n\n }\n\n\n\n /// When a mouse enters the bounds of the `Widget`, this function is triggered.\n\n fn mouse_entered(&mut self, _widgets: &[WidgetContainer]) {\n\n self.in_bounds = true;\n\n self.mouse_entered_callback(_widgets);\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n /// When a mouse exits the bounds of the `Widget`, this function is triggered.\n\n fn mouse_exited(&mut self, _widgets: &[WidgetContainer]) {\n\n self.in_bounds = false;\n\n self.mouse_exited_callback(_widgets);\n\n self.get_config().set_invalidate(true);\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 57, "score": 31014.05662433173 }, { "content": "// Pushrod Widget Library\n\n// Checkbox Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::*;\n\nuse crate::render::Points;\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 58, "score": 31013.539158684536 }, { "content": "// Pushrod Widget Library\n\n// Text Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy 
of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::*;\n\nuse crate::render::Points;\n", "file_path": "src/widgets/text_widget.rs", "rank": 59, "score": 31013.539158684536 }, { "content": "// Pushrod Widget Library\n\n// Progress Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::*;\n\nuse crate::render::Points;\n", "file_path": "src/widgets/progress_widget.rs", "rank": 60, "score": 31013.539158684536 }, { "content": " CONFIG_COLOR_BASE => self.get_config().set_invalidate(true),\n\n CONFIG_FONT_SIZE => match _v {\n\n Config::Numeric(size) => {\n\n self.font_size = size;\n\n self.get_config().set_invalidate(true);\n\n }\n\n _ => (),\n\n },\n\n CONFIG_TEXT => match _v {\n\n Config::Text(text) => {\n\n self.msg = text.clone();\n\n self.get_config().set_invalidate(true);\n\n }\n\n _ => (),\n\n },\n\n\n\n _ => (),\n\n };\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/text_widget.rs", "rank": 61, "score": 31011.583295568904 }, { "content": " fn draw(&mut self, c: &mut Canvas<Window>) {\n\n // Paint the base widget first. 
Forcing a draw() call here will ignore invalidation.\n\n // Invalidation is controlled by the top level widget (this box).\n\n if self.active {\n\n if self.in_bounds {\n\n if self.selected {\n\n self.unchecked_widget.draw(c);\n\n } else {\n\n self.checked_widget.draw(c);\n\n }\n\n } else {\n\n if self.selected {\n\n self.checked_widget.draw(c);\n\n } else {\n\n self.unchecked_widget.draw(c);\n\n }\n\n }\n\n } else {\n\n if self.selected {\n\n self.checked_widget.draw(c);\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 62, "score": 31011.347359968207 }, { "content": " }\n\n }\n\n\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n self.button_clicked_callback(_widgets, _button, _clicks, _state);\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/checkbox_widget.rs", "rank": 63, "score": 31010.920857662346 }, { "content": "\n\n if elapsed > self.timeout {\n\n self.initiated = time_ms();\n\n self.call_timeout_callback(_widgets);\n\n }\n\n }\n\n\n\n default_widget_properties!();\n\n}\n", "file_path": "src/widgets/timer_widget.rs", "rank": 64, "score": 31008.032476674838 }, { "content": " text_widget: TextWidget,\n\n active: bool,\n\n in_bounds: bool,\n\n on_click: OnClickCallbackType,\n\n}\n\n\n\nimpl PushButtonWidget {\n\n pub fn new(x: i32, y: i32, w: u32, h: u32, text: String, font_size: i32) -> Self {\n\n let mut base_widget = BaseWidget::new(x, y, w, h);\n\n let mut text_widget = TextWidget::new(\n\n String::from(\"assets/OpenSans-Regular.ttf\"),\n\n sdl2::ttf::FontStyle::NORMAL,\n\n font_size,\n\n TextJustify::Center,\n\n text.clone(),\n\n x + 2,\n\n y + 2,\n\n w - 4,\n\n h - 4,\n\n );\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 65, "score": 30032.871016042962 }, { "content": " system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n base_widget: BaseWidget,\n\n text_widget: TextWidget,\n\n image_widget: ImageWidget,\n\n active: bool,\n\n in_bounds: bool,\n\n on_click: OnClickCallbackType,\n\n}\n\n\n\nimpl ImageButtonWidget {\n\n pub fn new(\n\n x: i32,\n\n y: i32,\n\n w: u32,\n\n h: u32,\n\n text: String,\n\n font_size: i32,\n\n image_name: String,\n\n ) -> Self {\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 66, "score": 30031.81196354577 }, { "content": "\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\n\n\nuse crate::widgets::text_widget::{TextJustify, TextWidget};\n\nuse sdl2::pixels::Color;\n\nuse std::collections::HashMap;\n\n\n\n/// This is the callback type that is used when an `on_toggle` callback is triggered from this\n\n/// `Widget`.\n\npub type OnToggleCallbackType =\n\n Option<Box<dyn FnMut(&mut ToggleButtonWidget, &[WidgetContainer], bool)>>;\n\n\n\n/// This is the storage object for the `ToggleButtonWidget`. 
It stores the config, properties, callback registry.\n\npub struct ToggleButtonWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n base_widget: BaseWidget,\n\n text_widget: TextWidget,\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 67, "score": 30030.008084421494 }, { "content": "};\n\nuse crate::render::Points;\n\n\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\n\n\nuse crate::widgets::text_widget::{TextJustify, TextWidget};\n\nuse sdl2::pixels::Color;\n\nuse std::collections::HashMap;\n\n\n\n/// This is the callback type that is used when an `on_click` callback is triggered from this\n\n/// `Widget`.\n\npub type OnClickCallbackType = Option<Box<dyn FnMut(&mut PushButtonWidget, &[WidgetContainer])>>;\n\n\n\n/// This is the storage object for the `PushButtonWidget`. It stores the config, properties, callback registry.\n\npub struct PushButtonWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n base_widget: BaseWidget,\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 68, "score": 30028.21875751871 }, { "content": " active: bool,\n\n selected: bool,\n\n in_bounds: bool,\n\n on_toggle: OnToggleCallbackType,\n\n}\n\n\n\n/// This is the implementation of the `ToggleButtonWidget` that draws a button on the screen that can be\n\n/// toggled on or off.\n\nimpl ToggleButtonWidget {\n\n /// Creates a new `ToggleButtonWidget` given the `x, y, w, h` coordinates, the `text` to display\n\n /// inside the button, `font_size` of the font to display, and the initial `selected` state: `true`\n\n /// being selected, `false` otherwise.\n\n pub fn new(\n\n x: i32,\n\n y: i32,\n\n w: u32,\n\n h: u32,\n\n text: String,\n\n font_size: i32,\n\n selected: bool,\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 69, "score": 30027.19523055628 }, { "content": " CONFIG_IMAGE_POSITION,\n\n};\n\nuse crate::render::Points;\n\n\n\nuse sdl2::render::Canvas;\n\nuse sdl2::video::Window;\n\n\n\nuse crate::render::widget_config::CompassPosition::Center;\n\nuse crate::widgets::image_widget::ImageWidget;\n\nuse crate::widgets::text_widget::{TextJustify, TextWidget};\n\nuse sdl2::pixels::Color;\n\nuse std::collections::HashMap;\n\n\n\n/// This is the callback type that is used when an `on_click` callback is triggered from this\n\n/// `Widget`.\n\npub type OnClickCallbackType = Option<Box<dyn FnMut(&mut ImageButtonWidget, &[WidgetContainer])>>;\n\n\n\n/// This is the storage object for the `ImageButtonWidget`. 
It stores the config, properties, callback registry.\n\npub struct ImageButtonWidget {\n\n config: WidgetConfig,\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 70, "score": 30024.867239243868 }, { "content": "\n\n self.base_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n self.text_widget.set_color(CONFIG_COLOR_TEXT, text_color);\n\n self.text_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n /// Assigns the callback closure that will be used when the `Widget` toggles state.\n\n pub fn on_toggle<F>(&mut self, callback: F)\n\n where\n\n F: FnMut(&mut ToggleButtonWidget, &[WidgetContainer], bool) + 'static,\n\n {\n\n self.on_toggle = Some(Box::new(callback));\n\n }\n\n\n\n /// Internal function that triggers the `on_toggle` callback.\n\n fn call_toggle_callback(&mut self, widgets: &[WidgetContainer]) {\n\n if let Some(mut cb) = self.on_toggle.take() {\n\n cb(self, widgets, self.selected);\n\n self.on_toggle = Some(cb);\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 71, "score": 30024.516167029844 }, { "content": " let mut base_widget = BaseWidget::new(x, y, w, h);\n\n let mut text_widget = TextWidget::new(\n\n String::from(\"assets/OpenSans-Regular.ttf\"),\n\n sdl2::ttf::FontStyle::NORMAL,\n\n font_size,\n\n TextJustify::Left,\n\n text.clone(),\n\n x + h as i32 + 6,\n\n y + 2,\n\n w - h - 10,\n\n h - 4,\n\n );\n\n let mut image_widget = ImageWidget::new(image_name, x + 2, y + 2, h - 4, h - 4, false);\n\n\n\n base_widget.set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n text_widget.set_color(CONFIG_COLOR_TEXT, Color::RGB(0, 0, 0));\n\n image_widget.set_compass(CONFIG_IMAGE_POSITION, Center);\n\n\n\n Self {\n\n config: WidgetConfig::new(x, y, w, h),\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 72, "score": 30023.10607511125 }, { "content": "\n\n fn draw_unhovered(&mut self) {\n\n self.base_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_TEXT, Color::RGB(0, 0, 0));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n /// Assigns the callback closure that will be used when a button click is triggered.\n\n pub fn on_click<F>(&mut self, callback: F)\n\n where\n\n F: FnMut(&mut ImageButtonWidget, &[WidgetContainer]) + 'static,\n\n {\n\n self.on_click = Some(Box::new(callback));\n\n }\n\n\n\n /// Internal function that triggers the `on_click` callback.\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 73, "score": 30022.98946015199 }, { "content": " pub fn on_click<F>(&mut self, callback: F)\n\n where\n\n F: FnMut(&mut PushButtonWidget, &[WidgetContainer]) + 'static,\n\n {\n\n self.on_click = Some(Box::new(callback));\n\n }\n\n\n\n /// Internal function that triggers the `on_click` callback.\n\n fn call_click_callback(&mut self, widgets: &[WidgetContainer]) {\n\n if let Some(mut cb) = self.on_click.take() {\n\n cb(self, widgets);\n\n self.on_click = Some(cb);\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `PushButtonWidget`.\n\nimpl Widget for PushButtonWidget {\n\n fn draw(&mut self, c: &mut Canvas<Window>) {\n\n // Paint the base widget first. 
Forcing a draw() call here will ignore invalidation.\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 74, "score": 30020.713817485877 }, { "content": "\n\n base_widget.set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n base_widget.set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n base_widget.set_numeric(CONFIG_BORDER_WIDTH, 2);\n\n\n\n text_widget.set_color(CONFIG_COLOR_TEXT, Color::RGB(0, 0, 0));\n\n\n\n Self {\n\n config: WidgetConfig::new(x, y, w, h),\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n base_widget,\n\n text_widget,\n\n active: false,\n\n in_bounds: false,\n\n on_click: None,\n\n }\n\n }\n\n\n\n fn draw_hovered(&mut self) {\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 75, "score": 30020.40171330193 }, { "content": " Color::RGB(255, 255, 255)\n\n } else {\n\n Color::RGB(0, 0, 0)\n\n };\n\n\n\n let mut config = WidgetConfig::new(x, y, w, h);\n\n\n\n base_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n base_widget.set_color(CONFIG_COLOR_BORDER, Color::RGB(0, 0, 0));\n\n base_widget.set_numeric(CONFIG_BORDER_WIDTH, 2);\n\n\n\n text_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n text_widget.set_color(CONFIG_COLOR_TEXT, text_color);\n\n\n\n config.set_toggle(CONFIG_SELECTED_STATE, selected);\n\n\n\n Self {\n\n config,\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 76, "score": 30020.138253505575 }, { "content": "// Pushrod Widget Library\n\n// Push Button Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::{\n\n WidgetConfig, CONFIG_BORDER_WIDTH, CONFIG_COLOR_BASE, CONFIG_COLOR_BORDER, CONFIG_COLOR_TEXT,\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 77, "score": 30019.027998419228 }, { "content": "// Pushrod Widget Library\n\n// Image Button Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::{\n\n WidgetConfig, CONFIG_BORDER_WIDTH, CONFIG_COLOR_BASE, CONFIG_COLOR_BORDER, CONFIG_COLOR_TEXT,\n", "file_path": 
"src/widgets/image_button_widget.rs", "rank": 78, "score": 30019.027998419228 }, { "content": " ) -> Self {\n\n let mut base_widget = BaseWidget::new(x, y, w, h);\n\n let mut text_widget = TextWidget::new(\n\n String::from(\"assets/OpenSans-Regular.ttf\"),\n\n sdl2::ttf::FontStyle::NORMAL,\n\n font_size,\n\n TextJustify::Center,\n\n text.clone(),\n\n x + 2,\n\n y + 2,\n\n w - 4,\n\n h - 4,\n\n );\n\n\n\n let base_color = if selected {\n\n Color::RGB(0, 0, 0)\n\n } else {\n\n Color::RGB(255, 255, 255)\n\n };\n\n let text_color = if selected {\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 79, "score": 30018.464368161323 }, { "content": " self.base_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(0, 0, 0));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_TEXT, Color::RGB(255, 255, 255));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(0, 0, 0));\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n fn draw_unhovered(&mut self) {\n\n self.base_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_TEXT, Color::RGB(0, 0, 0));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(255, 255, 255));\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n /// Assigns the callback closure that will be used when a button click is triggered.\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 80, "score": 30017.624749021157 }, { "content": " fn call_click_callback(&mut self, widgets: &[WidgetContainer]) {\n\n if let Some(mut cb) = self.on_click.take() {\n\n cb(self, widgets);\n\n self.on_click = Some(cb);\n\n }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `ImageButtonWidget`.\n\nimpl Widget for ImageButtonWidget {\n\n fn draw(&mut self, c: &mut Canvas<Window>) {\n\n // Paint the base widget first. Forcing a draw() call here will ignore invalidation.\n\n // Invalidation is controlled by the top level widget (this box).\n\n self.base_widget.draw(c);\n\n self.text_widget.draw(c);\n\n self.image_widget.draw(c);\n\n }\n\n\n\n /// When a mouse enters the bounds of the `Widget`, this function is triggered. This function\n\n /// implementation is **optional**.\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 81, "score": 30016.111329485775 }, { "content": " }\n\n }\n\n}\n\n\n\n/// This is the `Widget` implementation of the `ToggleButtonWidget`.\n\nimpl Widget for ToggleButtonWidget {\n\n /// Draws the `ToggleButtonWidget` contents.\n\n fn draw(&mut self, c: &mut Canvas<Window>) {\n\n // Paint the base widget first. 
Forcing a draw() call here will ignore invalidation.\n\n // Invalidation is controlled by the top level widget (this box).\n\n self.base_widget.draw(c);\n\n self.text_widget.draw(c);\n\n }\n\n\n\n /// When a mouse enters the bounds of the `Widget`, this function is triggered.\n\n fn mouse_entered(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n\n self.draw_hovered();\n\n }\n\n\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 82, "score": 30016.046690964406 }, { "content": " system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n base_widget,\n\n text_widget,\n\n image_widget,\n\n active: false,\n\n in_bounds: false,\n\n on_click: None,\n\n }\n\n }\n\n\n\n fn draw_hovered(&mut self) {\n\n self.base_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(0, 0, 0));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_TEXT, Color::RGB(255, 255, 255));\n\n self.text_widget\n\n .set_color(CONFIG_COLOR_BASE, Color::RGB(0, 0, 0));\n\n self.get_config().set_invalidate(true);\n\n }\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 83, "score": 30015.7185710381 }, { "content": "// Pushrod Widget Library\n\n// Toggle Button Widget\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::render::callbacks::CallbackRegistry;\n\nuse crate::render::widget::*;\n\nuse crate::render::widget_cache::WidgetContainer;\n\nuse crate::render::widget_config::*;\n\nuse crate::render::Points;\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 84, "score": 30015.65284483865 }, { "content": " };\n\n\n\n self.base_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n self.text_widget.set_color(CONFIG_COLOR_TEXT, text_color);\n\n self.text_widget.set_color(CONFIG_COLOR_BASE, base_color);\n\n self.get_config().set_invalidate(true);\n\n }\n\n\n\n /// Draws the state when the mouse leaves the scope of the `Widget`.\n\n fn draw_unhovered(&mut self) {\n\n let base_color = if self.selected {\n\n Color::RGB(0, 0, 0)\n\n } else {\n\n Color::RGB(255, 255, 255)\n\n };\n\n let text_color = if self.selected {\n\n Color::RGB(255, 255, 255)\n\n } else {\n\n Color::RGB(0, 0, 0)\n\n };\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 85, "score": 30015.59387038631 }, { "content": " _state: bool,\n\n ) {\n\n if _button == 1 {\n\n if _state {\n\n self.draw_hovered();\n\n self.active = true;\n\n } else {\n\n self.active = false;\n\n\n\n if self.in_bounds {\n\n self.selected = !self.selected;\n\n self.set_toggle(CONFIG_SELECTED_STATE, self.selected);\n\n self.call_toggle_callback(_widgets);\n\n }\n\n }\n\n }\n\n\n\n self.button_clicked_callback(_widgets, _button, _clicks, _state);\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 86, "score": 30015.277462938193 }, { "content": " /// When a mouse button is clicked within (or outside of) the bounds of the `Widget`, this\n\n /// function is called. 
If a mouse button is clicked, and the mouse leaves the bounds of the\n\n /// `Widget`, the mouse release event will still be triggered for the last `Widget` which\n\n /// received the mouse down state. This prevents `Widget`s from becoming confused. This\n\n /// behavior is tracked by the main loop, not by the `Widget` code. Therefore, when a mouse\n\n /// button is released outside of the bounds of _this_ `Widget`, you must adjust your state\n\n /// accordingly, if you pay attention to the `button_clicked` function. This function\n\n /// implementation is **optional**.\n\n fn button_clicked(\n\n &mut self,\n\n _widgets: &[WidgetContainer],\n\n _button: u8,\n\n _clicks: u8,\n\n _state: bool,\n\n ) {\n\n if _button == 1 {\n\n if _state {\n\n self.draw_hovered();\n\n self.active = true;\n\n } else {\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 87, "score": 30014.1322951735 }, { "content": " fn mouse_entered(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n\n self.draw_hovered();\n\n }\n\n\n\n self.in_bounds = true;\n\n self.mouse_entered_callback(_widgets);\n\n }\n\n\n\n /// When a mouse exits the bounds of the `Widget`, this function is triggered. This function\n\n /// implementation is **optional**.\n\n fn mouse_exited(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n\n self.draw_unhovered();\n\n }\n\n\n\n self.in_bounds = false;\n\n self.mouse_exited_callback(_widgets);\n\n }\n\n\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 88, "score": 30013.529175302123 }, { "content": " self.in_bounds = true;\n\n self.mouse_entered_callback(_widgets);\n\n }\n\n\n\n /// When a mouse exits the bounds of the `Widget`, this function is triggered.\n\n fn mouse_exited(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n\n self.draw_unhovered();\n\n }\n\n\n\n self.in_bounds = false;\n\n self.mouse_exited_callback(_widgets);\n\n }\n\n\n\n /// Overrides the `button_clicked` callback to handle toggling.\n\n fn button_clicked(\n\n &mut self,\n\n _widgets: &[WidgetContainer],\n\n _button: u8,\n\n _clicks: u8,\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 89, "score": 30013.44139683877 }, { "content": " // Invalidation is controlled by the top level widget (this box).\n\n self.base_widget.draw(c);\n\n self.text_widget.draw(c);\n\n }\n\n\n\n /// When a mouse enters the bounds of the `Widget`, this function is triggered. This function\n\n /// implementation is **optional**.\n\n fn mouse_entered(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n\n self.draw_hovered();\n\n }\n\n\n\n self.in_bounds = true;\n\n self.mouse_entered_callback(_widgets);\n\n }\n\n\n\n /// When a mouse exits the bounds of the `Widget`, this function is triggered. 
This function\n\n /// implementation is **optional**.\n\n fn mouse_exited(&mut self, _widgets: &[WidgetContainer]) {\n\n if self.active {\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 90, "score": 30013.36564473527 }, { "content": " _state: bool,\n\n ) {\n\n if _button == 1 {\n\n if _state {\n\n self.draw_hovered();\n\n self.active = true;\n\n } else {\n\n let had_bounds = self.active;\n\n\n\n self.draw_unhovered();\n\n self.active = false;\n\n\n\n if self.in_bounds && had_bounds {\n\n // Callback here\n\n eprintln!(\"Call callback here: clicks={}\", _clicks);\n\n self.call_click_callback(_widgets);\n\n }\n\n }\n\n }\n\n\n\n self.button_clicked_callback(_widgets, _button, _clicks, _state);\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 91, "score": 30012.825636246143 }, { "content": " base_widget,\n\n text_widget,\n\n active: false,\n\n selected,\n\n in_bounds: false,\n\n on_toggle: None,\n\n }\n\n }\n\n\n\n /// Draws the state when the mouse is over the top of the `Widget`.\n\n fn draw_hovered(&mut self) {\n\n let base_color = if self.selected {\n\n Color::RGB(255, 255, 255)\n\n } else {\n\n Color::RGB(0, 0, 0)\n\n };\n\n let text_color = if self.selected {\n\n Color::RGB(0, 0, 0)\n\n } else {\n\n Color::RGB(255, 255, 255)\n", "file_path": "src/widgets/toggle_button_widget.rs", "rank": 92, "score": 30012.610326022233 }, { "content": " self.draw_unhovered();\n\n }\n\n\n\n self.in_bounds = false;\n\n self.mouse_exited_callback(_widgets);\n\n }\n\n\n\n /// When a mouse button is clicked within (or outside of) the bounds of the `Widget`, this\n\n /// function is called. If a mouse button is clicked, and the mouse leaves the bounds of the\n\n /// `Widget`, the mouse release event will still be triggered for the last `Widget` which\n\n /// received the mouse down state. This prevents `Widget`s from becoming confused. This\n\n /// behavior is tracked by the main loop, not by the `Widget` code. Therefore, when a mouse\n\n /// button is released outside of the bounds of _this_ `Widget`, you must adjust your state\n\n /// accordingly, if you pay attention to the `button_clicked` function. This function\n\n /// implementation is **optional**.\n\n fn button_clicked(\n\n &mut self,\n\n _widgets: &[WidgetContainer],\n\n _button: u8,\n\n _clicks: u8,\n", "file_path": "src/widgets/push_button_widget.rs", "rank": 93, "score": 30012.155258301762 }, { "content": " let had_bounds = self.active;\n\n\n\n self.draw_unhovered();\n\n self.active = false;\n\n\n\n if self.in_bounds && had_bounds {\n\n // Callback here\n\n eprintln!(\"Call callback here: clicks={}\", _clicks);\n\n self.call_click_callback(_widgets);\n\n }\n\n }\n\n }\n\n\n\n self.button_clicked_callback(_widgets, _button, _clicks, _state);\n\n }\n\n\n\n default_widget_properties!();\n\n default_widget_callbacks!();\n\n}\n", "file_path": "src/widgets/image_button_widget.rs", "rank": 94, "score": 30010.262547557828 }, { "content": "/// determines the color of the border. 
The width of the border is controlled by the\n\n/// `get_config().border_width` property.\n\npub struct BaseWidget {\n\n config: WidgetConfig,\n\n system_properties: HashMap<i32, String>,\n\n callback_registry: CallbackRegistry,\n\n}\n\n\n\n/// Base top-level implementation of the `BaseWidget`, which other classes can extend.\n\nimpl BaseWidget {\n\n /// Constructs a new base widget, given the points of origin and size.\n\n pub fn new(x: i32, y: i32, w: u32, h: u32) -> Self {\n\n Self {\n\n config: WidgetConfig::new(x, y, w, h),\n\n system_properties: HashMap::new(),\n\n callback_registry: CallbackRegistry::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/render/widget.rs", "rank": 95, "score": 25081.96050298144 }, { "content": "/// Implementation for drawing a `BaseWidget`, with the `Widget` trait objects applied.\n\nimpl Widget for BaseWidget {\n\n fn draw(&mut self, mut _canvas: &mut Canvas<Window>) {\n\n let base_color = self.get_config().get_color(CONFIG_COLOR_BASE);\n\n let border_color = self.get_config().get_color(CONFIG_COLOR_BORDER);\n\n\n\n _canvas.set_draw_color(base_color);\n\n\n\n _canvas.fill_rect(self.get_drawing_area()).unwrap();\n\n\n\n if self.get_config().get_numeric(CONFIG_BORDER_WIDTH) > 0 && base_color != border_color {\n\n _canvas.set_draw_color(border_color);\n\n\n\n for border in 0..self.get_config().get_numeric(CONFIG_BORDER_WIDTH) {\n\n _canvas\n\n .draw_rect(Rect::new(\n\n self.config.to_x(border),\n\n self.config.to_y(border),\n\n self.get_config().get_size(CONFIG_SIZE)[0] - (border as u32 * 2),\n\n self.get_config().get_size(CONFIG_SIZE)[1] - (border as u32 * 2),\n", "file_path": "src/render/widget.rs", "rank": 96, "score": 25074.19522406358 }, { "content": " }\n\n\n\n /// Sets the origin of the `Widget`, adjusting the X and Y coordinates. Automatically sets the\n\n /// `invalidate` flag to `true` when adjusted, but only if the new origin is not the same as\n\n /// the previous origin.\n\n fn set_origin(&mut self, _origin: Points) {\n\n let old_origin = self.get_config().get_point(CONFIG_ORIGIN);\n\n\n\n if _origin[0] != old_origin[0] || _origin[1] != old_origin[1] {\n\n self.get_config()\n\n .set_point(CONFIG_ORIGIN, _origin[0], _origin[1]);\n\n self.get_config().set_invalidate(true);\n\n }\n\n }\n\n\n\n /// Sets the size of the `Widget`, adjusting the width and height. Automatically\n\n /// sets the `invalidate` flag to `true` when adjusted, but only if the new size is not the\n\n /// same as the previous size.\n\n fn set_size(&mut self, _size: Vec<u32>) {\n\n let old_size = self.get_config().get_size(CONFIG_SIZE);\n", "file_path": "src/render/widget.rs", "rank": 97, "score": 25072.527708117035 }, { "content": " }\n\n\n\n /// Retrieves a numeric value for a configuration key. Returns 0 if not set.\n\n fn get_numeric(&mut self, k: u8) -> i32 {\n\n self.get_config().get_numeric(k)\n\n }\n\n\n\n /// Retrieves text for a configuration key. Returns a blank string if not set.\n\n fn get_text(&mut self, k: u8) -> String {\n\n self.get_config().get_text(k)\n\n }\n\n\n\n /// Retrieves a boolean toggle for a configuration key. Returns `false` if not set.\n\n fn get_toggle(&mut self, k: u8) -> bool {\n\n self.get_config().get_toggle(k)\n\n }\n\n\n\n /// Retrieves a `CompassPosition` toggle for a configuration key. 
Returns `CompassPosition::W` if not set.\n\n fn get_compass(&mut self, k: u8) -> CompassPosition {\n\n self.get_config().get_compass(k)\n", "file_path": "src/render/widget.rs", "rank": 98, "score": 25072.403102244007 }, { "content": " _clicks: u8,\n\n _state: bool,\n\n ) {\n\n }\n\n\n\n /// This callback is called when a setter is used to configure a value. It is _not_ called when a\n\n /// call to `get_config()` using the setter is called, so it is best to use the top-level setters\n\n /// and getters for the configuration values - at least, until the `get_config()` call can be made\n\n /// private.\n\n fn on_config_changed(&mut self, _k: u8, _v: Config) {}\n\n\n\n /// Sets a point for a configuration key.\n\n fn set_point(&mut self, config: u8, x: i32, y: i32) {\n\n self.get_config().set_point(config, x, y);\n\n self.on_config_changed(config, Config::Points(vec![x, y]));\n\n }\n\n\n\n /// Sets a color for a configuration key.\n\n fn set_color(&mut self, config: u8, color: Color) {\n\n self.get_config().set_color(config, color);\n", "file_path": "src/render/widget.rs", "rank": 99, "score": 25071.6612325653 } ]
Rust
src/nfa.rs
MarioJim/finite-automata-tui
c22e9c0de28a199efbdfcabda7e8184fecaf1a20
use multimap::MultiMap; use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::fmt; #[derive(Debug)] pub struct NFA { pub alphabet: HashSet<String>, initial_state: String, states: HashMap<String, NFAState>, } #[derive(Debug)] struct NFAState { is_final: bool, transitions: MultiMap<String, String>, } impl NFA { fn transition(&self, from_state: &str, symbol: &str) -> Option<&Vec<String>> { self.states .get(from_state) .unwrap() .transitions .get_vec(symbol) } pub fn resolve_transitions(&self, symbols: Vec<&str>) -> Result<Vec<String>, String> { let mut current_states: HashSet<&str> = [self.initial_state.as_ref()].iter().cloned().collect(); for symbol in symbols { if !self.alphabet.contains(symbol) { return Err(format!("Symbol {} isn't the in alphabet", symbol)); } let mut next_states: HashSet<&str> = HashSet::with_capacity(self.states.len()); for current_state in current_states { if let Some(v) = self.transition(current_state, &symbol) { for state_to_transition in v { next_states.insert(state_to_transition.as_str()); } } } current_states = next_states; } Ok(current_states.iter().map(|&s| s.to_string()).collect()) } pub fn is_any_state_final(&self, states: Vec<String>) -> bool { for state in states { if self.states.get(state.as_str()).unwrap().is_final { return true; } } false } } impl TryFrom<String> for NFA { type Error = &'static str; fn try_from(file: String) -> Result<Self, Self::Error> { let mut lines = file.lines(); let mut states: HashMap<String, NFAState> = HashMap::new(); for state_name in lines.next().unwrap().split(',') { states.insert( state_name.to_string(), NFAState { is_final: false, transitions: MultiMap::new(), }, ); } let mut alphabet: HashSet<String> = HashSet::new(); match lines.next() { Some(s) => { let symbols: Vec<&str> = s.split(',').collect(); for symbol in symbols { alphabet.insert(symbol.to_string()); } } None => return Err("Couldn't find the alphabet definition"), }; let initial_state = match lines.next() { Some(s) => match states.contains_key(s) { true => s.to_string(), false => return Err("Couldn't find initial state"), }, None => return Err("Couldn't find the initial state definition"), }; let final_states: Vec<&str> = match lines.next() { Some(s) => s.split(',').collect(), None => return Err("Couldn't find the final states definition"), }; for final_state in final_states { match states.get_mut(final_state) { Some(state) => state.is_final = true, None => return Err("Couldn't find final state"), } } for line in lines { let mut line_iter = line.split("=>"); let from_symbol: Vec<&str> = match line_iter.next() { Some(s) => s.split(',').collect(), None => return Err("Transition line is empty"), }; let from_state = match from_symbol.get(0) { Some(&s) => match states.contains_key(s) { true => s, false => { return Err("Couldn't find a transition's starting state in defined states") } }, None => return Err("Couldn't find starting state in a transition"), }; let symbol = match from_symbol.get(1) { Some(&s) => match alphabet.contains(s) { true => s, false => return Err("Symbol doesn't belong to alphabet definition"), }, None => return Err("Couldn't find symbol in transition definition"), }; let next_states: Vec<&str> = match line_iter.next() { Some(s) => s.split(',').collect(), None => return Err("Transition couldn't be split correctly"), }; for next_state in next_states { if states.contains_key(next_state) { states .get_mut(from_state) .unwrap() .transitions .insert(symbol.to_string(), next_state.to_string()); } else { return Err("Couldn't find 
ending state in transition in defined states"); } } } Ok(NFA { states, initial_state, alphabet, }) } } impl fmt::Display for NFA { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Automata {{\n")?; write!(f, " Language: {:?},\n", self.alphabet)?; write!(f, " States: [\n")?; for (state_name, state) in &self.states { write!(f, " {}: {:?}\n", state_name, state)?; } write!(f, " ],\n")?; write!(f, " Initial State: {},\n", self.initial_state)?; write!(f, "}}") } }
use multimap::MultiMap; use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::fmt; #[derive(Debug)] pub struct NFA { pub alphabet: HashSet<String>, initial_state: String, states: HashMap<String, NFAState>, } #[derive(Debug)] struct NFAState { is_final: bool, transitions: MultiMap<String, String>, } impl NFA { fn transition(&self, from_state: &str, symbol: &str) -> Option<&Vec<String>> { self.states .get(from_state) .unwrap() .transitions .get_vec(symbol) } pub fn resolve_transitions(&self, symbols: Vec<&str>) -> Result<Vec<String>, String> { let mut current_states: HashSet<&str> = [self.initial_state.as_ref()].iter().cloned().collect(); for symbol in symbols { if !self.alphabet.contains(symbol) { return Err(format!("Symbol {} isn't the in alphabet", symbol)); } let mut next_states: HashSet<&str> = HashSet::with_capacity(self.states.len()); for current_state in current_states { if let Some(v) = self.transition(current_state, &symbol) { for state_to_transition in v { next_states.insert(state_to_transition.as_str()); } } } current_states = next_states; } Ok(current_states.iter().map(|&s| s.to_string()).collect()) } pub fn is_any_state_final(&self, states: Vec<String>) -> bool { for state in states { if self.states.get(state.as_str()).unwrap().is_final { return true; } } false } } impl TryFrom<String> for NFA { type Error = &'static str; fn try_from(file: String) -> Result<Self, Self::Error> { let mut lines = file.lines(); let mut states: HashMap<String, NFAState> = HashMap::new(); for state_name in lines.next().unwrap().split(',') { states.insert( state_name.to_string(), NFAState { is_final: false, transitions: MultiMap::new(), }, ); } let mut alphabet: HashSet<String> = HashSet::new(); match lines.next() { Some(s) => { let symbols: Vec<&str> = s.split(',').collect(); for symbol in symbols { alphabet.insert(symbol.to_string()); } } None => return Err("Couldn't find the alphabet definition"), }; let initial_state = match lines.next() { Some(s) => match states.contains_key(s) { true => s.to_string(), false => return Err("Couldn't find initial state"), }, None => return Err("Couldn't find the initial state definition"), }; let final_states: Vec<&str> = match lines.next() { Some(s) => s.split(',').collect(), None => return Err("Couldn't find the final states definition"), }; for final_state in final_states { match states.get_mut(final_state) { Some(state) => state.is_final = true, None => return Err("Couldn't find final state"), } } for line in lines { let mut line_iter = line.split("=>"); let from_symbol: Vec<&str> =
; let from_state = match from_symbol.get(0) { Some(&s) => match states.contains_key(s) { true => s, false => { return Err("Couldn't find a transition's starting state in defined states") } }, None => return Err("Couldn't find starting state in a transition"), }; let symbol = match from_symbol.get(1) { Some(&s) => match alphabet.contains(s) { true => s, false => return Err("Symbol doesn't belong to alphabet definition"), }, None => return Err("Couldn't find symbol in transition definition"), }; let next_states: Vec<&str> = match line_iter.next() { Some(s) => s.split(',').collect(), None => return Err("Transition couldn't be split correctly"), }; for next_state in next_states { if states.contains_key(next_state) { states .get_mut(from_state) .unwrap() .transitions .insert(symbol.to_string(), next_state.to_string()); } else { return Err("Couldn't find ending state in transition in defined states"); } } } Ok(NFA { states, initial_state, alphabet, }) } } impl fmt::Display for NFA { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Automata {{\n")?; write!(f, " Language: {:?},\n", self.alphabet)?; write!(f, " States: [\n")?; for (state_name, state) in &self.states { write!(f, " {}: {:?}\n", state_name, state)?; } write!(f, " ],\n")?; write!(f, " Initial State: {},\n", self.initial_state)?; write!(f, "}}") } }
match line_iter.next() { Some(s) => s.split(',').collect(), None => return Err("Transition line is empty"), }
if_condition
[ { "content": "pub fn show_tui(automata_struct: nfa::NFA) {\n\n let instructions = \"Write the symbols separated by a space, press Esc or Ctrl+C to quit\";\n\n let mut input_f = input_field::InputField::new();\n\n let term = Term::with_height(TermHeight::Fixed(4)).unwrap();\n\n while let Ok(ev) = term.poll_event() {\n\n let _ = term.clear();\n\n // Match event\n\n match ev {\n\n Event::Key(Key::ESC) | Event::Key(Key::Ctrl('c')) => break,\n\n Event::Key(k) => input_f.receive_event(k),\n\n _ => {}\n\n }\n\n // Print instructions\n\n let _ = term.print(0, 0, instructions);\n\n // Print current states\n\n let final_states = automata_struct.resolve_transitions(input_f.get_split_value());\n\n let is_accepted = match final_states {\n\n Ok(v) => {\n\n let _ = term.print(1, 0, format!(\"{:?}\", v).as_str());\n\n automata_struct.is_any_state_final(v)\n", "file_path": "src/tui/mod.rs", "rank": 0, "score": 64911.53208385265 }, { "content": "#[derive(Debug)]\n\nstruct DFAState {\n\n is_final: bool,\n\n transitions: HashMap<String, String>,\n\n}\n\n\n\nimpl DFA {\n\n #[allow(dead_code)]\n\n fn transition(&self, from_state: &str, symbol: &str) -> Option<&String> {\n\n self.states.get(from_state).unwrap().transitions.get(symbol)\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn resolve_transitions(&self, symbols: Vec<&str>) -> Result<Option<String>, String> {\n\n let mut current_state: String = self.initial_state.clone();\n\n for symbol in symbols {\n\n // Check that symbol is in alphabet\n\n if !self.alphabet.contains(symbol) {\n\n return Err(format!(\"Symbol {} isn't the in alphabet\", symbol));\n\n }\n\n // Compute next state\n", "file_path": "src/dfa.rs", "rank": 2, "score": 38101.547351497255 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let file =\n\n fs::read_to_string(filename).expect(format!(\"Couldn't read file {}\", filename).as_ref());\n\n let automata_struct = nfa::NFA::try_from(file).unwrap();\n\n println!(\"{}\", automata_struct);\n\n\n\n tui::show_tui(automata_struct);\n\n}\n", "file_path": "src/main.rs", "rank": 3, "score": 23991.174853140576 }, { "content": " let next_state = self.transition(current_state.as_str(), symbol);\n\n let next_state = match next_state {\n\n Some(s) => s.clone(),\n\n None => return Ok(None),\n\n };\n\n current_state = next_state;\n\n }\n\n Ok(Some(current_state))\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn is_this_state_final(&self, state: &str) -> bool {\n\n self.states.get(state).unwrap().is_final\n\n }\n\n}\n\n\n\nimpl From<nfa::NFA> for DFA {\n\n fn from(original_automata: nfa::NFA) -> Self {\n\n #[allow(unused_variables)]\n\n let alphabet = original_automata.alphabet;\n", "file_path": "src/dfa.rs", "rank": 12, "score": 14.46667028904843 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::fmt;\n\n\n\nuse crate::nfa;\n\n\n\n#[derive(Debug)]\n\npub struct DFA {\n\n alphabet: HashSet<String>,\n\n initial_state: String,\n\n states: HashMap<String, DFAState>,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/dfa.rs", "rank": 13, "score": 9.374378186357365 }, { "content": "\n\n unimplemented!()\n\n // TODO: Implement the conversion from NFA to DFA\n\n }\n\n}\n\n\n\nimpl fmt::Display for DFA {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Automata {{\\n\")?;\n\n write!(f, \" Language: {:?},\\n\", self.alphabet)?;\n\n write!(f, \" States: [\\n\")?;\n\n for (state_name, state) in &self.states {\n\n write!(f, \" {}: {:?}\\n\", state_name, state)?;\n\n }\n\n 
write!(f, \" ],\\n\")?;\n\n write!(f, \" Initial State: {},\\n\", self.initial_state)?;\n\n write!(f, \"}}\")\n\n }\n\n}\n", "file_path": "src/dfa.rs", "rank": 14, "score": 7.2209451967798834 }, { "content": "use tuikit::key::Key;\n\nuse tuikit::term::Term;\n\n\n\n#[derive(Debug)]\n\npub struct InputField(String);\n\n\n\nimpl InputField {\n\n pub fn new() -> Self {\n\n InputField(String::new())\n\n }\n\n\n\n pub fn receive_event(&mut self, key: Key) {\n\n match key {\n\n Key::Char(ch) => {\n\n if (ch >= 'a' && ch <= 'z') || ch == ' ' {\n\n self.0.push(ch);\n\n }\n\n }\n\n Key::Backspace => {\n\n self.0.pop();\n", "file_path": "src/tui/input_field.rs", "rank": 15, "score": 7.180575723510655 }, { "content": "use tuikit::prelude::*;\n\n\n\nuse crate::nfa;\n\n\n\nmod input_field;\n\n\n", "file_path": "src/tui/mod.rs", "rank": 16, "score": 3.6929001434887065 }, { "content": " }\n\n _ => (),\n\n };\n\n }\n\n\n\n pub fn print_in_term(&self, term: &Term, row: usize) {\n\n let _ = term.print(row, 0, self.0.as_str());\n\n let _ = term.set_cursor(row, self.0.len());\n\n }\n\n\n\n pub fn get_value(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n\n\n pub fn get_split_value(&self) -> Vec<&str> {\n\n self.get_value().split_whitespace().collect()\n\n }\n\n}\n", "file_path": "src/tui/input_field.rs", "rank": 17, "score": 3.6773424253800036 }, { "content": "use std::convert::TryFrom;\n\nuse std::env;\n\nuse std::fs;\n\n\n\nmod dfa;\n\nmod nfa;\n\nmod tui;\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 3.499538643161438 }, { "content": " }\n\n Err(e) => {\n\n let attr = Attr {\n\n fg: Color::RED,\n\n ..Attr::default()\n\n };\n\n let _ = term.print_with_attr(1, 0, e.as_str(), attr);\n\n false\n\n }\n\n };\n\n // Print if string is accepted in the language\n\n let is_accepted = format!(\n\n \"That string {} accepted by the automata\",\n\n if is_accepted { \"is\" } else { \"isn't\" }\n\n );\n\n let _ = term.print(2, 0, is_accepted.as_str());\n\n // Print input\n\n input_f.print_in_term(&term, 3);\n\n // Display\n\n let _ = term.present();\n\n }\n\n}\n", "file_path": "src/tui/mod.rs", "rank": 19, "score": 2.748246015800839 } ]
Rust
src/middle/builder.rs
reitermarkus/libffi-rs
693182bf1fb16da2c00ace0cc13920a98845c9a3
use std::any::Any; use super::types::Type; #[derive(Clone, Debug)] pub struct Builder { args: Vec<Type>, res: Type, abi: super::FfiAbi, } impl Default for Builder { fn default() -> Self { Builder::new() } } impl Builder { pub fn new() -> Self { Builder { args: vec![], res: Type::void(), abi: super::ffi_abi_FFI_DEFAULT_ABI, } } pub fn arg(mut self, type_: Type) -> Self { self.args.push(type_); self } pub fn args<I>(mut self, types: I) -> Self where I: IntoIterator<Item=Type> { self.args.extend(types.into_iter()); self } pub fn res(mut self, type_: Type) -> Self { self.res = type_; self } pub fn abi(mut self, abi: super::FfiAbi) -> Self { self.abi = abi; self } pub fn into_cif(self) -> super::Cif { let mut result = super::Cif::new(self.args, self.res); result.set_abi(self.abi); result } pub fn into_closure<U, R>( self, callback: super::Callback<U, R>, userdata: &U) -> super::Closure { super::Closure::new(self.into_cif(), callback, userdata) } pub fn into_closure_mut<U, R>( self, callback: super::CallbackMut<U, R>, userdata: &mut U) -> super::Closure { super::Closure::new_mut(self.into_cif(), callback, userdata) } pub fn into_closure_once<U: Any, R>( self, callback: super::CallbackOnce<U, R>, userdata: U) -> super::ClosureOnce { super::ClosureOnce::new(self.into_cif(), callback, userdata) } }
use std::any::Any; use super::types::Type; #[derive(Clone, Debug)] pub struct Builder { args: Vec<Type>, res: Type, abi: super::FfiAbi, } impl Default for Builder { fn default() -> Self { Builder::new() } } impl Builder { pub fn new() -> Self { Builder { args: vec![], res: Type::void(), abi: super::ffi_abi_FFI_DEFAULT_ABI, } } pub fn arg(mut self, type_: Type) -> Self { self.args.push(type_); self }
pub fn res(mut self, type_: Type) -> Self { self.res = type_; self } pub fn abi(mut self, abi: super::FfiAbi) -> Self { self.abi = abi; self } pub fn into_cif(self) -> super::Cif { let mut result = super::Cif::new(self.args, self.res); result.set_abi(self.abi); result } pub fn into_closure<U, R>( self, callback: super::Callback<U, R>, userdata: &U) -> super::Closure { super::Closure::new(self.into_cif(), callback, userdata) } pub fn into_closure_mut<U, R>( self, callback: super::CallbackMut<U, R>, userdata: &mut U) -> super::Closure { super::Closure::new_mut(self.into_cif(), callback, userdata) } pub fn into_closure_once<U: Any, R>( self, callback: super::CallbackOnce<U, R>, userdata: U) -> super::ClosureOnce { super::ClosureOnce::new(self.into_cif(), callback, userdata) } }
pub fn args<I>(mut self, types: I) -> Self where I: IntoIterator<Item=Type> { self.args.extend(types.into_iter()); self }
function_block-full_function
[ { "content": "/// Constructs an [`Arg`](struct.Arg.html) for passing to\n\n/// [`call`](fn.call.html).\n\npub fn arg<T: super::CType>(arg: &T) -> Arg {\n\n Arg::new(arg)\n\n}\n\n\n\n/// Performs a dynamic call to a C function.\n\n///\n\n/// To reduce boilerplate, see [`ffi_call!`](../../macro.ffi_call!.html).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// extern \"C\" fn hypot(x: f32, y: f32) -> f32 {\n\n/// (x * x + y * y).sqrt()\n\n/// }\n\n///\n\n/// use libffi::high::call::*;\n\n///\n\n/// let result = unsafe {\n\n/// call::<f32>(CodePtr(hypot as *mut _), &[arg(&3f32), arg(&4f32)])\n\n/// };\n", "file_path": "src/high/call.rs", "rank": 0, "score": 109018.6929101348 }, { "content": "/// Coerces an argument reference into the [`Arg`](struct.Arg.html)\n\n/// type.\n\n///\n\n/// This is used to wrap each argument pointer before passing them\n\n/// to [`Cif::call`](struct.Cif.html#method.call).\n\n/// (This is the same as [`Arg::new`](struct.Arg.html#method.new)).\n\npub fn arg<T>(r: &T) -> Arg {\n\n Arg::new(r)\n\n}\n\n\n\n/// Describes the calling convention and types for calling a function.\n\n///\n\n/// This is the `middle` layer’s wrapping of the `low` and `raw` layers’\n\n/// [`ffi_cif`](../raw/struct.ffi_cif.html). An initialized CIF contains\n\n/// references to an array of argument types and a result type, each of\n\n/// which may be allocated on the heap. `Cif` manages the memory of\n\n/// those referenced objects.\n\n///\n\n/// Construct with [`Cif::new`](#method.new) or\n\n/// [`Cif::from_type_array`](#method.from_type_array).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// extern \"C\" fn add(x: f64, y: &f64) -> f64 {\n\n/// return x + y;\n", "file_path": "src/middle/mod.rs", "rank": 1, "score": 99511.8662472726 }, { "content": "fn main() {\n\n Type::structure(vec![\n\n Type::u16(),\n\n Type::u16(),\n\n ].into_iter());\n\n}\n", "file_path": "examples/types.rs", "rank": 2, "score": 65203.31902655413 }, { "content": "type TypeArray_ = *mut Type_;\n\n\n", "file_path": "src/middle/types.rs", "rank": 3, "score": 59529.22362874649 }, { "content": "type Type_ = *mut low::ffi_type;\n", "file_path": "src/middle/types.rs", "rank": 4, "score": 56200.78599651047 }, { "content": "/// Allocates a closure.\n\n///\n\n/// Returns a pair of the writable closure object and the function\n\n/// pointer for calling it. The former acts as a handle to the closure,\n\n/// and is used to configure and free it. The latter is the code pointer\n\n/// used to invoke the closure. Before it can be invoked, it must be\n\n/// initialized with [`prep_closure`](fn.prep_closure.html) and\n\n/// [`prep_closure_mut`](fn.prep_closure_mut.html). 
The closure must be\n\n/// deallocated using [`closure_free`](fn.closure_free.html), after\n\n/// which point the code pointer should not be used.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use libffi::low::*;\n\n///\n\n/// let (closure_handle, code_ptr) = closure_alloc();\n\n/// ```\n\npub fn closure_alloc() -> (*mut ffi_closure, CodePtr) {\n\n unsafe {\n\n let mut code_pointer: *mut c_void = mem::uninitialized();\n\n let closure = raw::ffi_closure_alloc(mem::size_of::<ffi_closure>(),\n\n &mut code_pointer);\n\n (closure as *mut ffi_closure, CodePtr::from_ptr(code_pointer))\n\n }\n\n}\n\n\n\n/// Frees a closure.\n\n///\n\n/// Closures allocated with [`closure_alloc`](fn.closure_alloc.html)\n\n/// must be deallocated with `closure_free`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use libffi::low::*;\n\n///\n\n/// let (closure_handle, code_ptr) = closure_alloc();\n", "file_path": "src/low.rs", "rank": 5, "score": 55063.026040615085 }, { "content": "// Informal indication that the object should be considered owned by\n\n// the given reference.\n\ntype Owned<T> = T;\n\n\n\n/// Represents a single C type.\n\n///\n\n/// # Example\n\n///\n\n/// Suppose we have a C struct:\n\n///\n\n/// ```c\n\n/// struct my_struct {\n\n/// uint16_t f1;\n\n/// uint64_t f2;\n\n/// };\n\n/// ```\n\n///\n\n/// To pass the struct by value via libffi, we need to construct a\n\n/// `Type` object describing its layout:\n\n///\n\n/// ```\n\n/// use libffi::middle::Type;\n", "file_path": "src/middle/types.rs", "rank": 6, "score": 40905.03793671755 }, { "content": "fn main() {\n\n let mut v = vec![3, 4, 8, 1, 2, 0, 9];\n\n qsort(&mut v);\n\n\n\n assert_eq!(vec![0, 1, 2, 3, 4, 8, 9], v);\n\n}\n", "file_path": "examples/sort.rs", "rank": 7, "score": 40625.52793919471 }, { "content": "fn qsort<T: Ord>(array: &mut [T]) {\n\n use std::cmp::Ordering::*;\n\n use std::mem;\n\n use std::os::raw::c_void;\n\n\n\n let lambda = |x: *const c_void, y: *const c_void| {\n\n let x = unsafe { &*(x as *const T) };\n\n let y = unsafe { &*(y as *const T) };\n\n match x.cmp(y) {\n\n Less => -1,\n\n Equal => 0,\n\n Greater => 1,\n\n }\n\n };\n\n let compare = Closure2::new(&lambda);\n\n\n\n unsafe {\n\n c::qsort(array.as_ptr() as *const _,\n\n array.len(),\n\n mem::size_of::<T>(),\n\n *compare.code_ptr())\n\n }\n\n}\n\n\n", "file_path": "examples/sort.rs", "rank": 8, "score": 28038.384379304687 }, { "content": "extern crate libffi;\n\n\n\nuse libffi::middle::Type;\n\n\n", "file_path": "examples/types.rs", "rank": 9, "score": 26273.163339025676 }, { "content": "///\n\n/// let my_struct = Type::structure(vec![\n\n/// Type::u64(),\n\n/// Type::u16(),\n\n/// ]);\n\n/// ```\n\npub struct Type(Unique<low::ffi_type>);\n\n\n\n/// Represents a sequence of C types.\n\n///\n\n/// This can be used to construct a struct type or as the arguments\n\n/// when creating a [`Cif`](struct.Cif.html).\n\npub struct TypeArray(Unique<*mut low::ffi_type>);\n\n\n\nimpl fmt::Debug for Type {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_fmt(format_args!(\"Type({:?})\", *self.0))\n\n }\n\n}\n\n\n", "file_path": "src/middle/types.rs", "rank": 19, "score": 24595.83764237319 }, { "content": " Unique::new(ffi_type_struct_create(fields.into_iter()))\n\n })\n\n }\n\n\n\n /// Gets a raw pointer to the underlying\n\n /// [`ffi_type`](../raw/struct._ffi_type.html).\n\n ///\n\n /// This method may be useful for interacting with the\n\n /// [`low`](../low/index.html) and\n\n /// [`raw`](../raw/index.html) layers.\n\n pub fn 
as_raw_ptr(&self) -> *mut low::ffi_type {\n\n *self.0\n\n }\n\n}\n\n\n\nimpl TypeArray {\n\n /// Constructs an array the given `Type`s.\n\n pub fn new<I>(elements: I) -> Self\n\n where I: IntoIterator<Item=Type>,\n\n I::IntoIter: ExactSizeIterator<Item=Type>\n", "file_path": "src/middle/types.rs", "rank": 20, "score": 24593.465406282663 }, { "content": "pub unsafe trait CType : Copy {\n\n /// Creates or retrieves a `Type<T>` for any type `T: CType`.\n\n ///\n\n /// We can use the resulting object to assemble a CIF to set up\n\n /// a call that uses type `T`.\n\n fn reify() -> Type<Self>;\n\n}\n\n\n\nmacro_rules! impl_ffi_type {\n\n ($type_:ty, $cons:ident) => {\n\n unsafe impl<> CType for $type_ {\n\n fn reify() -> Type<Self> {\n\n Type::make(middle::Type::$cons())\n\n }\n\n }\n\n };\n\n ($type_:ident) => {\n\n impl_ffi_type!($type_, $type_);\n\n };\n\n}\n", "file_path": "src/high/types.rs", "rank": 21, "score": 24591.99675893121 }, { "content": "//! Representations of C types for the high layer.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse super::super::middle;\n\n\n\n/// Represents a C type statically associated with a Rust type.\n\n///\n\n/// In particular, the run-time value describes a particular C type,\n\n/// while the type parameter `T` is the equivalent Rust type.\n\n/// Instances of this type are created via the\n\n/// [`CType`](trait.CType.html) trait.\n\n#[derive(Clone, Debug)]\n\npub struct Type<T> {\n\n untyped: middle::Type,\n\n _marker: PhantomData<*mut T>,\n\n}\n\n\n\nimpl<T> Type<T> {\n\n fn make(untyped: middle::Type) -> Self {\n", "file_path": "src/high/types.rs", "rank": 22, "score": 24590.58849559614 }, { "content": " pub fn void() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::void) })\n\n }\n\n\n\n /// Returns the unsigned 8-bit numeric type.\n\n pub fn u8() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::uint8) })\n\n }\n\n\n\n /// Returns the signed 8-bit numeric type.\n\n pub fn i8() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::sint8) })\n\n }\n\n\n\n /// Returns the unsigned 16-bit numeric type.\n\n pub fn u16() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::uint16) })\n\n }\n\n\n\n /// Returns the signed 16-bit numeric type.\n", "file_path": "src/middle/types.rs", "rank": 23, "score": 24590.503741611614 }, { "content": " pub fn i16() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::sint16) })\n\n }\n\n\n\n /// Returns the unsigned 32-bit numeric type.\n\n pub fn u32() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::uint32) })\n\n }\n\n\n\n /// Returns the signed 32-bit numeric type.\n\n pub fn i32() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::sint32) })\n\n }\n\n\n\n /// Returns the unsigned 64-bit numeric type.\n\n pub fn u64() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::uint64) })\n\n }\n\n\n\n /// Returns the signed 64-bit numeric type.\n", "file_path": "src/middle/types.rs", "rank": 24, "score": 24590.503741611614 }, { "content": "mod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn create_u64() {\n\n Type::u64();\n\n }\n\n\n\n #[test]\n\n fn clone_u64() {\n\n Type::u64().clone().clone();\n\n }\n\n\n\n #[test]\n\n fn create_struct() {\n\n Type::structure(vec![Type::i64(),\n\n Type::i64(),\n\n Type::u64()]);\n\n }\n\n\n\n #[test]\n\n fn clone_struct() {\n\n Type::structure(vec![Type::i64(),\n\n Type::i64(),\n\n Type::u64()]).clone().clone();\n\n }\n\n\n\n}\n", "file_path": "src/middle/types.rs", "rank": 25, "score": 24589.678710749584 }, { "content": "///\n\n/// # 
Warning\n\n///\n\n/// This type does not obey the ABI, and as such should not be passed by\n\n/// value to or from a C or C++ function. Passing it via a pointer is\n\n/// okay. Theoretically, passing it via libffi is okay, but libffi\n\n/// doesn’t have complex support on most platforms yet.\n\n#[allow(non_camel_case_types)]\n\n#[cfg(feature = \"complex\")]\n\npub type c_c64 = [f64; 2];\n\n\n\n/// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n#[cfg(feature = \"complex\")]\n\nimpl_ffi_type!(c_c32, c32);\n\n/// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n#[cfg(feature = \"complex\")]\n\nimpl_ffi_type!(c_c64, c64);\n\n\n\nunsafe impl<T> CType for *const T {\n\n fn reify() -> Type<Self> { Type::make(middle::Type::pointer()) }\n\n}\n\n\n\nunsafe impl<T> CType for *mut T {\n\n fn reify() -> Type<Self> { Type::make(middle::Type::pointer()) }\n\n}\n", "file_path": "src/high/types.rs", "rank": 26, "score": 24589.432177727434 }, { "content": " /// Returns the C `longlong` type.\n\n pub fn c_longlong() -> Self {\n\n match_size_signed!(c_longlong)\n\n }\n\n\n\n /// Returns the C `unsigned longlong` type.\n\n pub fn c_ulonglong() -> Self {\n\n match_size_unsigned!(c_ulonglong)\n\n }\n\n\n\n /// Returns the C `float` (32-bit floating point) type.\n\n pub fn f32() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::float) })\n\n }\n\n\n\n /// Returns the C `double` (64-bit floating point) type.\n\n pub fn f64() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::double) })\n\n }\n\n\n", "file_path": "src/middle/types.rs", "rank": 27, "score": 24589.303788179892 }, { "content": " /// Returns the C `void*` type, for passing any kind of pointer.\n\n pub fn pointer() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::pointer) })\n\n }\n\n\n\n /// Returns the C `long double` (extended-precision floating point) type.\n\n pub fn longdouble() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::longdouble) })\n\n }\n\n\n\n /// Returns the C `_Complex float` type.\n\n ///\n\n /// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n #[cfg(feature = \"complex\")]\n\n pub fn c32() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::complex_float) })\n\n }\n\n\n\n /// Returns the C `_Complex double` type.\n\n ///\n", "file_path": "src/middle/types.rs", "rank": 28, "score": 24589.163866449562 }, { "content": " /// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n #[cfg(feature = \"complex\")]\n\n pub fn c64() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::complex_double) })\n\n }\n\n\n\n /// Returns the C `_Complex long double` type.\n\n ///\n\n /// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n #[cfg(feature = \"complex\")]\n\n pub fn complex_longdouble() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::complex_longdouble) })\n\n }\n\n\n\n /// Constructs a structure type whose fields have the given types.\n\n pub fn structure<I>(fields: I) -> Self\n\n where I: IntoIterator<Item=Type>,\n\n I::IntoIter: ExactSizeIterator<Item=Type>\n\n {\n\n Type(unsafe {\n", "file_path": "src/middle/types.rs", "rank": 29, "score": 24589.050024655506 }, { "content": " }\n\n}\n\n\n\nimpl Clone for TypeArray {\n\n fn clone(&self) -> Self {\n\n TypeArray(unsafe {\n\n Unique::new(ffi_type_array_clone(*self.0))\n\n })\n\n }\n\n}\n\n\n\nmacro_rules! 
match_size_signed {\n\n ( $name:ident ) => {\n\n match mem::size_of::<libc::$name>() {\n\n 1 => Self::i8(),\n\n 2 => Self::i16(),\n\n 4 => Self::i32(),\n\n 8 => Self::i64(),\n\n _ => panic!(\"Strange size for C type\"),\n\n }\n", "file_path": "src/middle/types.rs", "rank": 30, "score": 24588.935437822 }, { "content": " ffi_type_array_destroy((*victim).elements);\n\n libc::free(victim as *mut libc::c_void);\n\n }\n\n}\n\n\n\nimpl Drop for Type {\n\n fn drop(&mut self) {\n\n unsafe { ffi_type_destroy(*self.0) }\n\n }\n\n}\n\n\n\nimpl Drop for TypeArray {\n\n fn drop(&mut self) {\n\n unsafe { ffi_type_array_destroy(*self.0) }\n\n }\n\n}\n\n\n\nimpl Clone for Type {\n\n fn clone(&self) -> Self {\n\n Type(unsafe { Unique::new(ffi_type_clone(*self.0)) })\n", "file_path": "src/middle/types.rs", "rank": 31, "score": 24588.872645038973 }, { "content": "\n\n/// Creates a struct type from a raw array of element types.\n\nunsafe fn ffi_type_struct_create_raw(elements: Owned<TypeArray_>)\n\n -> Owned<Type_>\n\n{\n\n let new = libc::malloc(mem::size_of::<low::ffi_type>()) as Type_;\n\n assert!(!new.is_null(),\n\n \"ffi_type_struct_create_raw: out of memory\");\n\n\n\n (*new).size = 0;\n\n (*new).alignment = 0;\n\n (*new).type_ = low::type_tag::STRUCT;\n\n (*new).elements = elements;\n\n\n\n new\n\n}\n\n\n\n/// Creates a struct `ffi_type` with the given elements. Takes ownership\n\n/// of the elements.\n\nunsafe fn ffi_type_struct_create<I>(elements: I) -> Owned<Type_>\n", "file_path": "src/middle/types.rs", "rank": 32, "score": 24588.481961810496 }, { "content": " pub fn i64() -> Self {\n\n Type(unsafe { Unique::new(&mut low::types::sint64) })\n\n }\n\n\n\n /// Returns the C equivalent of Rust `usize` (`u16`).\n\n #[cfg(target_pointer_width = \"16\")]\n\n pub fn usize() -> Self {\n\n Self::u16()\n\n }\n\n\n\n /// Returns the C equivalent of Rust `isize` (`i16`).\n\n #[cfg(target_pointer_width = \"16\")]\n\n pub fn isize() -> Self {\n\n Self::i16()\n\n }\n\n\n\n /// Returns the C equivalent of Rust `usize` (`u32`).\n\n #[cfg(target_pointer_width = \"32\")]\n\n pub fn usize() -> Self {\n\n Self::u32()\n", "file_path": "src/middle/types.rs", "rank": 33, "score": 24588.12947524582 }, { "content": " }\n\n}\n\n\n\nmacro_rules! match_size_unsigned {\n\n ( $name:ident ) => {\n\n match mem::size_of::<libc::$name>() {\n\n 1 => Self::u8(),\n\n 2 => Self::u16(),\n\n 4 => Self::u32(),\n\n 8 => Self::u64(),\n\n _ => panic!(\"Strange size for C type\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Type {\n\n /// Returns the representation of the C `void` type.\n\n ///\n\n /// This is used only for the return type of a CIF, not for an\n\n /// argument or struct member.\n", "file_path": "src/middle/types.rs", "rank": 34, "score": 24588.005572728387 }, { "content": "\n\nimpl_ffi_type!(u8);\n\nimpl_ffi_type!(i8);\n\nimpl_ffi_type!(u16);\n\nimpl_ffi_type!(i16);\n\nimpl_ffi_type!(u32);\n\nimpl_ffi_type!(i32);\n\nimpl_ffi_type!(u64);\n\nimpl_ffi_type!(i64);\n\nimpl_ffi_type!(f32);\n\nimpl_ffi_type!(f64);\n\nimpl_ffi_type!(usize);\n\nimpl_ffi_type!(isize);\n\nimpl_ffi_type!((), void);\n\n\n\n// Why is the complex stuff even here? It doesn’t work yet because\n\n// libffi doesn’t support it, so it should probably go away and come\n\n// back when it’s actually useful. 
Also, the definitions for c_c32 and\n\n// c_c64 should come from elsewhere (the num package?), but that\n\n// elsewhere doesn’t seem to exist yet.\n", "file_path": "src/high/types.rs", "rank": 35, "score": 24587.092356239424 }, { "content": " where I: ExactSizeIterator<Item=Type>\n\n{\n\n ffi_type_struct_create_raw(ffi_type_array_create(elements))\n\n}\n\n\n\n/// Makes a copy of a type array.\n\nunsafe fn ffi_type_array_clone(old: TypeArray_) -> Owned<TypeArray_> {\n\n let size = ffi_type_array_len(old);\n\n let new = ffi_type_array_create_empty(size);\n\n\n\n for i in 0 .. size {\n\n *new.offset(i as isize) = ffi_type_clone(*old.offset(i as isize));\n\n }\n\n\n\n new\n\n}\n\n\n\n/// Makes a copy of a type.\n\nunsafe fn ffi_type_clone(old: Type_) -> Owned<Type_> {\n\n if (*old).type_ == low::type_tag::STRUCT {\n", "file_path": "src/middle/types.rs", "rank": 36, "score": 24586.96484469649 }, { "content": " {\n\n TypeArray(unsafe {\n\n Unique::new(ffi_type_array_create(elements.into_iter()))\n\n })\n\n }\n\n\n\n /// Gets a raw pointer to the underlying C array of\n\n /// [`ffi_type`](../raw/struct._ffi_type.html)s.\n\n ///\n\n /// The C array is null-terminated.\n\n ///\n\n /// This method may be useful for interacting with the\n\n /// [`low`](../low/index.html) and\n\n /// [`raw`](../raw/index.html) layers.\n\n pub fn as_raw_ptr(&self) -> *mut *mut low::ffi_type {\n\n *self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/middle/types.rs", "rank": 37, "score": 24586.602564112218 }, { "content": "impl fmt::Debug for TypeArray {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_fmt(format_args!(\"TypeArray({:?})\", *self.0))\n\n }\n\n}\n\n\n\n/// Computes the length of a raw `TypeArray_` by searching for the\n\n/// null terminator.\n\nunsafe fn ffi_type_array_len(mut array: TypeArray_) -> usize {\n\n let mut count = 0;\n\n while !(*array).is_null() {\n\n count += 1;\n\n array = array.offset(1);\n\n }\n\n count\n\n}\n\n\n\n/// Creates an empty `TypeArray_` with null terminator.\n\nunsafe fn ffi_type_array_create_empty(len: usize) -> Owned<TypeArray_> {\n\n let array = libc::malloc((len + 1) * mem::size_of::<Type_>())\n", "file_path": "src/middle/types.rs", "rank": 38, "score": 24586.45098589962 }, { "content": " Type {\n\n untyped: untyped,\n\n _marker: PhantomData,\n\n }\n\n }\n\n\n\n /// Gets the underlying representation as used by the\n\n /// [`middle`](../../middle/index.html) layer.\n\n pub fn into_middle(self) -> middle::Type {\n\n self.untyped\n\n }\n\n}\n\n\n\n/// Types that we can automatically marshall to/from C.\n\n///\n\n/// In particular, for any type `T` that implements `CType`, we can\n\n/// get a `Type<T>` for describing that type.\n\n/// This trait is unsafe to implement because if the libffi type\n\n/// associated with a Rust type doesn’t match then we get\n\n/// undefined behavior.\n", "file_path": "src/high/types.rs", "rank": 39, "score": 24584.946369343445 }, { "content": " /// Returns the C `int` type.\n\n pub fn c_int() -> Self {\n\n match_size_signed!(c_int)\n\n }\n\n\n\n /// Returns the C `unsigned int` type.\n\n pub fn c_uint() -> Self {\n\n match_size_unsigned!(c_uint)\n\n }\n\n\n\n /// Returns the C `long` type.\n\n pub fn c_long() -> Self {\n\n match_size_signed!(c_long)\n\n }\n\n\n\n /// Returns the C `unsigned long` type.\n\n pub fn c_ulong() -> Self {\n\n match_size_unsigned!(c_ulong)\n\n }\n\n\n", "file_path": "src/middle/types.rs", "rank": 40, "score": 24584.517992827215 }, { "content": " }\n\n\n\n /// Returns the C 
equivalent of Rust `isize` (`i32`).\n\n #[cfg(target_pointer_width = \"32\")]\n\n pub fn isize() -> Self {\n\n Self::i32()\n\n }\n\n\n\n /// Returns the C equivalent of Rust `usize` (`u64`).\n\n #[cfg(target_pointer_width = \"64\")]\n\n pub fn usize() -> Self {\n\n Self::u64()\n\n }\n\n\n\n /// Returns the C equivalent of Rust `isize` (`i64`).\n\n #[cfg(target_pointer_width = \"64\")]\n\n pub fn isize() -> Self {\n\n Self::i64()\n\n }\n\n\n", "file_path": "src/middle/types.rs", "rank": 41, "score": 24584.50594743685 }, { "content": " /// Returns the C `signed char` type.\n\n pub fn c_schar() -> Self {\n\n match_size_signed!(c_schar)\n\n }\n\n\n\n /// Returns the C `unsigned char` type.\n\n pub fn c_uchar() -> Self {\n\n match_size_unsigned!(c_uchar)\n\n }\n\n\n\n /// Returns the C `short` type.\n\n pub fn c_short() -> Self {\n\n match_size_signed!(c_short)\n\n }\n\n\n\n /// Returns the C `unsigned short` type.\n\n pub fn c_ushort() -> Self {\n\n match_size_unsigned!(c_ushort)\n\n }\n\n\n", "file_path": "src/middle/types.rs", "rank": 42, "score": 24584.490096479727 }, { "content": " as TypeArray_;\n\n assert!(!array.is_null(),\n\n \"ffi_type_array_create_empty: out of memory\");\n\n *array.offset(len as isize) = ptr::null_mut::<low::ffi_type>() as Type_;\n\n array\n\n}\n\n\n\n/// Creates a null-terminated array of Type_. Takes ownership of\n\n/// the elements.\n\nunsafe fn ffi_type_array_create<I>(elements: I) -> Owned<TypeArray_>\n\n where I: ExactSizeIterator<Item=Type>\n\n{\n\n let size = elements.len();\n\n let new = ffi_type_array_create_empty(size);\n\n for (i, element) in elements.enumerate() {\n\n *new.offset(i as isize) = *element.0;\n\n }\n\n\n\n new\n\n}\n", "file_path": "src/middle/types.rs", "rank": 43, "score": 24584.305069832633 }, { "content": " ffi_type_struct_create_raw(ffi_type_array_clone((*old).elements))\n\n } else {\n\n old\n\n }\n\n}\n\n\n\n/// Destroys a `TypeArray_` and all of its elements.\n\nunsafe fn ffi_type_array_destroy(victim: Owned<TypeArray_>) {\n\n let mut current = victim;\n\n while !(*current).is_null() {\n\n ffi_type_destroy(*current);\n\n current = current.offset(1);\n\n }\n\n\n\n libc::free(victim as *mut libc::c_void);\n\n}\n\n\n\n/// Destroys a `Type_` if it was dynamically allocated.\n\nunsafe fn ffi_type_destroy(victim: Owned<Type_>) {\n\n if (*victim).type_ == low::type_tag::STRUCT {\n", "file_path": "src/middle/types.rs", "rank": 44, "score": 24584.253353102504 }, { "content": "\n\n/// Laid out the same as C11 `float complex` and C++11\n\n/// `std::complex<float>`.\n\n///\n\n/// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n///\n\n/// # Warning\n\n///\n\n/// This type does not obey the ABI, and as such should not be passed by\n\n/// value to or from a C or C++ function. Passing it via a pointer is\n\n/// okay. Theoretically, passing it via libffi is okay, but libffi\n\n/// doesn’t have complex support on most platforms yet.\n\n#[allow(non_camel_case_types)]\n\n#[cfg(feature = \"complex\")]\n\npub type c_c32 = [f32; 2];\n\n\n\n/// Laid out the same as C11 `double complex` and C++11\n\n/// `std::complex<double>`.\n\n///\n\n/// This item is enabled by `#[cfg(feature = \"complex\")]`.\n", "file_path": "src/high/types.rs", "rank": 45, "score": 24582.864613475318 }, { "content": "//! Representations of C types and arrays thereof.\n\n//!\n\n//! These are used to describe the types of the arguments and results of\n\n//! functions. When we construct a CIF (“Call Inter<span></span>Face”),\n\n//! 
we provide a sequence of argument types and a result type, and\n\n//! libffi uses this to figure out how to set up a call to a function\n\n//! with those types.\n\n\n\nuse std::fmt;\n\nuse std::mem;\n\nuse std::ptr;\n\nuse libc;\n\n\n\nuse low;\n\n\n\nuse super::util::Unique;\n\n\n\n// Internally we represent types and type arrays using raw pointers,\n\n// since this is what libffi understands. Below we wrap them with\n\n// types that implement Drop and Clone.\n\n\n", "file_path": "src/middle/types.rs", "rank": 46, "score": 24581.46335894938 }, { "content": "// Converts the raw status type to a `Result`.\n\nfn status_to_result<R>(status: raw::ffi_status, good: R) -> Result<R> {\n\n if status == raw::ffi_status_FFI_OK { Ok(good) }\n\n else if status == raw::ffi_status_FFI_BAD_TYPEDEF { Err(Error::Typedef) }\n\n else if status == raw::ffi_status_FFI_BAD_ABI { Err(Error::Abi) }\n\n // If we don't recognize the status, that is an ABI error:\n\n else { Err(Error::Abi) }\n\n}\n\n\n\n/// Wraps a function pointer of unknown type.\n\n///\n\n/// This is used to make the API a bit easier to understand, and as a\n\n/// simple type lint. As a `repr(C)` struct of one element, it should\n\n/// be safe to transmute between `CodePtr` and `*mut c_void`, or between\n\n/// collections thereof.\n\n#[derive(Clone, Copy, Debug, Hash)]\n\n#[repr(C)]\n\npub struct CodePtr(pub *mut c_void);\n\n\n\n// How useful is this type? Does it need all the methods?\n\nimpl CodePtr {\n", "file_path": "src/low.rs", "rank": 47, "score": 22481.530290462706 }, { "content": "void ffi_type_destroy(ffi_type* type)\n\n{\n\n if (type == NULL) return;\n\n\n\n if (type->type == FFI_TYPE_STRUCT || type->type == FFI_TYPE_COMPLEX) {\n\n ffi_type_destroy_array(type->elements);\n\n free(type);\n\n }\n", "file_path": "examples/c/support.c", "rank": 48, "score": 18600.39387399466 }, { "content": "ffi_type* ffi_type_clone(ffi_type*);\n", "file_path": "examples/c/support.h", "rank": 49, "score": 18597.28269043524 }, { "content": "void ffi_type_destroy(ffi_type*);\n", "file_path": "examples/c/support.h", "rank": 50, "score": 18597.28269043524 }, { "content": "ffi_type* ffi_type_clone(ffi_type* type)\n\n{\n\n if (type == NULL) return NULL;\n\n\n\n if (type->type == FFI_TYPE_STRUCT || type->type == FFI_TYPE_COMPLEX) {\n\n ffi_type* copy = malloc(sizeof(ffi_type));\n\n assert(copy != NULL);\n\n\n\n memcpy(copy, type, sizeof(ffi_type));\n\n copy->elements = ffi_type_clone_array(type->elements);\n\n\n\n return copy;\n\n } else\n\n return type;\n", "file_path": "examples/c/support.c", "rank": 51, "score": 18597.28269043524 }, { "content": "ffi_type** ffi_type_clone_array(ffi_type** types)\n\n{\n\n size_t size = 1;\n\n for (ffi_type** curr = types; *curr != NULL; ++curr)\n\n ++size;\n\n\n\n ffi_type** copy = malloc(size * sizeof(ffi_type*));\n\n assert(copy != NULL);\n\n\n\n for (size_t i = 0; i < size; ++i)\n\n copy[i] = ffi_type_clone(types[i]);\n\n\n\n return copy;\n", "file_path": "examples/c/support.c", "rank": 52, "score": 17734.229473781626 }, { "content": "void ffi_type_destroy_array(ffi_type**);\n", "file_path": "examples/c/support.h", "rank": 53, "score": 17734.229473781626 }, { "content": "void ffi_type_destroy_array(ffi_type** types)\n\n{\n\n for (ffi_type** curr = types; *curr != NULL; ++curr)\n\n ffi_type_destroy(*curr);\n\n\n\n free(types);\n", "file_path": "examples/c/support.c", "rank": 54, "score": 17734.229473781626 }, { "content": "ffi_type** ffi_type_clone_array(ffi_type**);\n", "file_path": "examples/c/support.h", "rank": 55, 
"score": 17734.229473781626 }, { "content": " /// Defaults to the platform’s default calling convention; this\n\n /// can be adjusted using [`set_abi`](#method.set_abi).\n\n pub fn new<I>(args: I, result: Type) -> Self\n\n where I: IntoIterator<Item=Type>,\n\n I::IntoIter: ExactSizeIterator<Item=Type>\n\n {\n\n let args = args.into_iter();\n\n let nargs = args.len();\n\n let args = types::TypeArray::new(args);\n\n let mut cif: low::ffi_cif = Default::default();\n\n\n\n unsafe {\n\n low::prep_cif(&mut cif,\n\n low::ffi_abi_FFI_DEFAULT_ABI,\n\n nargs,\n\n result.as_raw_ptr(),\n\n args.as_raw_ptr())\n\n }.expect(\"low::prep_cif\");\n\n\n\n // Note that cif retains references to args and result,\n", "file_path": "src/middle/mod.rs", "rank": 56, "score": 19.5160713824369 }, { "content": "/// }\n\n///\n\n/// use libffi::middle::*;\n\n///\n\n/// let args = vec![Type::f64(), Type::pointer()];\n\n/// let cif = Cif::new(args.into_iter(), Type::f64());\n\n///\n\n/// let n = unsafe { cif.call(CodePtr(add as *mut _), &[arg(&5), arg(&&6)]) };\n\n/// assert_eq!(11, n);\n\n/// ```\n\n#[derive(Debug)]\n\npub struct Cif {\n\n cif: low::ffi_cif,\n\n args: types::TypeArray,\n\n result: Type,\n\n}\n\n\n\n// To clone a Cif we need to clone the types and then make sure the new\n\n// ffi_cif refers to the clones of the types.\n\nimpl Clone for Cif {\n", "file_path": "src/middle/mod.rs", "rank": 57, "score": 18.8153599428654 }, { "content": "mod util;\n\n\n\nmod types;\n\npub use self::types::Type;\n\n\n\nmod builder;\n\npub use self::builder::Builder;\n\n\n\n/// Contains an untyped pointer to a function argument.\n\n///\n\n/// When calling a function via a [CIF](struct.Cif.html), each argument\n\n/// must be passed as a C `void*`. Wrapping the argument in the `Arg`\n\n/// struct accomplishes the necessary coercion.\n\n#[derive(Clone, Debug)]\n\n#[repr(C)]\n\npub struct Arg(*mut c_void);\n\n\n\nimpl Arg {\n\n /// Coerces an argument reference into the `Arg` type.\n\n ///\n", "file_path": "src/middle/mod.rs", "rank": 58, "score": 18.093935481935027 }, { "content": " pub struct $cif<$( $T, )* R> {\n\n untyped: middle::Cif,\n\n _marker: PhantomData<fn($( $T, )*) -> R>,\n\n }\n\n\n\n impl<$( $T, )* R> $cif<$( $T, )* R> {\n\n /// Creates a new statically-typed CIF with the given argument\n\n /// and result types.\n\n #[allow(non_snake_case)]\n\n pub fn new($( $T: Type<$T>, )* result: Type<R>) -> Self {\n\n let cif = middle::Cif::new(\n\n vec![$( $T.into_middle() ),*].into_iter(),\n\n result.into_middle());\n\n $cif { untyped: cif, _marker: PhantomData }\n\n }\n\n\n\n /// Sets the CIF to use the given calling convention.\n\n pub fn set_abi(&mut self, abi: FfiAbi) {\n\n self.untyped.set_abi(abi);\n\n }\n", "file_path": "src/high/mod.rs", "rank": 59, "score": 17.55027607846853 }, { "content": "//! };\n\n//!\n\n//! let closure = ClosureOnce3::new(f);\n\n//! let call = closure.code_ptr();\n\n//!\n\n//! assert_eq!(12, call(2, 3, 4));\n\n//! ```\n\n//!\n\n//! Invoking the closure a second time will panic.\n\n\n\npub use middle::{FfiAbi, ffi_abi_FFI_DEFAULT_ABI};\n\n\n\npub mod types;\n\npub use self::types::{Type, CType};\n\n\n\n#[macro_use]\n\npub mod call;\n\npub use self::call::*;\n\n\n\nmacro_rules! 
define_closure_mod {\n", "file_path": "src/high/mod.rs", "rank": 60, "score": 16.44970967498906 }, { "content": " /// Gets the code pointer typed as a `void*`.\n\n ///\n\n /// This is the other common type used in APIs (or at least in\n\n /// libffi) for untyped callback arguments.\n\n pub fn as_mut_ptr(&self) -> *mut c_void {\n\n self.0\n\n }\n\n}\n\n\n\npub use raw::{ffi_abi, ffi_abi_FFI_DEFAULT_ABI, _ffi_type as ffi_type, ffi_status,\n\n ffi_cif, ffi_closure};\n\n\n\n/// Re-exports the `ffi_type` objects used to describe the types of\n\n/// arguments and results.\n\n///\n\n/// These are from [`raw`](../../raw/index.html), but are renamed by\n\n/// removing the `ffi_type_` prefix. For example, `raw::ffi_type_void`\n\n/// becomes `low::types::void`.\n\npub mod types {\n\n pub use raw::{ffi_type_void as void,\n", "file_path": "src/low.rs", "rank": 61, "score": 15.604911271409797 }, { "content": " result: &mut R,\n\n args: &($( &$T, )*),\n\n userdata: &U);\n\n\n\n /// An immutable, typed closure with the given argument and result\n\n /// types.\n\n pub struct $closure<'a, $( $T, )* R> {\n\n untyped: middle::Closure<'a>,\n\n _marker: PhantomData<fn($( $T, )*) -> R>,\n\n }\n\n\n\n impl<'a, $($T: CType,)* R: CType> $closure<'a, $($T,)* R> {\n\n /// Constructs a typed closure callable from C from a\n\n /// Rust closure.\n\n pub fn new<Callback>(callback: &'a Callback) -> Self\n\n where Callback: Fn($( $T, )*) -> R + 'a\n\n {\n\n Self::new_with_cif($cif::reify(), callback)\n\n }\n\n }\n", "file_path": "src/high/mod.rs", "rank": 62, "score": 15.574011266299419 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse middle;\n\npub use middle::CodePtr;\n\n\n\n/// Encapsulates an argument with its type information.\n\n///\n\n/// In order to set up calls using [`call`](index.html#method.call), we\n\n/// need to wrap (a reference to) each argument in an `Arg`. 
The usual\n\n/// way to do this is with function [`arg`](fn.arg.html).\n\n#[derive(Clone, Debug)]\n\npub struct Arg<'a> {\n\n // There should be some type T such that type_ is the middle-layer\n\n // value of Type<T> and value is T::reify().\n\n type_: middle::Type,\n\n value: middle::Arg,\n\n _marker: PhantomData<&'a ()>,\n\n}\n\n\n\nimpl<'a> Arg<'a> {\n", "file_path": "src/high/call.rs", "rank": 63, "score": 15.17724111932141 }, { "content": "/// use libffi::low::*;\n\n///\n\n/// let mut args: [*mut ffi_type; 2] = unsafe {\n\n/// [ &mut types::sint32,\n\n/// &mut types::uint64 ]\n\n/// };\n\n/// let mut cif: ffi_cif = Default::default();\n\n///\n\n/// unsafe {\n\n/// prep_cif(&mut cif, ffi_abi_FFI_DEFAULT_ABI, 2,\n\n/// &mut types::pointer, args.as_mut_ptr())\n\n/// }.unwrap();\n\n/// ```\n\npub unsafe fn prep_cif(cif: *mut ffi_cif,\n\n abi: ffi_abi,\n\n nargs: usize,\n\n rtype: *mut ffi_type,\n\n atypes: *mut *mut ffi_type)\n\n -> Result<()>\n\n{\n", "file_path": "src/low.rs", "rank": 64, "score": 14.92301580069277 }, { "content": "///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::os::raw::c_void;\n\n/// use libffi::low::*;\n\n///\n\n/// extern \"C\" fn c_function(a: u64, b: u64) -> u64 { a + b }\n\n///\n\n/// let result = unsafe {\n\n/// let mut args: Vec<*mut ffi_type> = vec![ &mut types::uint64,\n\n/// &mut types::uint64 ];\n\n/// let mut cif: ffi_cif = Default::default();\n\n///\n\n/// prep_cif(&mut cif, ffi_abi_FFI_DEFAULT_ABI, 2,\n\n/// &mut types::uint64, args.as_mut_ptr()).unwrap();\n\n///\n\n/// call(&mut cif, CodePtr(c_function as *mut _),\n\n/// vec![ &mut 4u64 as *mut _ as *mut c_void,\n\n/// &mut 5u64 as *mut _ as *mut c_void ].as_mut_ptr())\n", "file_path": "src/low.rs", "rank": 65, "score": 14.338769996588399 }, { "content": "/// `ffi_type` as follows using `type_tag::STRUCT`:\n\n///\n\n/// ```\n\n/// use std::ptr;\n\n/// use libffi::low::{ffi_type, types, type_tag};\n\n///\n\n/// let mut elements = unsafe {\n\n/// [ &mut types::uint16,\n\n/// &mut types::uint64,\n\n/// ptr::null_mut::<ffi_type>() ]\n\n/// };\n\n///\n\n/// let mut my_struct: ffi_type = Default::default();\n\n/// my_struct.type_ = type_tag::STRUCT;\n\n/// my_struct.elements = elements.as_mut_ptr();\n\n/// ```\n\npub mod type_tag {\n\n use raw;\n\n use std::os::raw::c_ushort;\n\n\n", "file_path": "src/low.rs", "rank": 66, "score": 14.12140845824107 }, { "content": " /// This is used to wrap each argument pointer before passing them\n\n /// to [`Cif::call`](struct.Cif.html#method.call).\n\n pub fn new<T>(r: &T) -> Self {\n\n Arg(r as *const T as *mut c_void)\n\n }\n\n}\n\n\n\n/// Coerces an argument reference into the [`Arg`](struct.Arg.html)\n\n/// type.\n\n///\n\n/// This is used to wrap each argument pointer before passing them\n\n/// to [`Cif::call`](struct.Cif.html#method.call).\n\n/// (This is the same as [`Arg::new`](struct.Arg.html#method.new)).\n", "file_path": "src/middle/mod.rs", "rank": 67, "score": 13.362180623384226 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use low;\n\n use super::*;\n\n use std::mem;\n\n use std::os::raw::c_void;\n\n\n\n #[test]\n\n fn call() {\n\n let cif = Cif::new(vec![Type::i64(), Type::i64()].into_iter(),\n\n Type::i64());\n\n let f = |m: i64, n: i64| -> i64 {\n\n unsafe { cif.call(CodePtr(add_it as *mut c_void),\n\n &[arg(&m), arg(&n)]) }\n\n };\n\n\n\n assert_eq!(12, f(5, 7));\n\n assert_eq!(13, f(6, 7));\n\n assert_eq!(15, f(8, 7));\n\n }\n", "file_path": "src/middle/mod.rs", "rank": 68, "score": 12.925161191663415 }, { "content": " /// Indicates a 
structure type.\n\n pub const STRUCT: c_ushort = raw::ffi_type_enum_STRUCT as c_ushort;\n\n\n\n /// Indicates a complex number type.\n\n ///\n\n /// This item is enabled by `#[cfg(feature = \"complex\")]`.\n\n #[cfg(feature = \"complex\")]\n\n pub const COMPLEX: c_ushort = raw::ffi_type_enum_COMPLEX as c_ushort;\n\n}\n\n\n\n/// Initalizes a CIF (Call Interface) with the given ABI\n\n/// and types.\n\n///\n\n/// We need to initialize a CIF before we can use it to call a function\n\n/// or create a closure. This function lets us specify the calling\n\n/// convention to use and the argument and result types. For varargs\n\n/// CIF initialization, see [`prep_cif_var`](fn.prep_cif_var.html).\n\n///\n\n///\n\n/// # Safety\n", "file_path": "src/low.rs", "rank": 69, "score": 12.90033505510763 }, { "content": " }\n\n\n\n impl<$( $T: CType, )* R: CType> $cif<$( $T, )* R> {\n\n /// Creates a new statically-typed CIF by reifying the\n\n /// argument types as `Type<T>`s.\n\n pub fn reify() -> Self {\n\n Self::new($( $T::reify(), )* R::reify())\n\n }\n\n }\n\n\n\n // We use tuples of pointers to describe the arguments, and we\n\n // extract them by pattern matching. This assumes that a tuple\n\n // of pointers will be laid out packed and in order. This seems\n\n // to hold true right now, and I can’t think of a reason why it\n\n // wouldn’t be that way, but technically it may be undefined\n\n // behavior.\n\n\n\n /// The type of function called from an immutable, typed closure.\n\n pub type $callback<U, $( $T, )* R>\n\n = extern \"C\" fn(cif: &::low::ffi_cif,\n", "file_path": "src/high/mod.rs", "rank": 70, "score": 12.723098418281415 }, { "content": " /// Constructs a typed closure callable from C from a\n\n /// Rust closure.\n\n pub fn new<Callback>(callback: Callback) -> Self\n\n where Callback: FnOnce($( $T, )*) -> R + Any\n\n {\n\n Self::new_with_cif($cif::reify(), callback)\n\n }\n\n }\n\n\n\n impl<$( $T: Copy, )* R> $closure_once<$( $T, )* R> {\n\n /// Constructs a one-shot closure callable from C from a CIF\n\n /// describing the calling convention for the resulting\n\n /// function and the Rust closure to call.\n\n pub fn new_with_cif<Callback>(cif: $cif<$( $T, )* R>,\n\n callback: Callback) -> Self\n\n where Callback: FnOnce($( $T, )*) -> R + Any\n\n {\n\n Self::from_parts(cif,\n\n Self::static_callback,\n\n callback)\n", "file_path": "src/high/mod.rs", "rank": 71, "score": 12.722734127091048 }, { "content": " pub fn new<Callback>(callback: &'a mut Callback) -> Self\n\n where Callback: FnMut($( $T, )*) -> R + 'a\n\n {\n\n Self::new_with_cif($cif::reify(), callback)\n\n }\n\n }\n\n\n\n impl<'a, $( $T, )* R> $closure_mut<'a, $( $T, )* R> {\n\n /// Gets the C code pointer that is used to invoke the\n\n /// closure.\n\n pub fn code_ptr(&self) -> &extern \"C\" fn($( $T, )*) -> R {\n\n unsafe {\n\n mem::transmute(self.untyped.code_ptr())\n\n }\n\n }\n\n\n\n /// Constructs a typed closure callable from C from a CIF\n\n /// describing the calling convention for the resulting\n\n /// function, a callback for the function to call, and\n\n /// userdata to pass to the callback.\n", "file_path": "src/high/mod.rs", "rank": 72, "score": 12.51297925549519 }, { "content": "use std::marker::PhantomData;\n\nuse std::ops::Deref;\n\n\n\npub struct Unique<T> {\n\n contents: *mut T,\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> Deref for Unique<T> {\n\n type Target = *mut T;\n\n fn deref(&self) -> &Self::Target {\n\n &self.contents\n\n }\n\n}\n\n\n\nimpl<T> Unique<T> {\n\n pub unsafe fn new(ptr: *mut T) -> Self 
{\n\n Unique {\n\n contents: ptr,\n\n _marker: PhantomData,\n\n }\n\n }\n\n}\n", "file_path": "src/middle/util.rs", "rank": 73, "score": 12.49252777283387 }, { "content": " /// Wraps an argument reference for passing to `high::call::call`.\n\n ///\n\n /// For a shorter alias of the same, see\n\n /// [`high::call::arg`](fn.arg.html).\n\n pub fn new<T: super::CType>(arg: &'a T) -> Self {\n\n Arg {\n\n type_: T::reify().into_middle(),\n\n value: middle::Arg::new(arg),\n\n _marker: PhantomData,\n\n }\n\n }\n\n}\n\n\n\n/// Constructs an [`Arg`](struct.Arg.html) for passing to\n\n/// [`call`](fn.call.html).\n", "file_path": "src/high/call.rs", "rank": 74, "score": 12.486237363836024 }, { "content": "/// let arg1 = **args.offset(0);\n\n/// let arg2 = **args.offset(1);\n\n///\n\n/// *result = userdata(arg1, arg2);\n\n/// }\n\n///\n\n/// let cif = Cif::new(vec![Type::u64(), Type::u64()].into_iter(),\n\n/// Type::u64());\n\n/// let lambda = |x: u64, y: u64| x + y;\n\n/// let closure = Closure::new(cif, lambda_callback, &lambda);\n\n///\n\n/// unsafe {\n\n/// let fun: &unsafe extern \"C\" fn(u64, u64) -> u64\n\n/// = mem::transmute(closure.code_ptr());\n\n///\n\n/// assert_eq!(11, fun(5, 6));\n\n/// assert_eq!(12, fun(5, 7));\n\n/// }\n\n/// ```\n\n#[derive(Debug)]\n", "file_path": "src/middle/mod.rs", "rank": 75, "score": 12.451380233658844 }, { "content": " fn clone(&self) -> Self {\n\n let mut copy = Cif {\n\n cif: self.cif,\n\n args: self.args.clone(),\n\n result: self.result.clone(),\n\n };\n\n\n\n copy.cif.arg_types = copy.args.as_raw_ptr();\n\n copy.cif.rtype = copy.result.as_raw_ptr();\n\n\n\n copy\n\n }\n\n}\n\n\n\nimpl Cif {\n\n /// Creates a new CIF for the given argument and result types.\n\n ///\n\n /// Takes ownership of the argument and result\n\n /// [`Type`](types/struct.Type.html)s, because the resulting\n\n /// `Cif` retains references to them.\n", "file_path": "src/middle/mod.rs", "rank": 76, "score": 12.30285311117094 }, { "content": " let closure\n\n = middle::Closure::new(cif.untyped,\n\n callback,\n\n userdata);\n\n $closure {\n\n untyped: closure,\n\n _marker: PhantomData,\n\n }\n\n }\n\n }\n\n\n\n impl<'a, $( $T: Copy, )* R> $closure<'a, $( $T, )* R> {\n\n /// Constructs a typed closure callable from C from a CIF\n\n /// describing the calling convention for the resulting\n\n /// function and the Rust closure to call.\n\n pub fn new_with_cif<Callback>(cif: $cif<$( $T, )* R>,\n\n callback: &'a Callback) -> Self\n\n where Callback: Fn($( $T, )*) -> R + 'a\n\n {\n\n Self::from_parts(cif,\n", "file_path": "src/high/mod.rs", "rank": 77, "score": 10.868851366912338 }, { "content": "///\n\n/// - `cif` — the CIF to initialize\n\n/// - `abi` — the calling convention to use\n\n/// - `nfixedargs` — the number of fixed arguments\n\n/// - `ntotalargs` — the total number of arguments, including fixed and\n\n/// var args\n\n/// - `rtype` — the result type\n\n/// - `atypes` — the argument types (length must be at least `nargs`)\n\n///\n\n/// # Result\n\n///\n\n/// `Ok(())` for success or `Err(e)` for failure.\n\n///\n\npub unsafe fn prep_cif_var(cif: *mut ffi_cif,\n\n abi: ffi_abi,\n\n nfixedargs: usize,\n\n ntotalargs: usize,\n\n rtype: *mut ffi_type,\n\n atypes: *mut *mut ffi_type)\n\n -> Result<()>\n", "file_path": "src/low.rs", "rank": 78, "score": 10.770316857629197 }, { "content": " unsafe extern \"C\" fn callback(_cif: &low::ffi_cif,\n\n result: &mut u64,\n\n args: *const *const c_void,\n\n userdata: &u64)\n\n {\n\n let args: *const &u64 = mem::transmute(args);\n\n *result = **args + 
*userdata;\n\n }\n\n\n\n #[test]\n\n fn rust_lambda() {\n\n let cif = Cif::new(vec![Type::u64(), Type::u64()].into_iter(),\n\n Type::u64());\n\n let env = |x: u64, y: u64| x + y;\n\n let closure = Closure::new(cif, callback2, &env);\n\n\n\n unsafe {\n\n let fun: &unsafe extern \"C\" fn (u64, u64) -> u64\n\n = mem::transmute(closure.code_ptr());\n\n\n", "file_path": "src/middle/mod.rs", "rank": 79, "score": 10.539289424395207 }, { "content": "///\n\n/// assert!((result - 5f32).abs() < 0.0001);\n\n/// ```\n\npub unsafe fn call<R: super::CType>(fun: CodePtr, args: &[Arg]) -> R {\n\n let types = args.into_iter().map(|arg| arg.type_.clone());\n\n let cif = middle::Cif::new(types, R::reify().into_middle());\n\n\n\n let values = args.into_iter().map(|arg| arg.value.clone())\n\n .collect::<Vec<_>>();\n\n cif.call(fun, &values)\n\n}\n\n\n\n/// Performs a dynamic call to a C function.\n\n///\n\n/// This macro provides sugar for `call::arg` and `call::call`. For more\n\n/// control, see [`high::call::call`](high/call/fn.call.html).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n", "file_path": "src/high/call.rs", "rank": 80, "score": 10.468375646770209 }, { "content": "\n\n assert_eq!(self.cif.nargs as usize, args.len(),\n\n \"Cif::call: passed wrong number of arguments\");\n\n\n\n low::call::<R>(&self.cif as *const _ as *mut _,\n\n fun,\n\n mem::transmute::<*const Arg,\n\n *mut *mut c_void>(args.as_ptr()))\n\n }\n\n\n\n /// Sets the CIF to use the given calling convention.\n\n pub fn set_abi(&mut self, abi: FfiAbi) {\n\n self.cif.abi = abi;\n\n }\n\n\n\n /// Gets a raw pointer to the underlying\n\n /// [`ffi_cif`](../low/struct.ffi_cif.html).\n\n ///\n\n /// This can be used for passing a `middle::Cif` to functions from the\n\n /// [`low`](../low/index.html) and [`raw`](../raw/index.html) modules.\n", "file_path": "src/middle/mod.rs", "rank": 81, "score": 10.271203862179842 }, { "content": "//! A low-level wrapping of libffi, this layer makes no attempts at safety,\n\n//! but tries to provide a somewhat more idiomatic interface.\n\n//!\n\n//! This module also re-exports types and constants necessary for using the\n\n//! library, so it should not be generally necessary to use the `raw` module.\n\n//! While this is a bit “Rustier” than [`raw`](../raw/index.html), I’ve\n\n//! avoided drastic renaming in favor of hewing close to the libffi API.\n\n//! 
See [`middle`](../middle/index.html) for an easier-to-use approach.\n\n\n\nuse std::mem;\n\nuse std::os::raw::{c_void, c_uint};\n\n\n\nuse raw;\n\n\n\n/// The two kinds of errors reported by libffi.\n\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum Error {\n\n /// Given a bad or unsupported type representation.\n\n Typedef,\n\n /// Given a bad or unsupported ABI.\n\n Abi,\n\n}\n\n\n\n/// The `Result` type specialized for libffi `Error`s.\n\npub type Result<T> = ::std::result::Result<T, Error>;\n\n\n\n// Converts the raw status type to a `Result`.\n", "file_path": "src/low.rs", "rank": 82, "score": 9.896137980979788 }, { "content": " /// function and the Rust closure to call.\n\n pub fn new_with_cif<Callback>(cif: $cif<$( $T, )* R>,\n\n callback: &'a mut Callback)\n\n -> Self\n\n where Callback: FnMut($( $T, )*) -> R + 'a\n\n {\n\n Self::from_parts(cif,\n\n Self::static_callback,\n\n callback)\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n extern \"C\" fn static_callback<Callback>\n\n (_cif: &::low::ffi_cif,\n\n result: &mut R,\n\n &($( &$T, )*):\n\n &($( &$T, )*),\n\n userdata: &mut Callback)\n\n where Callback: FnMut($( $T, )*) -> R + 'a\n\n {\n", "file_path": "src/high/mod.rs", "rank": 83, "score": 9.316983795854705 }, { "content": "/// args: *const *const c_void,\n\n/// userdata: &u64)\n\n/// {\n\n/// let args: *const &u64 = mem::transmute(args);\n\n/// *result = **args + *userdata;\n\n/// }\n\n///\n\n/// fn twice(f: extern \"C\" fn(u64) -> u64, x: u64) -> u64 {\n\n/// f(f(x))\n\n/// }\n\n///\n\n/// unsafe {\n\n/// let mut cif: ffi_cif = Default::default();\n\n/// let mut args = [&mut types::uint64 as *mut _];\n\n/// let mut userdata: u64 = 5;\n\n///\n\n/// prep_cif(&mut cif, ffi_abi_FFI_DEFAULT_ABI, 1, &mut types::uint64,\n\n/// args.as_mut_ptr()).unwrap();\n\n///\n\n/// let (closure, code) = closure_alloc();\n", "file_path": "src/low.rs", "rank": 84, "score": 9.270962318805479 }, { "content": "//! Middle layer providing a somewhat safer (but still quite unsafe)\n\n//! API.\n\n//!\n\n//! The main idea of the middle layer is to wrap types\n\n//! [`ffi_cif`](../raw/struct.ffi_cif.html) and\n\n//! [`ffi_closure`](../raw/struct.ffi_closure.html) as\n\n//! [`Cif`](struct.Cif.html) and [`Closure`](struct.Closure.html),\n\n//! respectively, so that their resources are managed properly. However,\n\n//! calling a function via a CIF or closure is still unsafe because\n\n//! argument types aren’t checked. See the [`high`](../high/index.html)\n\n//! 
layer for closures with type-checked arguments.\n\n\n\nuse std::any::Any;\n\nuse std::os::raw::c_void;\n\nuse std::marker::PhantomData;\n\n\n\nuse low;\n\npub use low::{Callback, CallbackMut, CodePtr,\n\n ffi_abi as FfiAbi, ffi_abi_FFI_DEFAULT_ABI};\n\n\n", "file_path": "src/middle/mod.rs", "rank": 85, "score": 8.895019144908149 }, { "content": " io::stderr().write(b\"FnOnce closure already used\");\n\n process::exit(2);\n\n }\n\n }\n\n }\n\n\n\n impl<$( $T, )* R> $closure_once<$( $T, )* R> {\n\n /// Gets the C code pointer that is used to invoke the\n\n /// closure.\n\n pub fn code_ptr(&self) -> &extern \"C\" fn($( $T, )*) -> R {\n\n unsafe {\n\n mem::transmute(self.untyped.code_ptr())\n\n }\n\n }\n\n\n\n /// Constructs a one-shot closure callable from C from a CIF\n\n /// describing the calling convention for the resulting\n\n /// function, a callback for the function to call, and\n\n /// userdata to pass to the callback.\n\n pub fn from_parts<U: Any>(\n", "file_path": "src/high/mod.rs", "rank": 86, "score": 8.585727324354234 }, { "content": "\n\n /// The type of function called from a mutable, typed closure.\n\n pub type $callback_mut<U, $( $T, )* R>\n\n = extern \"C\" fn(cif: &::low::ffi_cif,\n\n result: &mut R,\n\n args: &($( &$T, )*),\n\n userdata: &mut U);\n\n\n\n /// A mutable, typed closure with the given argument and\n\n /// result types.\n\n pub struct $closure_mut<'a, $( $T, )* R> {\n\n untyped: middle::Closure<'a>,\n\n _marker: PhantomData<fn($( $T, )*) -> R>,\n\n }\n\n\n\n impl<'a, $($T: CType,)* R: CType>\n\n $closure_mut<'a, $($T,)* R>\n\n {\n\n /// Constructs a typed closure callable from C from a\n\n /// Rust closure.\n", "file_path": "src/high/mod.rs", "rank": 87, "score": 8.366027271132435 }, { "content": " cif: $cif<$( $T, )* R>,\n\n callback: $callback_once<U, $( $T, )* R>,\n\n userdata: U)\n\n -> Self\n\n {\n\n let callback: middle::CallbackOnce<U, R>\n\n = unsafe { mem::transmute(callback) };\n\n let closure\n\n = middle::ClosureOnce::new(cif.untyped,\n\n callback,\n\n userdata);\n\n $closure_once {\n\n untyped: closure,\n\n _marker: PhantomData,\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub use self::$module::*;\n", "file_path": "src/high/mod.rs", "rank": 88, "score": 8.291594041758334 }, { "content": "/// f(f(x))\n\n/// }\n\n///\n\n/// unsafe {\n\n/// let mut cif: ffi_cif = Default::default();\n\n/// let mut args = [&mut types::uint64 as *mut _];\n\n/// let mut userdata: u64 = 5;\n\n///\n\n/// prep_cif(&mut cif, ffi_abi_FFI_DEFAULT_ABI, 1, &mut types::uint64,\n\n/// args.as_mut_ptr()).unwrap();\n\n///\n\n/// let (closure, code) = closure_alloc();\n\n/// let add5: extern \"C\" fn(u64) -> u64 = mem::transmute(code);\n\n///\n\n/// prep_closure_mut(closure,\n\n/// &mut cif,\n\n/// callback,\n\n/// &mut userdata,\n\n/// CodePtr(add5 as *mut _)).unwrap();\n\n///\n", "file_path": "src/low.rs", "rank": 89, "score": 8.08369986756736 }, { "content": "/// arguments: a CIF describing its arguments, a pointer for where to\n\n/// store its result, a pointer to an array of pointers to its\n\n/// arguments, and a userdata pointer. 
In this ase, the Rust closure\n\n/// value `lambda` is passed as userdata to `lambda_callback`, which\n\n/// then invokes it.\n\n///\n\n/// ```\n\n/// use std::mem;\n\n/// use std::os::raw::c_void;\n\n///\n\n/// use libffi::middle::*;\n\n/// use libffi::low;\n\n///\n\n/// unsafe extern \"C\" fn lambda_callback<F: Fn(u64, u64) -> u64>(\n\n/// _cif: &low::ffi_cif,\n\n/// result: &mut u64,\n\n/// args: *const *const c_void,\n\n/// userdata: &F)\n\n/// {\n\n/// let args: *const &u64 = mem::transmute(args);\n", "file_path": "src/middle/mod.rs", "rank": 90, "score": 7.779832403606737 }, { "content": "\n\n extern \"C\" fn add_it(n: i64, m: i64) -> i64 {\n\n return n + m;\n\n }\n\n\n\n #[test]\n\n fn closure() {\n\n let cif = Cif::new(vec![Type::u64()].into_iter(), Type::u64());\n\n let env: u64 = 5;\n\n let closure = Closure::new(cif, callback, &env);\n\n\n\n unsafe {\n\n let fun: &unsafe extern \"C\" fn(u64) -> u64\n\n = mem::transmute(closure.code_ptr());\n\n\n\n assert_eq!(11, fun(6));\n\n assert_eq!(12, fun(7));\n\n }\n\n }\n\n\n", "file_path": "src/middle/mod.rs", "rank": 91, "score": 7.767331852722146 }, { "content": "/// For atomic types this tag doesn’t matter because libffi predeclares\n\n/// [an instance of each one](types/index.html). However, for composite\n\n/// types (structs and complex numbers), we need to create a new\n\n/// instance of the `ffi_type` struct. In particular, the `type_` field\n\n/// contains a value that indicates what kind of type is represented,\n\n/// and we use these values to indicate that that we are describing a\n\n/// struct or complex type.\n\n///\n\n/// # Examples\n\n///\n\n/// Suppose we have the following C struct:\n\n///\n\n/// ```c\n\n/// struct my_struct {\n\n/// uint16_t f1;\n\n/// uint64_t f2;\n\n/// };\n\n/// ```\n\n///\n\n/// To pass it by value to a C function we can construct an\n", "file_path": "src/low.rs", "rank": 92, "score": 7.718653697424809 }, { "content": " // which is why we hold onto them here.\n\n Cif {\n\n cif: cif,\n\n args: args,\n\n result: result,\n\n }\n\n }\n\n\n\n /// Calls a function with the given arguments.\n\n ///\n\n /// In particular, this method invokes function `fun` passing it\n\n /// arguments `args`, and returns the result.\n\n ///\n\n /// # Safety\n\n ///\n\n /// There is no checking that the calling convention and types\n\n /// in the `Cif` match the actual calling convention and types of\n\n /// `fun`, nor that they match the types of `args`.\n\n pub unsafe fn call<R>(&self, fun: CodePtr, args: &[Arg]) -> R {\n\n use std::mem;\n", "file_path": "src/middle/mod.rs", "rank": 93, "score": 7.539224649542712 }, { "content": " /// Initializes a code pointer from a function pointer.\n\n ///\n\n /// This is useful mainly for talking to C APIs that take untyped\n\n /// callbacks specified in the API as having type `void(*)()`.\n\n pub fn from_fun(fun: unsafe extern \"C\" fn()) -> Self {\n\n CodePtr(fun as *mut c_void)\n\n }\n\n\n\n /// Initializes a code pointer from a void pointer.\n\n ///\n\n /// This is the other common type used in APIs (or at least in\n\n /// libffi) for untyped callback arguments.\n\n pub fn from_ptr(fun: *const c_void) -> Self {\n\n CodePtr(fun as *mut c_void)\n\n }\n\n\n\n /// Gets the code pointer typed as a C function pointer.\n\n ///\n\n /// This is useful mainly for talking to C APIs that take untyped\n\n /// callbacks specified in the API as having type `void(*)()`.\n", "file_path": "src/low.rs", "rank": 94, "score": 7.496804612788159 }, { "content": " ffi_type_uint8 as uint8,\n\n 
ffi_type_sint8 as sint8,\n\n ffi_type_uint16 as uint16,\n\n ffi_type_sint16 as sint16,\n\n ffi_type_uint32 as uint32,\n\n ffi_type_sint32 as sint32,\n\n ffi_type_uint64 as uint64,\n\n ffi_type_sint64 as sint64,\n\n ffi_type_float as float,\n\n ffi_type_double as double,\n\n ffi_type_pointer as pointer,\n\n ffi_type_longdouble as longdouble};\n\n #[cfg(feature = \"complex\")]\n\n pub use raw::{ffi_type_complex_float as complex_float,\n\n ffi_type_complex_double as complex_double,\n\n ffi_type_complex_longdouble as complex_longdouble};\n\n}\n\n\n\n/// Type tags used in constructing and inspecting `ffi_type`s.\n\n///\n", "file_path": "src/low.rs", "rank": 95, "score": 7.4351171125150115 }, { "content": "pub struct Closure<'a> {\n\n _cif: Box<Cif>,\n\n alloc: *mut ::low::ffi_closure,\n\n code: CodePtr,\n\n _marker: PhantomData<&'a ()>,\n\n}\n\n\n\nimpl<'a> Drop for Closure<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n low::closure_free(self.alloc);\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Closure<'a> {\n\n /// Creates a new closure with immutable userdata.\n\n ///\n\n /// # Arguments\n\n ///\n", "file_path": "src/middle/mod.rs", "rank": 96, "score": 7.279891005188377 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl ClosureOnce {\n\n /// Creates a new closure with owned userdata.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// - `cif` — describes the calling convention and argument and\n\n /// result types\n\n /// - `callback` — the function to call when the closure is invoked\n\n /// - `userdata` — the value to pass to `callback` along with the\n\n /// arguments when the closure is called\n\n ///\n\n /// # Result\n\n ///\n\n /// The new closure.\n\n pub fn new<U: Any, R>(cif: Cif,\n\n callback: CallbackOnce<U, R>,\n", "file_path": "src/middle/mod.rs", "rank": 97, "score": 7.236251332181995 }, { "content": "\n\n impl<'a, $( $T, )* R> $closure<'a, $( $T, )* R> {\n\n /// Gets the C code pointer that is used to invoke the\n\n /// closure.\n\n pub fn code_ptr(&self) -> &extern \"C\" fn($( $T, )*) -> R {\n\n unsafe {\n\n mem::transmute(self.untyped.code_ptr())\n\n }\n\n }\n\n\n\n /// Constructs a typed closure callable from C from a CIF\n\n /// describing the calling convention for the resulting\n\n /// function, a callback for the function to call, and\n\n /// userdata to pass to the callback.\n\n pub fn from_parts<U>(cif: $cif<$( $T, )* R>,\n\n callback: $callback<U, $( $T, )* R>,\n\n userdata: &'a U) -> Self\n\n {\n\n let callback: middle::Callback<U, R>\n\n = unsafe { mem::transmute(callback) };\n", "file_path": "src/high/mod.rs", "rank": 98, "score": 7.201447663396349 }, { "content": " let status = raw::ffi_prep_cif(cif, abi,\n\n nargs as c_uint,\n\n rtype, atypes);\n\n status_to_result(status, ())\n\n}\n\n\n\n/// Initalizes a CIF (Call Interface) for a varargs function.\n\n///\n\n/// We need to initialize a CIF before we can use it to call a function\n\n/// or create a closure. This function lets us specify the calling\n\n/// convention to use and the argument and result types. For non-varargs\n\n/// CIF initialization, see [`prep_cif`](fn.prep_cif.html).\n\n///\n\n/// # Safety\n\n///\n\n/// The CIF `cif` retains references to `rtype` and `atypes`, so if\n\n/// they are no longer live when the CIF is used then the behavior is\n\n/// undefined.\n\n///\n\n/// # Arguments\n", "file_path": "src/low.rs", "rank": 99, "score": 7.132610268955979 } ]
Rust
baustelle/src/containerfile.rs
akhramov/knast
8c9f5f481a467a22c7cc47f11a28fe52536f9950
use std::{convert::TryFrom, fs, io::Read, path::PathBuf};

use anyhow::{Context, Error};
use dockerfile_parser::{
    Dockerfile as Containerfile, FromInstruction,
    Instruction::{self, *},
};
use futures::{
    channel::mpsc::{unbounded, SendError, UnboundedSender},
    future::{self, Future},
    stream::Stream,
    SinkExt, TryFutureExt,
};
use uuid::Uuid;

use registratur::v2::{
    client::Client,
    domain::{config::Config, manifest::Manifest},
};

use crate::{
    fetcher::{Fetcher, LayerDownloadStatus},
    runtime_config::RuntimeConfig,
    storage::{Storage, StorageEngine, BLOBS_STORAGE_KEY},
    unpacker::Unpacker,
};

#[derive(Clone, Debug)]
pub enum EvaluationUpdate {
    From(LayerDownloadStatus),
}

pub struct Builder<'a, T: StorageEngine> {
    fetcher: Fetcher<'a, T>,
    storage: &'a Storage<T>,
    container_folder: PathBuf,
}

impl<'a, T: StorageEngine> Builder<'a, T> {
    #[fehler::throws]
    pub fn new(
        registry_url: &'a str,
        architecture: String,
        os: Vec<String>,
        storage: &'a Storage<T>,
    ) -> Self {
        let client = Client::build(registry_url)?;
        let fetcher = Fetcher::new(storage, client, architecture, os);
        let container_uuid = format!("{}", Uuid::new_v4());
        let container_folder =
            storage.folder().join("containers").join(&container_uuid);

        fs::create_dir_all(&container_folder)?;

        Self {
            fetcher,
            container_folder,
            storage,
        }
    }

    #[fehler::throws]
    pub fn interpret(
        &self,
        file: impl Read,
    ) -> (
        impl Stream<Item = EvaluationUpdate>,
        impl Future<Output = Result<PathBuf, Error>> + '_,
    ) {
        let (sender, receiver) = unbounded();
        let containerfile = Containerfile::from_reader(file)?;

        let result = containerfile.iter_stages().flat_map(|stage| {
            stage.instructions.into_iter().map(|instruction| {
                self.execute_instruction(instruction.clone(), sender.clone())
            })
        });

        let folder = self.container_folder.clone();
        let completion_future =
            future::try_join_all(result).and_then(|_| future::ok(folder));

        (receiver, completion_future)
    }

    #[fehler::throws]
    async fn execute_instruction(
        &self,
        instruction: Instruction,
        sender: UnboundedSender<EvaluationUpdate>,
    ) {
        match instruction {
            From(instruction) => {
                self.execute_from_instruction(instruction, sender).await?;
            }
            _ => {
                log::warn!(
                    "Unhandled containerfile instruction {:?}",
                    instruction
                )
            }
        }
    }

    #[fehler::throws]
    async fn execute_from_instruction(
        &self,
        instruction: FromInstruction,
        sender: UnboundedSender<EvaluationUpdate>,
    ) {
        let image = &instruction.image_parsed;
        let sender = sender.with(|val| {
            future::ok::<_, SendError>(EvaluationUpdate::From(val))
        });

        let default_tag = String::from("latest");
        let tag = image.tag.as_ref().unwrap_or(&default_tag);
        let digest = self.fetcher.fetch(&image.image, &tag, sender).await?;

        let manifest: Manifest =
            self.storage.get(BLOBS_STORAGE_KEY, &digest)?.context(
                "Fetched manifest was not found. Possible storage corruption",
            )?;

        let config: Config = self
            .storage
            .get(BLOBS_STORAGE_KEY, manifest.config.digest)?
            .context(
                "Fetched config was not found. Possible storage corruption",
            )?;

        let destination = self.container_folder.join("rootfs");
        let unpacker = Unpacker::new(&self.storage, &destination);
        unpacker.unpack(digest)?;

        let runtime_config =
            RuntimeConfig::try_from((config, destination.as_path()))?;

        serde_json::to_writer(
            fs::File::create(&self.container_folder.join("config.json"))?,
            &runtime_config,
        )?;
    }
}

#[cfg(test)]
mod tests {
    use futures::StreamExt;

    use super::*;
    use crate::storage::TestStorage as Storage;

    #[tokio::test]
    async fn test_interpretation() {
        #[cfg(feature = "integration_testing")]
        let (url, _mocks) = ("https://registry-1.docker.io", ());
        #[cfg(not(feature = "integration_testing"))]
        let (url, _mocks) = test_helpers::mock_server!("unix.yml");

        let tempdir = tempfile::tempdir().expect("Failed to create a tempdir");
        let storage =
            Storage::new(tempdir.path()).expect("Unable to initialize cache");

        let builder =
            Builder::new(&url, "amd64".into(), vec!["linux".into()], &storage)
                .expect("failed to initialize the builder");

        let containerfile = test_helpers::fixture!("containerfile");

        let (updates, complete_future) =
            builder.interpret(containerfile.as_bytes()).unwrap();

        let (_, result) =
            future::join(updates.collect::<Vec<_>>(), complete_future).await;

        let container_folder =
            result.expect("Unable to enterpret containerfile");

        assert!(container_folder.join("rootfs/etc/passwd").exists());

        let file = fs::File::open(container_folder.join("config.json"))
            .expect("Failed to open OCI runtime config file");

        let config: RuntimeConfig = serde_json::from_reader(file)
            .expect("Failed to parse OCI runtime config file");

        let command = config.process.unwrap().args.unwrap().join(" ");
        assert_eq!(command, "nginx -g daemon off;");
    }
}
use std::{convert::TryFrom, fs, io::Read, path::PathBuf}; use anyhow::{Context, Error}; use dockerfile_parser::{ Dockerfile as Containerfile, FromInstruction, Instruction::{self, *}, }; use futures::{ channel::mpsc::{unbounded, SendError, UnboundedSender}, future::{self, Future}, stream::Stream, SinkExt, TryFutureExt, }; use uuid::Uuid; use registratur::v2::{ client::Client, domain::{config::Config, manifest::Manifest}, }; use crate::{ fetcher::{Fetcher, LayerDownloadStatus}, runtime_config::RuntimeConfig, storage::{Storage, StorageEngine, BLOBS_STORAGE_KEY}, unpacker::Unpacker, }; #[derive(Clone, Debug)] pub enum EvaluationUpdate { From(LayerDownloadStatus), } pub struct Builder<'a, T: StorageEngine> { fetcher: Fetcher<'a, T>, storage: &'a Storage<T>, container_folder: PathBuf, } impl<'a, T: StorageEngine> Builder<'a, T> { #[fehler::throws] pub fn new( registry_url: &'a str, architecture: String, os: Vec<String>, storage: &'a Storage<T>, ) -> Self { let client = Client::build(registry_url)?; let fetcher = Fetcher::new(storage, client, architecture, os); let container_uuid = format!("{}", Uuid::new_v4()); let container_folder = storage.folder().join("containers").join(&container_uuid); fs::create_dir_all(&container_folder)?; Self { fetcher, container_folder, storage, } } #[fehler::throws] pub fn interpret( &self, file: impl Read, ) -> ( impl Stream<Item = EvaluationUpdate>, impl Future<Output = Result<PathBuf, Error>> + '_, ) { let (sender, receiver) = unbounded(); let containerfile = Containerfile::from_reader(file)?; let result = containerfile.iter_stages().flat_map(|stage| { stage.instructions.into_iter().map(|instruction| { self.execute_instruction(instruction.clone(), sender.clone()) }) }); let folder = self.container_folder.clone(); let completion_future = future::try_join_all(result).and_then(|_| future::ok(folder)); (receiver, completion_future) } #[fehler::throws] async fn execute_instruction( &self, instruction: Instruction, se
#[fehler::throws] async fn execute_from_instruction( &self, instruction: FromInstruction, sender: UnboundedSender<EvaluationUpdate>, ) { let image = &instruction.image_parsed; let sender = sender.with(|val| { future::ok::<_, SendError>(EvaluationUpdate::From(val)) }); let default_tag = String::from("latest"); let tag = image.tag.as_ref().unwrap_or(&default_tag); let digest = self.fetcher.fetch(&image.image, &tag, sender).await?; let manifest: Manifest = self.storage.get(BLOBS_STORAGE_KEY, &digest)?.context( "Fetched manifest was not found. Possible storage corruption", )?; let config: Config = self .storage .get(BLOBS_STORAGE_KEY, manifest.config.digest)? .context( "Fetched config was not found. Possible storage corruption", )?; let destination = self.container_folder.join("rootfs"); let unpacker = Unpacker::new(&self.storage, &destination); unpacker.unpack(digest)?; let runtime_config = RuntimeConfig::try_from((config, destination.as_path()))?; serde_json::to_writer( fs::File::create(&self.container_folder.join("config.json"))?, &runtime_config, )?; } } #[cfg(test)] mod tests { use futures::StreamExt; use super::*; use crate::storage::TestStorage as Storage; #[tokio::test] async fn test_interpretation() { #[cfg(feature = "integration_testing")] let (url, _mocks) = ("https://registry-1.docker.io", ()); #[cfg(not(feature = "integration_testing"))] let (url, _mocks) = test_helpers::mock_server!("unix.yml"); let tempdir = tempfile::tempdir().expect("Failed to create a tempdir"); let storage = Storage::new(tempdir.path()).expect("Unable to initialize cache"); let builder = Builder::new(&url, "amd64".into(), vec!["linux".into()], &storage) .expect("failed to initialize the builder"); let containerfile = test_helpers::fixture!("containerfile"); let (updates, complete_future) = builder.interpret(containerfile.as_bytes()).unwrap(); let (_, result) = future::join(updates.collect::<Vec<_>>(), complete_future).await; let container_folder = result.expect("Unable to enterpret containerfile"); assert!(container_folder.join("rootfs/etc/passwd").exists()); let file = fs::File::open(container_folder.join("config.json")) .expect("Failed to open OCI runtime config file"); let config: RuntimeConfig = serde_json::from_reader(file) .expect("Failed to parse OCI runtime config file"); let command = config.process.unwrap().args.unwrap().join(" "); assert_eq!(command, "nginx -g daemon off;"); } }
nder: UnboundedSender<EvaluationUpdate>,
    ) {
        match instruction {
            From(instruction) => {
                self.execute_from_instruction(instruction, sender).await?;
            }
            _ => {
                log::warn!(
                    "Unhandled containerfile instruction {:?}",
                    instruction
                )
            }
        }
    }
function_block-function_prefixed
[ { "content": "#[fehler::throws]\n\npub fn teardown(storage: &Storage<impl StorageEngine>, key: impl AsRef<str>) {\n\n let cache: ContainerAddressStorage = storage\n\n .get(NETWORK_STATE_STORAGE_KEY, CONTAINER_ADDRESS_STORAGE_KEY)?\n\n .ok_or_else(|| anyhow::anyhow!(\"Failed to read network state data\"))?;\n\n let key: String = key.as_ref().into();\n\n let (iface, host, container) = cache\n\n .get(&key)\n\n .ok_or_else(|| anyhow::anyhow!(\"Failed to read network state data\"))?;\n\n Interface::new(iface)?.destroy()?;\n\n release_addresses(storage, key)?;\n\n free_address(&storage, *host)?;\n\n free_address(&storage, *container)?;\n\n}\n\n\n", "file_path": "libknast/src/operations/network.rs", "rank": 0, "score": 220127.50796174433 }, { "content": "fn string(input: &str) -> IResult<&str, &str> {\n\n take_while(|c| c != QUOTE)(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn test_parsing() {\n\n let header = test_helpers::fixture!(\"www_authenticate\");\n\n let parsed_header = super::WwwAuthenticate::parse(header)\n\n .expect(\"Failed to parse WwwAuthenticate header\");\n\n\n\n assert_eq!(parsed_header.realm, \"https://auth.docker.io/token\");\n\n assert_eq!(parsed_header.service, \"registry.docker.io\");\n\n assert_eq!(parsed_header.scope, \"repository:library/nginx:pull\");\n\n }\n\n}\n", "file_path": "registratur/src/v2/client/www_authenticate.rs", "rank": 1, "score": 218171.5377712545 }, { "content": "fn client() -> Result<TaskClient, Error> {\n\n use nix::sys::socket::*;\n\n\n\n let socket = socket(\n\n AddressFamily::Unix,\n\n SockType::Stream,\n\n SockFlag::empty(),\n\n None,\n\n )?;\n\n\n\n let sockaddr = UnixAddr::new(server_address()?.path().as_bytes())?;\n\n let sockaddr = SockAddr::Unix(sockaddr);\n\n\n\n let base: u32 = 2;\n\n let mut attempt: u32 = 1;\n\n let mut result;\n\n loop {\n\n result = connect(socket, &sockaddr);\n\n\n\n if result.is_err() && attempt < CONNECTION_RETRY_ATTEMPTS {\n", "file_path": "containerd-shim/src/main.rs", "rank": 2, "score": 216720.86010197742 }, { "content": "pub fn term(input: &str) -> IResult<&str, &str> {\n\n delimited(preceded(string, char(QUOTE)), string, char(QUOTE))(input)\n\n}\n\n\n", "file_path": "registratur/src/v2/client/www_authenticate.rs", "rank": 3, "score": 210262.29967909065 }, { "content": "#[fehler::throws]\n\npub fn mask(range: impl AsRef<str>) -> Ipv4Addr {\n\n Ipv4Network::try_from(range.as_ref())?.mask()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_range() {\n\n let mut result = range(\"172.24.0.2/16\").unwrap();\n\n\n\n assert_eq!(result.len(), 256 * 256);\n\n assert_eq!(\"172.24.255.255\", result.pop().unwrap().to_string());\n\n assert_eq!(\"172.24.255.254\", result.pop().unwrap().to_string());\n\n }\n\n\n\n #[test]\n\n fn test_broadcast() {\n\n let result = broadcast(\"172.24.0.2/16\").unwrap();\n", "file_path": "netzwerk/src/range.rs", "rank": 4, "score": 183750.1319038056 }, { "content": "#[fehler::throws]\n\npub fn broadcast(range: impl AsRef<str>) -> Ipv4Addr {\n\n Ipv4Network::try_from(range.as_ref())?.broadcast()\n\n}\n\n\n", "file_path": "netzwerk/src/range.rs", "rank": 5, "score": 183750.1319038056 }, { "content": "fn normalize_image_name(image: &str) -> String {\n\n let prefix = if image.contains('/') { \"\" } else { \"library/\" };\n\n\n\n format!(\"{}{}\", prefix, image)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use futures::stream::StreamExt;\n\n\n\n use super::*;\n\n use crate::storage::TestStorage as Storage;\n\n\n\n macro_rules! 
setup_client {\n\n ($var:ident, $fetcher:ident, $dir:ident) => {\n\n #[cfg(feature = \"integration_testing\")]\n\n let (url, _mocks) = (\"https://registry-1.docker.io\", ());\n\n #[cfg(not(feature = \"integration_testing\"))]\n\n let (url, _mocks) = test_helpers::mock_server!(\"basic.yml\");\n\n\n", "file_path": "baustelle/src/fetcher.rs", "rank": 6, "score": 181720.64156592236 }, { "content": "fn server() -> Result<(Server, Receiver<()>), Error> {\n\n let (sender, shutdown_notification) = mpsc::sync_channel(1);\n\n let nat_interface =\n\n std::env::var(\"NAT_INTERFACE\").unwrap_or_else(|_| \"lagg0\".into());\n\n let service = protocols::shim_ttrpc::create_task(TaskService::new(\n\n storage(),\n\n sender,\n\n nat_interface,\n\n ));\n\n tracing::info!(\"Initializing server\");\n\n let address = server_address()?;\n\n if let Err(error) = remove_file(address.path()) {\n\n tracing::info!(\"Previous socket wasn't deleted due to {}\", error)\n\n };\n\n let server = Server::new()\n\n .bind(address.as_str())?\n\n .register_service(service);\n\n\n\n Ok((server, shutdown_notification))\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 7, "score": 178987.37976962418 }, { "content": "fn error_response(err: impl ToString) -> ttrpc::Error {\n\n ttrpc::Error::RpcStatus(ttrpc::get_status(ttrpc::Code::INTERNAL, err))\n\n}\n\n\n", "file_path": "containerd-shim/src/task_service.rs", "rank": 8, "score": 175257.40610938543 }, { "content": "#[fehler::throws]\n\npub fn range(range: impl AsRef<str>) -> BinaryHeap<Ipv4Addr> {\n\n BinaryHeap::from_iter(&Ipv4Network::try_from(range.as_ref())?)\n\n}\n\n\n", "file_path": "netzwerk/src/range.rs", "rank": 9, "score": 174027.29393972736 }, { "content": "fn parent_process() -> Result<(), Error> {\n\n client().and_then(|client| {\n\n let request = ConnectRequest::new();\n\n Ok(client.connect(\n\n context::with_timeout(CONNECTION_TIMEOUT_NANOS),\n\n &request,\n\n )?)\n\n })?;\n\n\n\n let server_address = server_address()?;\n\n\n\n println!(\"{}\", server_address.as_str());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 10, "score": 159721.6622201606 }, { "content": "#[fehler::throws]\n\npub fn add_default(address: &str) {\n\n rtmsg(Operation::Add, Some(address))?;\n\n}\n\n\n\n/// Delete default route\n\n///\n\n/// # Examples\n\n/// delete net default\n\n///\n\n/// ```rust,no_run\n\n/// use netzwerk::route;\n\n///\n\n/// route::delete_default()\n\n/// .expect(\"Delete net default failed\");\n\n/// ```\n", "file_path": "netzwerk/src/route.rs", "rank": 11, "score": 153191.32289356564 }, { "content": "fn generate_mounts(os: String) -> Vec<Mount> {\n\n let mut mounts = vec![Mount {\n\n destination: \"/dev\".into(),\n\n r#type: \"devfs\".into(),\n\n source: Some(\"devfs\".into()),\n\n options: None,\n\n }];\n\n\n\n match &os[..] 
{\n\n \"linux\" => {\n\n mounts.extend(\n\n vec![\n\n Mount {\n\n destination: \"/sys\".into(),\n\n r#type: \"linsysfs\".into(),\n\n source: Some(\"linsysfs\".into()),\n\n options: Some(vec![\n\n \"nosuid\".into(),\n\n \"noexec\".into(),\n\n \"ro\".into(),\n", "file_path": "baustelle/src/runtime_config.rs", "rank": 12, "score": 146888.66824449875 }, { "content": "// TODO: this should be static variable.\n\nfn server_address() -> Result<url::Url, Error> {\n\n let address = url::Url::parse(\"unix:///tmp/knast.sock\")?;\n\n\n\n Ok(address)\n\n}\n", "file_path": "containerd-shim/src/main.rs", "rank": 13, "score": 146490.13047754785 }, { "content": "fn create_interfaces(jail_number: i32) -> Result<String> {\n\n let bridge = Interface::new(\"bridge\")\n\n .expect(\"Failed to create iface socket\")\n\n .create()?\n\n .name(\"knast0\")?;\n\n\n\n let pair_a = Interface::new(\"epair\")?.create()?.address(\n\n \"172.24.0.1\",\n\n \"172.24.0.255\",\n\n \"255.255.255.0\",\n\n )?;\n\n\n\n let name = pair_a.get_name()?;\n\n let len = name.len();\n\n let name_b = &[&name[..len - 1], \"b\"].join(\"\");\n\n\n\n let pair_b =\n\n Interface::new(name_b).expect(\"Failed to create iface socket\");\n\n\n\n pair_b\n\n .vnet(jail_number) /* Transfer interface to the jail */\n\n .expect(\"Failed to move interface to the jail\");\n\n\n\n bridge.bridge_addm(&[name])?;\n\n\n\n Ok(String::from(name_b))\n\n}\n\n\n", "file_path": "netzwerk/examples/bridge_jail.rs", "rank": 14, "score": 143758.24034632227 }, { "content": "#[fehler::throws]\n\npub fn get_address(address: Option<&str>) -> sockaddr_in {\n\n let mut result: sockaddr_in = unsafe { mem::zeroed() };\n\n\n\n result.sin_len = mem::size_of::<sockaddr_in>() as u8;\n\n result.sin_family = AF_INET as u8;\n\n\n\n let address = match address {\n\n Some(add) => add,\n\n None => return result,\n\n };\n\n\n\n match unsafe {\n\n inet_pton(\n\n AF_INET,\n\n [address, \"\\0\"].concat().as_ptr(),\n\n &mut result.sin_addr as *mut _ as *mut c_void,\n\n )\n\n } {\n\n 0 => {\n\n fehler::throw!(anyhow!(\n", "file_path": "netzwerk/src/common_bindings.rs", "rank": 15, "score": 138481.99279235932 }, { "content": "fn find_user_by_uid(rootfs: &Path) -> impl Fn(u32) -> Result<EtcPasswdEntry> {\n\n let path = Path::new(rootfs).join(\"etc/passwd\");\n\n\n\n move |uid| {\n\n find_entry(&path, |user: &EtcPasswdEntry| user.uid == uid).context(\n\n format!(\"User with uid {} was not found in {:?}\", uid, path),\n\n )\n\n }\n\n}\n\n\n\n/// Parses user string to retrieve uid / gid pair\n\n///\n\n/// If user string doesn't contain all required information,\n\n/// then the info is looked up in the container's root\n\n/// filesystem. Namely, in `/etc/passwd` and `/etc/group`\n\n/// files.\n\n///\n\n/// Adhering to Linux specification, these types of user\n\n/// strings are valid: `user`, `uid`, `user:group`,\n\n/// `uid:gid`, `uid:group`, `user:gid`. In practice, docker\n\n/// registry may serve the config with the empty (`\"\"`) user\n\n/// string. 
This case is to be handled outside the scope of\n\n/// this function.\n\n///\n\n/// ```\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 16, "score": 137369.61068597034 }, { "content": "#[fehler::throws]\n\npub fn rtmsg(operation: Operation, address: Option<&str>) {\n\n let socket = Socket::new(PF_ROUTE, SOCK_RAW)?;\n\n\n\n let header: rt_msghdr = unsafe { mem::zeroed() };\n\n\n\n let payload = [\n\n get_address(None)?,\n\n get_address(address)?,\n\n get_address(None)?,\n\n ];\n\n\n\n let mut message = rtmsg { header, payload };\n\n\n\n message.header.rtm_type = operation as _;\n\n message.header.rtm_flags = RTF_UP | RTF_GATEWAY | RTF_STATIC | RTF_PINNED;\n\n message.header.rtm_version = RTM_VERSION;\n\n message.header.rtm_addrs = match operation {\n\n Operation::Add => RTA_DST | RTA_GATEWAY | RTA_NETMASK,\n\n Operation::Delete => RTA_DST | RTA_NETMASK,\n\n };\n", "file_path": "netzwerk/src/route/bindings.rs", "rank": 17, "score": 135691.27541368033 }, { "content": "fn system_time_to_timestamp(time: SystemTime) -> Result<Timestamp, Error> {\n\n let duration = time.duration_since(UNIX_EPOCH)?;\n\n\n\n Ok(Timestamp {\n\n seconds: duration.as_secs().try_into()?,\n\n nanos: duration.subsec_nanos().try_into()?,\n\n ..Default::default()\n\n })\n\n}\n", "file_path": "containerd-shim/src/task_service.rs", "rank": 18, "score": 133896.94655675875 }, { "content": "fn identifier<T>(input: &str) -> IResult<&str, T>\n\nwhere\n\n T: FromStr,\n\n{\n\n map_res(alphanumeric1, FromStr::from_str)(input)\n\n}\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 19, "score": 131985.7555824635 }, { "content": "#[fehler::throws]\n\nfn setup_bridge(storage: &Storage<impl StorageEngine>) -> Interface {\n\n let mut bridge = Interface::new(DEFAULT_BRIDGE)?;\n\n\n\n if !bridge.exists()? 
{\n\n let bridge_address = get_address(storage)?.to_string();\n\n let broadcast = broadcast(DEFAULT_NETWORK)?.to_string();\n\n let mask = mask(DEFAULT_NETWORK)?.to_string();\n\n\n\n bridge = Interface::new(\"bridge\")?\n\n .create()?\n\n .name(DEFAULT_BRIDGE)?\n\n .address(&bridge_address, &broadcast, &mask)?;\n\n }\n\n\n\n bridge\n\n}\n\n\n", "file_path": "libknast/src/operations/network.rs", "rank": 20, "score": 131302.1865584007 }, { "content": "#[fehler::throws]\n\npub fn apply(path: impl AsRef<Path>, operation: Operation) {\n\n let file = File::open(path.as_ref())?;\n\n let mut rule: DevfsRule = unsafe { mem::zeroed() };\n\n rule.magic = MAGIC;\n\n rule.iacts = DRA_BACTS;\n\n\n\n match operation {\n\n Operation::HideAll => {\n\n rule.bacts = DRB_HIDE;\n\n }\n\n Operation::Unhide(node) => {\n\n rule.bacts = DRB_UNHIDE;\n\n rule.icond = DRC_PATHPTRN;\n\n rule.pathptrn[0..node.len()]\n\n .copy_from_slice(node.as_signed_bytes());\n\n }\n\n }\n\n\n\n if unsafe { ioctl(file.as_raw_fd(), DEVFSIO_RAPPLY, &rule) } < 0 {\n\n fehler::throw!(anyhow!(\n", "file_path": "libknast/src/filesystem/devfs.rs", "rank": 21, "score": 130903.7367670735 }, { "content": "#[fehler::throws]\n\n#[tracing::instrument(err)]\n\nfn get_address(storage: &Storage<impl StorageEngine>) -> Ipv4Addr {\n\n let maybe_heap: Option<BinaryHeap<Ipv4Addr>> =\n\n storage.get(NETWORK_STATE_STORAGE_KEY, DEFAULT_NETWORK.as_bytes())?;\n\n\n\n if let Some(heap) = maybe_heap {\n\n let mut new_heap = heap.clone();\n\n\n\n let mut address = new_heap\n\n .pop()\n\n .ok_or_else(|| anyhow::anyhow!(\"No addresses left\"))?;\n\n if address.is_broadcast() {\n\n address = new_heap\n\n .pop()\n\n .ok_or_else(|| anyhow::anyhow!(\"No addresses left\"))?;\n\n }\n\n\n\n if let Err(_) = storage.compare_and_swap(\n\n NETWORK_STATE_STORAGE_KEY,\n\n DEFAULT_NETWORK.as_bytes(),\n\n Some(heap),\n", "file_path": "libknast/src/operations/network.rs", "rank": 22, "score": 129167.871609739 }, { "content": "#[fehler::throws]\n\npub fn parse(user: String, rootfs: &Path) -> (u32, u32) {\n\n let uid_gid = pair::<u32, u32>;\n\n\n\n let uid_group = map_res(pair, |(uid, group)| -> Result<(u32, u32)> {\n\n Ok((uid, find_group_by_name(rootfs)(group)?.gid))\n\n });\n\n\n\n let username =\n\n map_res(identifier, |username: String| -> Result<(u32, u32)> {\n\n let user = find_user_by_name(rootfs)(username)?;\n\n\n\n Ok((user.uid, user.gid))\n\n });\n\n\n\n let uid = map_res(identifier, |uid: u32| -> Result<(u32, u32)> {\n\n let user = find_user_by_uid(rootfs)(uid)?;\n\n\n\n Ok((user.uid, user.gid))\n\n });\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 23, "score": 128815.69085345273 }, { "content": "#[fehler::throws]\n\nfn free_address(storage: &Storage<impl StorageEngine>, address: Ipv4Addr) {\n\n let maybe_heap: Option<BinaryHeap<Ipv4Addr>> =\n\n storage.get(NETWORK_STATE_STORAGE_KEY, DEFAULT_NETWORK.as_bytes())?;\n\n\n\n if let Some(heap) = maybe_heap {\n\n let mut new_heap = heap.clone();\n\n\n\n new_heap.push(address);\n\n\n\n if let Err(_) = storage.compare_and_swap(\n\n NETWORK_STATE_STORAGE_KEY,\n\n DEFAULT_NETWORK.as_bytes(),\n\n Some(heap),\n\n Some(new_heap),\n\n ) {\n\n free_address(&storage, address)?;\n\n };\n\n } else {\n\n let range = ip_range(DEFAULT_NETWORK)?;\n\n\n\n storage.compare_and_swap(\n\n NETWORK_STATE_STORAGE_KEY,\n\n DEFAULT_NETWORK.as_bytes(),\n\n None,\n\n Some(range),\n\n )?;\n\n free_address(&storage, address)?;\n\n }\n\n}\n\n\n", "file_path": "libknast/src/operations/network.rs", "rank": 24, "score": 
125335.5697663397 }, { "content": "fn pair<S, T>(input: &str) -> IResult<&str, (S, T)>\n\nwhere\n\n S: FromStr,\n\n T: FromStr,\n\n{\n\n separated_pair(identifier, tag(\":\"), identifier)(input)\n\n}\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 25, "score": 124599.03700272407 }, { "content": "#[fehler::throws]\n\npub fn rename_interface(socket: &Socket, request: &mut ifreq, name: &str) {\n\n let new_name = [name, \"\\0\"].concat();\n\n request.ifr_ifru.ifru_data = new_name.as_ptr() as *mut _;\n\n\n\n {\n\n if unsafe { ioctl(socket.0, SIOCSIFNAME, request as *mut _) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"rename interface: ioctl(SIOCSIFNAME) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n }\n\n\n\n request.ifr_name[0..new_name.len()]\n\n .copy_from_slice(new_name.as_str().as_signed_bytes());\n\n}\n\n\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 26, "score": 124066.2207703906 }, { "content": "/// Returns command and container id\n\nfn parse_opts() -> (String, String) {\n\n // Spike: this relies on arguments order.\n\n let mut args = std::env::args().rev();\n\n let command = args.next().expect(\"COMMAND is required\");\n\n\n\n let id = if command == \"start\" {\n\n args.next()\n\n } else {\n\n args.next();\n\n args.next();\n\n args.next()\n\n }\n\n .expect(\"ID is required\");\n\n\n\n (command, id)\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 27, "score": 119985.85730542333 }, { "content": "fn find_entry<T, F>(rootfs: impl AsRef<Path>, predicate: F) -> Result<T>\n\nwhere\n\n T: DeserializeOwned + 'static,\n\n F: Fn(&T) -> bool,\n\n{\n\n let items = EtcConf::<T>::new(&rootfs)?;\n\n\n\n for maybe_item in items {\n\n if let Ok(item) = maybe_item {\n\n if predicate(&item) {\n\n return Ok(item);\n\n }\n\n }\n\n }\n\n\n\n anyhow::bail!(\"Entry was not found\")\n\n}\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 28, "score": 115784.05333483698 }, { "content": "fn generate_annotations() -> BTreeMap<String, String> {\n\n let mut annotations = BTreeMap::new();\n\n\n\n // TODO: something meaningful, or at least adhere to OCI\n\n // spec :)\n\n annotations.insert(\"io.container.manager\".into(), \"knast\".into());\n\n annotations\n\n .insert(\"org.opencontainers.image.stopSignal\".into(), \"15\".into());\n\n\n\n annotations\n\n}\n\n\n\nimpl From<&Path> for Root {\n\n fn from(rootfs: &Path) -> Self {\n\n Self {\n\n path: rootfs.into(),\n\n readonly: Some(false),\n\n }\n\n }\n\n}\n", "file_path": "baustelle/src/runtime_config.rs", "rank": 29, "score": 112587.71578728002 }, { "content": "#[fehler::throws]\n\npub fn setup(\n\n storage: &Storage<impl StorageEngine>,\n\n key: impl AsRef<str>,\n\n jail: RunningJail,\n\n nat_interface: Option<impl AsRef<str>>,\n\n) {\n\n let bridge = setup_bridge(storage)?;\n\n let host = setup_pair(storage, key, jail)?;\n\n let host_name = host.get_name()?;\n\n\n\n bridge.bridge_addm(&[host_name])?;\n\n\n\n if let Some(nat_interface) = nat_interface {\n\n let nat = Pf::new(nat_interface.as_ref())?;\n\n nat.add(DEFAULT_NETWORK)?;\n\n }\n\n}\n\n\n", "file_path": "libknast/src/operations/network.rs", "rank": 30, "score": 108398.05525576652 }, { "content": "#[fehler::throws]\n\npub fn delete_default() {\n\n rtmsg(Operation::Delete, None)?;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::process::Command;\n\n\n\n #[test_helpers::jailed_test]\n\n fn test_add_default() {\n\n setup_lo();\n\n add_default(\"127.0.0.1\").expect(\"failed to add default 
route\");\n\n\n\n let content = routing_tables_content()\n\n .expect(\"(netstat) failed to get routing tables content\");\n\n\n\n assert!(content.contains(\"default 127.0.0.1\"));\n\n }\n\n\n", "file_path": "netzwerk/src/route.rs", "rank": 31, "score": 108398.05525576652 }, { "content": "/// For args, cwd, and mountpoints runtime config specifies\n\n/// paths inside containers Therefore, we need to prefix\n\n/// these paths with the rootfs of the container.\n\npub fn prefixed_destination(\n\n rootfs: impl AsRef<Path>,\n\n destination: impl AsRef<Path>,\n\n) -> PathBuf {\n\n let mut result = rootfs.as_ref().to_owned();\n\n\n\n for component in destination.as_ref().components() {\n\n // Sanitization: we don't want \"..\", \".\" or \"/\" here\n\n if let Component::Normal(component) = component {\n\n result.push(component);\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::process::Command;\n\n\n", "file_path": "libknast/src/filesystem.rs", "rank": 32, "score": 108398.05525576652 }, { "content": "#[fehler::throws]\n\npub fn mount<'a>(\n\n kind: &dyn AsRef<Path>,\n\n source: &dyn AsRef<Path>,\n\n destination: &dyn AsRef<Path>,\n\n options: impl Iterator<Item = &'a dyn AsRef<str>>,\n\n) {\n\n let kind = kind.as_bytes()?;\n\n let source = source.as_bytes()?;\n\n let destination = destination.as_bytes()?;\n\n let options: Vec<_> = options\n\n .flat_map(|option| {\n\n let mut split = option.as_ref().split(\"=\");\n\n let key = [split.next().unwrap_or(\"\").as_bytes(), b\"\\0\"].concat();\n\n let value = split\n\n .next()\n\n .map(|item| [item.as_bytes(), b\"\\0\"].concat())\n\n .unwrap_or(vec![]);\n\n\n\n vec![key, value]\n\n })\n", "file_path": "libknast/src/filesystem/mount.rs", "rank": 33, "score": 105652.42966583006 }, { "content": "fn report_error(archive: *const c_void) -> Error {\n\n let error_string = unsafe {\n\n let string = archive_error_string(archive);\n\n CStr::from_ptr(string)\n\n };\n\n\n\n anyhow!(\"Archiver error: {:?}\", error_string)\n\n}\n", "file_path": "baustelle/src/archive/resource.rs", "rank": 34, "score": 105078.41409686522 }, { "content": "#[fehler::throws]\n\npub fn set_interface_address(\n\n socket: &Socket,\n\n name: &[i8],\n\n address: &str,\n\n broadcast: &str,\n\n mask: &str,\n\n) {\n\n let mut request: ifaliasreq = unsafe { mem::zeroed() };\n\n\n\n request.ifra_name[0..name.len()].copy_from_slice(name);\n\n\n\n // Safety: ifra_addr receives `sockaddr`, which is a generalization of `sockaddr_in`.\n\n unsafe {\n\n request.ifra_addr = std::mem::transmute(get_address(Some(&address))?);\n\n request.ifra_broadaddr =\n\n std::mem::transmute(get_address(Some(&broadcast))?);\n\n request.ifra_mask = std::mem::transmute(get_address(Some(&mask))?);\n\n }\n\n\n\n if unsafe { ioctl(socket.0, SIOCAIFADDR, &request) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"set interface address: ioctl(SIOCAIFADDR) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 35, "score": 104164.95834586135 }, { "content": "fn delete_command(id: String) {\n\n let _guard = setup_logging();\n\n let storage = storage();\n\n let ops = OciOperations::new(&storage, id)\n\n .expect(\"Failed to initialize runtime\");\n\n\n\n ops.delete()\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 36, "score": 103397.420401916 }, { "content": "pub trait StorageEngine {\n\n fn initialize(cache_dir: impl AsRef<Path>) -> Result<Box<Self>, Error>;\n\n\n\n fn get(\n\n &self,\n\n collection: impl 
AsRef<[u8]>,\n\n key: impl AsRef<[u8]>,\n\n ) -> Result<Option<Vec<u8>>, Error>;\n\n\n\n fn put(\n\n &self,\n\n collection: impl AsRef<[u8]>,\n\n key: impl AsRef<[u8]>,\n\n value: impl AsRef<[u8]>,\n\n ) -> Result<(), Error>;\n\n\n\n fn compare_and_swap(\n\n &self,\n\n collection: impl AsRef<[u8]>,\n\n key: impl AsRef<[u8]>,\n", "file_path": "storage/src/lib.rs", "rank": 37, "score": 99482.77750202066 }, { "content": "#[fehler::throws]\n\nfn prepare_devfs(path: impl AsRef<Path>) {\n\n use devfs::{apply, Operation};\n\n\n\n const DEFAULT_DEVICES: [&str; 10] = [\n\n \"null\", \"zero\", \"full\", \"random\", \"urandom\", \"tty\", \"console\", \"pts\",\n\n \"pts/*\", \"fd\",\n\n ];\n\n\n\n apply(&path, Operation::HideAll)?;\n\n\n\n for device in &DEFAULT_DEVICES {\n\n apply(&path, Operation::Unhide(device))?\n\n }\n\n}\n\n\n", "file_path": "libknast/src/filesystem.rs", "rank": 38, "score": 96500.9434463524 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct TokenResponse {\n\n access_token: String,\n\n}\n\n\n\nimpl<'a> Client<'a> {\n\n /// Builds an OCI registry API client\n\n #[fehler::throws]\n\n pub fn build(registry_url: &'a str) -> Self {\n\n let client =\n\n reqwest::Client::builder().user_agent(USER_AGENT).build()?;\n\n\n\n Self {\n\n registry_url,\n\n client,\n\n }\n\n }\n\n\n\n /// Performs an authenticated HTTP request against the\n\n /// registry.\n\n ///\n", "file_path": "registratur/src/v2/client.rs", "rank": 39, "score": 94977.07441072362 }, { "content": "#[fehler::throws]\n\nfn add_address_to_table(handle: i32, address: &str) {\n\n let parsed_address: Ipv4Network = address.parse()?;\n\n let mut result: pfioc_table = unsafe { mem::zeroed() };\n\n let mut address: pfr_addr = unsafe { mem::zeroed() };\n\n let table = table_struct();\n\n\n\n address.pfra_af = AF_INET as _;\n\n address.pfra_net = parsed_address.prefix();\n\n address.pfra_u._pfra_ip4addr.s_addr =\n\n u32::from_be(parsed_address.network().into());\n\n\n\n result.pfrio_table = table;\n\n result.pfrio_esize = mem::size_of::<pfr_addr>() as _;\n\n result.pfrio_size = 1;\n\n result.pfrio_buffer = &address as *const _ as _;\n\n\n\n if unsafe { ioctl(handle, DIOCRADDADDRS, &result) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"add NAT rule : ioctl(DIOCRADDADDRS) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 40, "score": 94539.4112887386 }, { "content": "/// Executes closure in a forked process\n\npub fn run_in_fork<T: DeserializeOwned + Serialize>(\n\n f: impl FnOnce() -> Result<T, Error>,\n\n) -> Result<T, Error> {\n\n let (read, mut write) = UnixStream::pair()?;\n\n\n\n match unsafe { fork() } {\n\n Ok(ForkResult::Child) => {\n\n let result = f().map_err(|err| err.to_string());\n\n let result = serde_json::to_string(&result)\n\n .map_err(Error::from)\n\n .and_then(|string| {\n\n write.write_all(string.as_bytes())?;\n\n write.write(b\"\\n\")?;\n\n Ok(())\n\n });\n\n\n\n let status = match result {\n\n Ok(_) => 0,\n\n Err(err) => {\n\n tracing::error!(\"run_in_fork failed: {:?}\", err);\n", "file_path": "libknast/src/operations/utils.rs", "rank": 41, "score": 91441.77874591644 }, { "content": "#[fehler::throws]\n\npub fn unmount(destination: &dyn AsRef<Path>) {\n\n if unsafe {\n\n libc::unmount(\n\n destination.as_bytes()?.as_slice() as *const _ as _,\n\n libc::MNT_FORCE,\n\n )\n\n } < 0\n\n {\n\n fehler::throw!(anyhow!(\n\n \"mount: unmount failed: {}\",\n\n StdError::last_os_error(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": 
"libknast/src/filesystem/mount.rs", "rank": 42, "score": 91201.36982468072 }, { "content": "fn storage() -> TestStorage {\n\n let home = std::env::var(\"HOME\").unwrap();\n\n TestStorage::new(home).unwrap()\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 43, "score": 90252.88485682402 }, { "content": "#[fehler::throws]\n\npub fn create_interface(socket: &Socket, request: &ifreq) {\n\n if unsafe { ioctl(socket.0, SIOCIFCREATE, request) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"create interface: ioctl(SIOCIFCREATE) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 44, "score": 89512.88362706332 }, { "content": "#[fehler::throws]\n\npub fn destroy_interface(socket: &Socket, request: &ifreq) {\n\n if unsafe { ioctl(socket.0, SIOCIFDESTROY, request) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"destroy interface: ioctl(SIOCIFDESTROY) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 45, "score": 89512.88362706332 }, { "content": "type ContainerAddressStorage = BTreeMap<String, (String, Ipv4Addr, Ipv4Addr)>;\n\n\n", "file_path": "libknast/src/operations/network.rs", "rank": 46, "score": 87189.53860295448 }, { "content": "fn transaction_struct(\n\n anchor_name: Option<&[i8]>,\n\n) -> (pfioc_trans, Box<pfioc_trans_pfioc_trans_e>) {\n\n let mut anchor = [0; 1024];\n\n\n\n if let Some(anchor_name) = anchor_name {\n\n anchor[0..anchor_name.len()].copy_from_slice(anchor_name);\n\n }\n\n\n\n let boxed_nat_request = Box::new(pfioc_trans_pfioc_trans_e {\n\n rs_num: PF_RULESET_NAT as _,\n\n anchor,\n\n ticket: 0,\n\n });\n\n\n\n (\n\n pfioc_trans {\n\n size: 1,\n\n esize: mem::size_of::<pfioc_trans_pfioc_trans_e>() as _,\n\n array: &*boxed_nat_request as *const _\n", "file_path": "netzwerk/src/pf.rs", "rank": 47, "score": 87086.25825748775 }, { "content": "#[fehler::throws]\n\npub fn check_interface_existence(socket: &Socket, request: &ifreq) -> bool {\n\n unsafe { ioctl(socket.0, SIOCGIFCAP, request) >= 0 }\n\n}\n\n\n\nmacro_rules! 
bridge_request {\n\n ($func:ident, $cmd:expr) => {\n\n #[fehler::throws]\n\n pub fn $func(socket: &Socket, name: &[i8], member: &str) {\n\n let mut bridge_request: ifbreq = unsafe { mem::zeroed() };\n\n bridge_request.ifbr_ifsname[0..member.len()]\n\n .copy_from_slice(member.as_signed_bytes());\n\n\n\n let mut request: ifdrv = unsafe { mem::zeroed() };\n\n request.ifd_name[0..name.len()].copy_from_slice(name);\n\n request.ifd_cmd = $cmd;\n\n request.ifd_len = mem::size_of::<ifbreq>() as _;\n\n request.ifd_data = &bridge_request as *const _ as _;\n\n\n\n if unsafe { ioctl(socket.0, SIOCSDRVSPEC, &request) } < 0 {\n\n fehler::throw!(anyhow!(\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 48, "score": 84685.65849357707 }, { "content": "fn table_struct() -> pfr_table {\n\n let mut table: pfr_table = unsafe { mem::zeroed() };\n\n\n\n table.pfrt_anchor[0..ANCHOR.len()].copy_from_slice(&ANCHOR);\n\n table.pfrt_name[0..TABLE_NAME.len()].copy_from_slice(&TABLE_NAME);\n\n\n\n table\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 49, "score": 81192.55478380671 }, { "content": "#[fehler::throws]\n\npub fn jail_interface(socket: &Socket, request: &mut ifreq, jid: i32) {\n\n request.ifr_ifru.ifru_jid = jid;\n\n\n\n if unsafe { ioctl(socket.0, SIOCSIFVNET, request as *mut _) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"jail interface: ioctl(SIOCSIFVNET) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/interface/operations.rs", "rank": 50, "score": 80267.12560257567 }, { "content": "#[proc_macro_attribute]\n\npub fn jailed_test(_attrs: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as ItemFn);\n\n let fn_name = input.sig.ident;\n\n let block = input.block;\n\n\n\n let body = quote::quote! 
{\n\n use std::io::Write;\n\n use test_helpers::nix::{\n\n sys::{\n\n signal::Signal,\n\n wait::{waitpid, WaitStatus},\n\n },\n\n unistd::{fork, ForkResult},\n\n };\n\n use test_helpers::jail::StoppedJail;\n\n use test_helpers::memmap::MmapMut;\n\n use test_helpers::bincode;\n\n\n\n let mut mmap = MmapMut::map_anon(1024)\n\n .expect(\"failed to create a mmap\");\n", "file_path": "test_helpers/procedural_macros/src/lib.rs", "rank": 51, "score": 79445.80802224966 }, { "content": "fn comma_delimited_string<'de, D>(\n\n deserializer: D,\n\n) -> Result<Vec<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n String::deserialize(deserializer)\n\n .map(|string| string.split(',').map(String::from).collect())\n\n}\n\n\n\nimpl<T: DeserializeOwned + 'static> EtcConf<T> {\n\n #[fehler::throws]\n\n pub fn new(file: impl AsRef<Path>) -> Self {\n\n let file = File::open(file)?;\n\n let csv_reader = ReaderBuilder::new()\n\n .has_headers(false)\n\n .delimiter(b':')\n\n .comment(Some(b'#'))\n\n .from_reader(file);\n\n\n", "file_path": "baustelle/src/runtime_config/user/unix_user.rs", "rank": 52, "score": 70922.11464653737 }, { "content": "#[fehler::throws]\n\nfn commit_transaction(handle: i32, transaction_struct: &pfioc_trans) {\n\n if unsafe { ioctl(handle, DIOCXCOMMIT, transaction_struct) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCXCOMMIT) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 53, "score": 70596.20356336363 }, { "content": "#[fehler::throws]\n\nfn begin_transaction(handle: i32, transaction_struct: &pfioc_trans) {\n\n if unsafe { ioctl(handle, DIOCXBEGIN, transaction_struct) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCXBEGIN) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 54, "score": 70596.20356336363 }, { "content": "#[fehler::throws]\n\nfn rollback_transaction(handle: i32, transaction_struct: &pfioc_trans) {\n\n if unsafe { ioctl(handle, DIOCXROLLBACK, transaction_struct) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCXROLLBACK) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 55, "score": 70596.20356336363 }, { "content": "#[repr(C)]\n\nstruct rt_msghdr {\n\n pub rtm_msglen: u16,\n\n pub rtm_version: u8,\n\n pub rtm_type: u8,\n\n pub rtm_index: u16,\n\n _rtm_spare1: u16,\n\n pub rtm_flags: u32,\n\n pub rtm_addrs: u32,\n\n pub rtm_pid: u32,\n\n pub rtm_seq: u32,\n\n pub rtm_errno: u32,\n\n pub rtm_fmask: u32,\n\n pub rtm_inits: u64,\n\n _rt_metrics: [u64; 14usize],\n\n}\n", "file_path": "netzwerk/src/route/bindings.rs", "rank": 56, "score": 62276.44627198132 }, { "content": "#[repr(C)]\n\nstruct DevfsRule {\n\n magic: u32,\n\n id: u32,\n\n icond: c_int,\n\n dswflags: c_int,\n\n pathptrn: [c_char; 200],\n\n iacts: c_int,\n\n bacts: c_int,\n\n uid: uid_t,\n\n gid: gid_t,\n\n mode: mode_t,\n\n incset: u32,\n\n}\n\n\n\npub enum Operation<'a> {\n\n HideAll,\n\n Unhide(&'a str),\n\n}\n\n\n", "file_path": "libknast/src/filesystem/devfs.rs", "rank": 57, "score": 62276.44627198132 }, { "content": "#[repr(C)]\n\nstruct rtmsg<T> {\n\n pub header: rt_msghdr,\n\n pub payload: T,\n\n}\n\n\n\n// This makes us 64-bit only, right?\n", "file_path": "netzwerk/src/route/bindings.rs", "rank": 58, "score": 60318.54165544252 }, { "content": "pub trait Nat {\n\n fn add(&self, subnet: &str) -> Result<(), Error>;\n\n}\n", "file_path": 
"netzwerk/src/nat.rs", "rank": 59, "score": 58996.279231899985 }, { "content": "pub trait Mountable {\n\n #[fehler::throws]\n\n fn mount(&self, rootfs: impl AsRef<Path>) {\n\n let kind = self.kind();\n\n let source = self.source();\n\n let destination = prefixed_destination(&rootfs, self.destination());\n\n\n\n tracing::info!(\n\n \"Mounting {} fs {:?} -> {:?}\",\n\n kind,\n\n source,\n\n destination\n\n );\n\n mount::mount(\n\n kind,\n\n source,\n\n &destination,\n\n self.options().iter().map(|x| x as &dyn AsRef<str>),\n\n )?;\n\n\n", "file_path": "libknast/src/filesystem.rs", "rank": 60, "score": 58996.279231899985 }, { "content": "UPDATE storage SET value = :new_value WHERE tree = :tree AND key = :key AND value IS :old_value\n\nRETURNING id;\n", "file_path": "storage/src/sqlite_engine/cas.sql", "rank": 61, "score": 57078.69483298048 }, { "content": "pub trait AsSignedBytes {\n\n fn as_signed_bytes(&self) -> &[i8] {\n\n let bytes = unsafe { self.bytes().align_to() };\n\n\n\n bytes.1\n\n }\n\n\n\n fn bytes(&self) -> &[u8];\n\n}\n\n\n\nimpl AsSignedBytes for &str {\n\n fn bytes(&self) -> &[u8] {\n\n self.as_bytes()\n\n }\n\n}\n\n\n\nimpl AsSignedBytes for Vec<u8> {\n\n fn bytes(&self) -> &[u8] {\n\n self.as_slice()\n\n }\n\n}\n\n\n\nimpl AsSignedBytes for &[u8] {\n\n fn bytes(&self) -> &[u8] {\n\n self\n\n }\n\n}\n", "file_path": "common_lib/src/lib.rs", "rank": 62, "score": 56704.72238767768 }, { "content": "fn main() {\n\n let bindings = builder()\n\n .header(\"ffi/ffi.h\")\n\n .generate()\n\n .expect(\"failed to generate bindings\");\n\n\n\n bindings\n\n .write_to_file(\"src/bindings.rs\")\n\n .expect(\"failed to write bindings on disk\");\n\n}\n", "file_path": "netzwerk/build.rs", "rank": 63, "score": 55683.78316711508 }, { "content": "#[async_trait::async_trait]\n\npub trait ReqwestResponseExt {\n\n /// Provides a facility to report the download progress\n\n /// and validate that the downloaded content matches\n\n /// it's hash.\n\n async fn read(\n\n self,\n\n mut f: Option<impl FnMut(usize) + Send + 'async_trait>,\n\n digest: Option<&str>,\n\n ) -> Result<Vec<u8>>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ReqwestResponseExt for Response {\n\n async fn read(\n\n self,\n\n mut f: Option<impl FnMut(usize) + Send + 'async_trait>,\n\n digest: Option<&str>,\n\n ) -> Result<Vec<u8>> {\n\n let result = self\n\n .bytes_stream()\n", "file_path": "registratur/src/reqwest_ext.rs", "rank": 64, "score": 55676.87344119109 }, { "content": "/// Containerd-specific extensions to OCI operations.\n\npub trait ContainerdExtension {\n\n /// Start needs to set up IO for process on provided files\n\n fn start(self, exec_id: &str) -> Result<(), Error>;\n\n /// Exec executes a process in the existing container\n\n fn exec(self, exec_id: &str, process: Process) -> Result<(), Error>;\n\n /// Returns stdio triple for the container.\n\n fn stdio_triple(&self, exec_id: &str) -> Result<StdioTriple, Error>;\n\n /// Persists stdio triple for the container.\n\n fn save_stdio_triple(\n\n &self,\n\n exec_id: &str,\n\n triple: StdioTriple,\n\n ) -> Result<(), Error>;\n\n /// Resizes container's PTY\n\n fn resize_pty(&self, exec_id: &str, winsize: Winsize)\n\n -> Result<(), Error>;\n\n /// Persists PTY master side\n\n fn save_pty_state(&self, exec_id: &str, pty: (i32, i32)) -> Result<(), Error>;\n\n /// Returns PTY state\n\n fn pty_state(&self, exec_id: &str) -> Result<(i32, i32), Error>;\n", "file_path": "containerd-shim/src/oci_extensions.rs", "rank": 65, "score": 55672.050737785816 }, { "content": "// A 
workaround for https://github.com/fubarnetes/libjail-rs/issues/103\n\npub trait CommandExt {\n\n fn uid(&mut self, uid: u32) -> &mut Command;\n\n fn gid(&mut self, gid: u32) -> &mut Command;\n\n}\n\n\n\nimpl CommandExt for Command {\n\n fn uid(&mut self, uid: u32) -> &mut Command {\n\n unsafe {\n\n self.pre_exec(move || {\n\n if setuid(uid as uid_t) < 0 {\n\n return Err(Error::last_os_error());\n\n }\n\n\n\n Ok(())\n\n });\n\n }\n\n\n\n self\n\n }\n\n\n\n fn gid(&mut self, gid: u32) -> &mut Command {\n\n StdCommandExt::gid(self, gid)\n\n }\n\n}\n", "file_path": "libknast/src/operations/command_ext.rs", "rank": 66, "score": 55672.050737785816 }, { "content": "fn main() {\n\n let shim_inputs = vec![\n\n \"proto/shim.proto\",\n\n \"proto/google/protobuf/any.proto\",\n\n \"proto/google/protobuf/empty.proto\",\n\n \"proto/google/protobuf/timestamp.proto\",\n\n \"proto/github.com/containerd/containerd/api/types/mount.proto\",\n\n \"proto/github.com/containerd/containerd/api/types/task/task.proto\",\n\n ];\n\n Codegen::new()\n\n .out_dir(\"src/protocols\")\n\n .inputs(&shim_inputs)\n\n .include(\"proto\")\n\n .rust_protobuf()\n\n .run()\n\n .expect(\"Failed to generate ttrpc server code\");\n\n}\n", "file_path": "containerd-shim/build.rs", "rank": 67, "score": 54334.980636510845 }, { "content": "#[fehler::throws]\n\nfn add_rule(\n\n handle: i32,\n\n ticket: u32,\n\n pool_ticket: u32,\n\n overrides: impl Fn(pfioc_rule) -> pfioc_rule,\n\n) -> pfioc_rule {\n\n let mut result: pfioc_rule = unsafe { mem::zeroed() };\n\n result.ticket = ticket;\n\n result.pool_ticket = pool_ticket;\n\n result.rule.action = PF_NAT as _;\n\n result.rule.rtableid = -1;\n\n\n\n result = overrides(result);\n\n\n\n if unsafe { ioctl(handle, DIOCADDRULE, &result) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCADDRULE) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n\n\n result\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 68, "score": 53084.66613974506 }, { "content": "fn main() {\n\n let jid = std::env::args()\n\n .nth(1)\n\n .expect(\"USAGE: bridge_jail JID\")\n\n .parse()\n\n .expect(\"Failed to parse jail id\");\n\n\n\n let name = create_interfaces(jid).expect(\"Failed to create interfaces\");\n\n\n\n match unsafe { fork() } {\n\n Ok(ForkResult::Child) => {\n\n if unsafe { jail_attach(jid) } < 0 {\n\n panic!(\"Failed to attach to jail {}\", jid);\n\n };\n\n\n\n let pair_b =\n\n Interface::new(&name).expect(\"Failed to create iface socket\");\n\n\n\n pair_b\n\n .address(\"172.24.0.2\", \"172.24.0.255\", \"255.255.255.0\")\n\n .unwrap();\n\n route::add_default(\"172.24.0.1\").unwrap();\n\n }\n\n _ => (),\n\n }\n\n}\n", "file_path": "netzwerk/examples/bridge_jail.rs", "rank": 69, "score": 53084.66613974506 }, { "content": "fn main() {\n\n let (command, id) = parse_opts();\n\n match &command[..] 
{\n\n \"start\" => start_command(),\n\n \"delete\" => delete_command(id),\n\n _ => panic!(\"Unknown command {:?}\", command),\n\n }\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 70, "score": 53084.66613974506 }, { "content": "#[fehler::throws]\n\nfn add_address(\n\n handle: i32,\n\n pool_ticket: u32,\n\n interface: &str,\n\n) -> pfioc_pooladdr {\n\n let mut result: pfioc_pooladdr = unsafe { mem::zeroed() };\n\n\n\n result.ticket = pool_ticket;\n\n result.af = AF_INET as _;\n\n result.addr.addr.type_ = PF_ADDR_DYNIFTL as _;\n\n result.addr.addr.iflags = PFI_AFLAG_NOALIAS as _;\n\n unsafe {\n\n result.addr.addr.v.ifname[0..interface.len()]\n\n .copy_from_slice(interface.as_signed_bytes());\n\n\n\n result.addr.addr.v.a.mask.pfa.v4.s_addr =\n\n Ipv4Addr::from([255, 255, 255, 255]).into();\n\n }\n\n\n\n if unsafe { ioctl(handle, DIOCADDADDR, &result) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCADDADDR) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n\n\n result\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 71, "score": 53084.66613974506 }, { "content": "#[fehler::throws]\n\nfn release_addresses(\n\n storage: &Storage<impl StorageEngine>,\n\n key: impl AsRef<str>,\n\n) {\n\n let maybe_cache: Option<ContainerAddressStorage> = storage\n\n .get(NETWORK_STATE_STORAGE_KEY, CONTAINER_ADDRESS_STORAGE_KEY)?;\n\n\n\n if let Some(cache) = maybe_cache {\n\n let mut new_cache = cache.clone();\n\n let key: String = key.as_ref().into();\n\n new_cache.remove(&key);\n\n\n\n if let Err(_) = storage.compare_and_swap(\n\n NETWORK_STATE_STORAGE_KEY,\n\n CONTAINER_ADDRESS_STORAGE_KEY,\n\n Some(cache),\n\n Some(new_cache),\n\n ) {\n\n release_addresses(storage, key)?;\n\n };\n", "file_path": "libknast/src/operations/network.rs", "rank": 72, "score": 51922.432413025395 }, { "content": "#[fehler::throws]\n\nfn setup_pair(\n\n storage: &Storage<impl StorageEngine>,\n\n key: impl AsRef<str>,\n\n jail: RunningJail,\n\n) -> Interface {\n\n let host_address = get_address(&storage)?;\n\n let container_address = get_address(&storage)?;\n\n let broadcast = broadcast(DEFAULT_NETWORK)?.to_string();\n\n let mask = mask(DEFAULT_NETWORK)?.to_string();\n\n let pair_a = Interface::new(\"epair\")?.create()?.address(\n\n &host_address.to_string(),\n\n &broadcast,\n\n &mask,\n\n )?;\n\n let name = pair_a.get_name()?;\n\n let len = name.len();\n\n let name_b = &[&name[..len - 1], \"b\"].join(\"\");\n\n reserve_addresses(storage, key, name, (host_address, container_address))?;\n\n\n\n let pair_b = Interface::new(name_b)?;\n", "file_path": "libknast/src/operations/network.rs", "rank": 73, "score": 51922.432413025395 }, { "content": "#[fehler::throws]\n\nfn reserve_addresses(\n\n storage: &Storage<impl StorageEngine>,\n\n key: impl AsRef<str>,\n\n interface: impl AsRef<str>,\n\n addresses: (Ipv4Addr, Ipv4Addr),\n\n) {\n\n let maybe_cache: Option<ContainerAddressStorage> = storage\n\n .get(NETWORK_STATE_STORAGE_KEY, CONTAINER_ADDRESS_STORAGE_KEY)?;\n\n\n\n if let Some(cache) = maybe_cache {\n\n let mut new_cache = cache.clone();\n\n new_cache.insert(\n\n key.as_ref().into(),\n\n (interface.as_ref().into(), addresses.0, addresses.1),\n\n );\n\n\n\n if let Err(_) = storage.compare_and_swap(\n\n NETWORK_STATE_STORAGE_KEY,\n\n CONTAINER_ADDRESS_STORAGE_KEY,\n\n Some(cache),\n", "file_path": "libknast/src/operations/network.rs", "rank": 74, "score": 51922.432413025395 }, { "content": "fn child_process() {\n\n let _guard = setup_logging();\n\n\n\n match server() {\n\n Ok((mut 
server, shutdown_notification)) => {\n\n server.start().expect(\"failed to start server\");\n\n\n\n tracing::info!(\n\n \"Server is listening at {}\",\n\n server_address().unwrap().as_str()\n\n );\n\n\n\n if let Err(_) = shutdown_notification.recv() {\n\n tracing::error!(\n\n \"Sender dropped. Attempting to shutdown server\"\n\n );\n\n }\n\n\n\n server.shutdown();\n\n }\n\n Err(err) => {\n\n tracing::error!(\"Server failed to start due to error: {:?}\", err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 75, "score": 51922.432413025395 }, { "content": "fn start_command() {\n\n if parent_process().is_ok() {\n\n return;\n\n }\n\n\n\n match unsafe { rfork(RFPROC | RFCFDG) } {\n\n 0 => {\n\n child_process();\n\n }\n\n -1 => {\n\n eprintln!(\"rfork failed {:?}\", StdError::last_os_error());\n\n }\n\n _pid => parent_process().expect(\"Server is not running\"),\n\n }\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 76, "score": 51922.432413025395 }, { "content": "fn setup_io(\n\n command: &mut Command,\n\n triple: &StdioTriple,\n\n) -> Result<Option<(i32, i32)>, Error> {\n\n tracing::info!(\"Initializing process IO\");\n\n let StdioTriple {\n\n stdin,\n\n stdout,\n\n stderr,\n\n terminal,\n\n } = triple;\n\n\n\n tracing::info!(\"Openning file descriptors\");\n\n if *terminal {\n\n let mut stdin = OpenOptions::new().read(true).open(stdin)?;\n\n let mut stdout = OpenOptions::new().write(true).open(stdout)?;\n\n let OpenptyResult { master, slave } = openpty(None, None)?;\n\n tracing::info!(\"Setting up pty <-> containerd fifo pipe\");\n\n thread::spawn(move || {\n\n let mut writer = unsafe { File::from_raw_fd(master) };\n", "file_path": "containerd-shim/src/oci_extensions.rs", "rank": 77, "score": 50839.288619880645 }, { "content": "fn find_group_by_name(\n\n rootfs: &Path,\n\n) -> impl Fn(String) -> Result<EtcGroupEntry> {\n\n let path = Path::new(rootfs).join(\"etc/group\");\n\n\n\n move |groupname| {\n\n find_entry(&path, |group: &EtcGroupEntry| group.groupname == groupname)\n\n .context(format!(\n\n \"Group {} was not found in {:?}\",\n\n groupname, path\n\n ))\n\n }\n\n}\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 78, "score": 49827.42731086939 }, { "content": "fn find_user_by_name(\n\n rootfs: &Path,\n\n) -> impl Fn(String) -> Result<EtcPasswdEntry> {\n\n let path = Path::new(rootfs).join(\"etc/passwd\");\n\n\n\n move |username| {\n\n find_entry(&path, |user: &EtcPasswdEntry| user.username == username)\n\n .context(format!(\"User {} was not found in {:?}\", username, path))\n\n }\n\n}\n\n\n", "file_path": "baustelle/src/runtime_config/user.rs", "rank": 79, "score": 49827.42731086939 }, { "content": "#[fehler::throws]\n\nfn create_table(handle: i32) {\n\n let mut result: pfioc_table = unsafe { mem::zeroed() };\n\n let mut table = table_struct();\n\n table.pfrt_flags = PFR_TFLAG_PERSIST;\n\n\n\n result.pfrio_esize = mem::size_of::<pfr_table>() as _;\n\n result.pfrio_size = 1;\n\n result.pfrio_buffer = &table as *const _ as _;\n\n\n\n if unsafe { ioctl(handle, DIOCRADDTABLES, &result) } < 0 {\n\n fehler::throw!(anyhow!(\n\n \"add NAT rule : ioctl(DIOCRADDTABLES) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 80, "score": 48336.51253187549 }, { "content": "#[fehler::throws]\n\nfn begin_addresses(handle: i32) -> pfioc_pooladdr {\n\n let result: pfioc_pooladdr = unsafe { mem::zeroed() };\n\n\n\n if unsafe { ioctl(handle, DIOCBEGINADDRS, &result) } < 0 {\n\n 
fehler::throw!(anyhow!(\n\n \"initialize NAT: ioctl(DIOCBEGINADDRS) failed: {}\",\n\n StdError::last_os_error()\n\n ))\n\n };\n\n\n\n result\n\n}\n\n\n", "file_path": "netzwerk/src/pf.rs", "rank": 81, "score": 45346.01752364117 }, { "content": "fn setup_logging() -> tracing_appender::non_blocking::WorkerGuard {\n\n let file_appender =\n\n tracing_appender::rolling::never(\"/var/log\", \"knast.log\");\n\n let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);\n\n tracing_subscriber::fmt().with_writer(non_blocking).init();\n\n\n\n guard\n\n}\n\n\n", "file_path": "containerd-shim/src/main.rs", "rank": 82, "score": 42834.42309959326 }, { "content": "CREATE TABLE IF NOT EXISTS storage(\n\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n\n tree BLOB NOT NULL,\n\n key BLOB NOT NULL,\n\n value BLOB,\n\n UNIQUE (tree, key)\n\n);\n", "file_path": "storage/src/sqlite_engine/migration.sql", "rank": 83, "score": 39863.72802718728 }, { "content": "DELETE FROM storage WHERE key = :key AND tree = :tree;\n", "file_path": "storage/src/sqlite_engine/remove.sql", "rank": 84, "score": 38205.32837692966 }, { "content": "SELECT EXISTS(SELECT 1 FROM storage WHERE key = :key AND tree = :tree);\n", "file_path": "storage/src/sqlite_engine/exists.sql", "rank": 94, "score": 36679.40215969742 }, { "content": "SELECT value FROM storage WHERE tree = :tree AND key = :key LIMIT 1;\n", "file_path": "storage/src/sqlite_engine/get.sql", "rank": 95, "score": 36679.40215969742 }, { "content": " ///\n\n /// ```rust,no_run\n\n /// use futures::{future, stream::StreamExt};\n\n /// use registratur::v2::client::Client;\n\n /// use baustelle::{fetcher::{Fetcher, LayerDownloadStatus::*}, storage::Storage};\n\n ///\n\n /// let storage =\n\n /// Storage::new(\"/opt/dir\").expect(\"Unable to initialize cache\");\n\n /// let client = Client::build(\"https://registry-1.docker.io\")\n\n /// .expect(\"failed to build the client\");\n\n ///\n\n /// let architecture = \"amd64\";\n\n /// let os = vec![\"linux\".into(), \"freebsd\".into()];\n\n /// let fetcher = Fetcher::new(&storage, client, architecture.into(), os);\n\n /// let (tx, rx) = futures::channel::mpsc::channel(1);\n\n ///\n\n /// async {\n\n /// let digest_fut = fetcher.fetch(\"nginx\", \"1.17.10\", tx);\n\n /// let updates_fut = rx.collect::<Vec<_>>();\n\n ///\n", "file_path": "baustelle/src/fetcher.rs", "rank": 97, "score": 36561.47052639677 }, { "content": " Storage, StorageEngine, BLOBS_STORAGE_KEY, IMAGES_INDEX_STORAGE_KEY,\n\n};\n\n\n\n/// Represents layer download update.\n\n#[derive(Clone, Debug)]\n\npub enum LayerDownloadStatus {\n\n Cached(Arc<String>),\n\n InProgress(Arc<String>, usize, usize),\n\n}\n\n\n\npub struct Fetcher<'a, T: StorageEngine> {\n\n storage: &'a Storage<T>,\n\n client: Client<'a>,\n\n architecture: String,\n\n os: Vec<String>, /* We support Linux & FreeBSD containers running\n\n * alongside */\n\n}\n\n\n\nimpl<'a, T: StorageEngine> Fetcher<'a, T> {\n\n pub fn new(\n", "file_path": "baustelle/src/fetcher.rs", "rank": 98, "score": 36560.967423355825 }, { "content": " let $dir =\n\n tempfile::tempdir().expect(\"failed to create a tmp directory\");\n\n\n\n let storage =\n\n Storage::new($dir.path()).expect(\"Unable to initialize cache\");\n\n\n\n let architecture = \"amd64\";\n\n\n\n let os = vec![\"linux\".into(), \"freebsd\".into()];\n\n\n\n let $var =\n\n Client::build(&url).expect(\"failed to build the client\");\n\n\n\n let $fetcher =\n\n Fetcher::new(&storage, $var, architecture.into(), os);\n\n };\n\n }\n\n\n\n use 
registratur::v2::{client::Client, domain::manifest::Manifest};\n\n\n", "file_path": "baustelle/src/fetcher.rs", "rank": 99, "score": 36555.89086827571 } ]
Rust
src/room.rs
neosam/sprite-game
5d261eb538824ebb133833a4f4fe4f162aa3445c
use rand::Rng; #[derive(Copy, Clone)] pub enum DestRoom { Relative(isize, isize, i32, i32,), Absolute(isize, isize, i32, i32,), } impl DestRoom { pub fn to_absolute_coordinates(&self, (x, y): (i32, i32)) -> (i32, i32) { match self { DestRoom::Relative(rel_x, rel_y, _, _) => (x + *rel_x as i32, y + *rel_y as i32), DestRoom::Absolute(abs_x, abs_y, _, _) => (*abs_x as i32, *abs_y as i32), } } pub fn spawn_point(&self) -> (i32, i32) { match self { DestRoom::Relative(_, _, x, y) => (*x, *y), DestRoom::Absolute(_, _, x, y) => (*x, *y), } } } #[derive(Copy, Clone)] pub enum RoomField { Nothing, Wall, Stone, Bush, Player, Exit(DestRoom), } #[derive(Clone)] pub struct Room { pub width: usize, pub height: usize, pub fields: Vec<RoomField>, } impl Room { pub fn new(width: usize, height: usize) -> Room { let fields = (0..(width * height)).map(|_| RoomField::Nothing).collect(); Room { width, height, fields, } } pub fn set_field(&mut self, x: usize, y: usize, field: RoomField) { if x < self.width && y < self.height { let index = x + y * self.width; self.fields[index] = field; } } pub fn get_field(&self, x: usize, y: usize) -> Option<RoomField> { if x < self.width && y < self.height { let index = x + y * self.width; Some(self.fields[index]) } else { None } } pub fn room_field_iterator(&self) -> RoomFieldIterator { RoomFieldIterator { x: 0, y: 0, room: self } } } #[derive(Default)] pub struct RoomGeneration { pub width: usize, pub height: usize, pub exit_north: bool, pub exit_south: bool, pub exit_east: bool, pub exit_west: bool, } impl RoomGeneration { pub fn generate_room(&self, rng: &mut impl Rng) -> Room { let mut room = Room::new(self.width, self.height); /* Draw borders */ let wall_borders = RoomField::Wall; for x in 0..self.width { room.set_field(x, 0, wall_borders); room.set_field(x, self.height - 1, wall_borders); } for y in 0..self.height { room.set_field(0, y, wall_borders); room.set_field(self.width - 1, y, wall_borders); } /* Open exits */ if self.exit_north { room.set_field(self.width / 2, self.height - 1, RoomField::Exit(DestRoom::Relative(0, -1, self.width as i32 / 2, 1))); } if self.exit_south { room.set_field(self.width / 2, 0, RoomField::Exit(DestRoom::Relative(0, 1, self.width as i32 / 2, self.height as i32 - 2))); } if self.exit_east { room.set_field(self.width - 1, self.height / 2, RoomField::Exit(DestRoom::Relative(1, 0, 1, self.height as i32 / 2))); } if self.exit_west { room.set_field(0, self.height / 2, RoomField::Exit(DestRoom::Relative(-1, 0, self.width as i32 - 2, self.height as i32 / 2))); } /* Draw 5-7 random stones */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Stone); } /* Draw 5-7 bushes */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Bush); } /* Add the player somewhere */ let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Player); room } } pub struct RoomFieldIterator<'a> { room: &'a Room, x: usize, y: usize, } impl<'a> Iterator for RoomFieldIterator<'a> { type Item = (usize, usize, RoomField); fn next(&mut self) -> Option<Self::Item> { let result = self.room.get_field(self.x, self.y).map(|field| (self.x, self.y, field)); self.x += 1; if self.x >= self.room.width { self.x = 0; self.y += 1; } if self.y >= self.room.height { return None; } result } }
use rand::Rng; #[derive(Copy, Clone)] pub enum DestRoom { Relative(isize, isize, i32, i32,), Absolute(isize, isize, i32, i32,), } impl DestRoom { pub fn to_absolute_coordinates(&self, (x, y): (i32, i32)) -> (i32, i32) { match self { DestRoom::Relative(rel_x, rel_y, _, _) => (x + *rel_x as i32, y + *rel_y as i32), DestRoom::Absolute(abs_x, abs_y, _, _) => (*abs_x as i32, *abs_y as i32), } } pub fn spawn_point(&self) -> (i32, i32) { match self { DestRoom::Relative(_, _, x, y) => (*x, *y), DestRoom::Absolute(_, _, x, y) => (*x, *y), } } } #[derive(Copy, Clone)] pub enum RoomField { Nothing, Wall, Stone, Bush, Player, Exit(DestRoom), } #[derive(Clone)] pub struct Room { pub width: usize, pub height: usize, pub fields: Vec<RoomField>, } impl Room { pub fn new(width: usize, height: usize) -> Room { let fields = (0..(width * height)).map(|_| RoomField::Nothing).collect(); Room { width, height, fields, } } pub fn set_field(&mut self, x: usize, y: usize, field: RoomField) { if x < self.width && y < self.height { let index = x + y * self.width; self.fields[index] = field; } } pub fn get_field(&self, x: usize, y: usize) -> Option<RoomField> { if x < self.width && y < self.height { let index = x + y * self.width; Some(self.fields[index]) } else { None } } pub fn room_field_iterator(&self) -> RoomFieldIterator { RoomFieldIterator { x: 0, y: 0, room: self } } } #[derive(Default)] pub struct RoomGeneration { pub width: usize, pub height: usize, pub exit_north: bool, pub exit_south: bool, pub exit_east: bool, pub exit_west: bool, } impl RoomGeneration { pub fn generate_room(&self, rng: &mut impl Rng) -> Room { let mut room = Room::new(self.width, self.height); /* Draw borders */ let wall_borders = RoomField::Wall; for x in 0..self.width { room.set_field(x, 0, wall_borders); room.set_field(x, self.height - 1, wall_borders); } for y in 0..self.height { room.set_field(0, y, wall_borders); room.set_field(self.width - 1, y, wall_borders); } /* Open exits */ if self.exit_north { room.set_field(self.width / 2, self.height - 1, RoomField::Exit(DestRoom::Relative(0, -1, self.width as i32 / 2, 1))); } if self.exit_south { room.set_field(self.width / 2, 0, RoomField::Exit(DestRoom::Relative(0, 1, self.width as i32 / 2, self.height as i32 - 2))); }
if self.exit_west { room.set_field(0, self.height / 2, RoomField::Exit(DestRoom::Relative(-1, 0, self.width as i32 - 2, self.height as i32 / 2))); } /* Draw 5-7 random stones */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Stone); } /* Draw 5-7 bushes */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Bush); } /* Add the player somewhere */ let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Player); room } } pub struct RoomFieldIterator<'a> { room: &'a Room, x: usize, y: usize, } impl<'a> Iterator for RoomFieldIterator<'a> { type Item = (usize, usize, RoomField); fn next(&mut self) -> Option<Self::Item> { let result = self.room.get_field(self.x, self.y).map(|field| (self.x, self.y, field)); self.x += 1; if self.x >= self.room.width { self.x = 0; self.y += 1; } if self.y >= self.room.height { return None; } result } }
if self.exit_east {
    room.set_field(self.width - 1, self.height / 2, RoomField::Exit(DestRoom::Relative(1, 0, 1, self.height as i32 / 2)));
}
if_condition
[ { "content": "fn generate_corridor(map: &mut Map<RoomGeneration>, rng: &mut impl Rng, width: usize, height: usize, corridor_length: u32, mut coordinate: (i32, i32)) -> Vec<(i32,i32)> {\n\n let mut coordinate_stack = Vec::new();\n\n for _ in 0..corridor_length {\n\n let choice = {\n\n let mut i = 0;\n\n loop {\n\n if i == 8 {\n\n break None\n\n }\n\n i += 1;\n\n let direction: Direction = rng.gen();\n\n let new_coordinate = direction.add(coordinate);\n\n println!(\"Map gen: {:?}, {:?}\", coordinate, new_coordinate);\n\n if !map.has_room(new_coordinate) {\n\n break Some((direction, new_coordinate))\n\n }\n\n }\n\n };\n\n if let Some((direction, new_coordinate)) = choice {\n\n direction.set_exit(map.get_room_mut(coordinate).unwrap());\n", "file_path": "src/map.rs", "rank": 0, "score": 173920.43514947174 }, { "content": "fn build_map(width: usize, height: usize) -> map::Map<room::Room> {\n\n /*let mut map = map::Map::new();\n\n\n\n let mut room_generation1 = room::RoomGeneration::default();\n\n room_generation1.width = width;\n\n room_generation1.height = height;\n\n room_generation1.exit_east = true;\n\n let room1 = room_generation1.generate_room(&mut rand::thread_rng());\n\n\n\n let mut room_generation2 = room::RoomGeneration::default();\n\n room_generation2.width = width;\n\n room_generation2.height = height;\n\n room_generation2.exit_west = true;\n\n let room2 = room_generation2.generate_room(&mut rand::thread_rng());\n\n\n\n map.add_room((0, 0), room1);\n\n map.add_room((1, 0), room2);*/\n\n let mut map_gen = map::DungeonGen::default();\n\n map_gen.corridor_length = 5;\n\n map_gen.splits = 4;\n\n let map = map_gen.generate(&mut rand::thread_rng(), width, height).generate_map(&mut rand::thread_rng());\n\n\n\n map\n\n}\n", "file_path": "src/main.rs", "rank": 1, "score": 126884.24643819829 }, { "content": "/// Use an AnimationData and create animations and sprite images based on sprite names.\n\n///\n\n/// If a name ends with underscores followed by numbers it is treated as part of an animations.\n\n/// In this case it will add it to the animations, otherwise it will simply store the index in\n\n/// the images tree map under that name.\n\npub fn manually_assign_animations(animation_data: &mut AnimationData) {\n\n let mut animations: BTreeMap<String, Vec<usize>> = BTreeMap::new();\n\n let mut images: BTreeMap<String, usize> = BTreeMap::new();\n\n\n\n let ends_with_number_pattern = Regex::new(r\"_\\d+$\").unwrap();\n\n for (i, sprite) in (0..).zip(&animation_data.sprites) {\n\n if let Some(_) = ends_with_number_pattern.find(&sprite.name) {\n\n let animation_name = ends_with_number_pattern.replace_all(&sprite.name, \"\");\n\n println!(\"Animation name: {}\", animation_name);\n\n let entry = animations\n\n .entry(animation_name.to_string())\n\n .or_insert_with(|| Vec::new());\n\n entry.push(i);\n\n } else {\n\n images.insert(sprite.name.to_string(), i);\n\n }\n\n }\n\n\n\n animation_data.animations = animations;\n\n animation_data.images = images;\n\n}\n\n\n", "file_path": "src/spriteanimationloader.rs", "rank": 2, "score": 73529.18140617508 }, { "content": "/// Load animations and images from the given ron file.\n\n///\n\n/// It requires a mutable reference to the world, a directory, where the assets are stored and the filename\n\n/// of the ron file inside the directory. 
The reference to the image file in the ron file is relative to the\n\n/// directory provides as second argument.\n\npub fn load_sprites(\n\n world: &mut World,\n\n directory: impl ToString,\n\n filename: impl ToString,\n\n) -> SpriteAnimationStore {\n\n // ---- Loading animations\n\n info!(\"Loading animations\");\n\n let directory = directory.to_string();\n\n let filename = filename.to_string();\n\n let ron_path = format!(\"{}/{}\", directory, filename);\n\n let mut animations = AnimationData::load(ron_path).expect(\"Animation data should load\");\n\n manually_assign_animations(&mut animations);\n\n let texture_path = format!(\"{}/{}\", directory, animations.texture_path);\n\n let texture_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let texture_storage = world.read_resource::<AssetStorage<Texture>>();\n\n loader.load(\n\n texture_path,\n\n ImageFormat::default(),\n\n (),\n", "file_path": "src/spriteanimationloader.rs", "rank": 3, "score": 61431.095509026156 }, { "content": "pub fn sword_attack(\n\n world: &mut World,\n\n strength: f32,\n\n transform: Transform,\n\n direciton: CharacterDirection,\n\n sprite: SpriteRender,\n\n) {\n\n let translation = transform.translation();\n\n let (x, y) = match direciton {\n\n CharacterDirection::Up => (translation.x, translation.y + 32.0),\n\n CharacterDirection::Down => (translation.x, translation.y -32.0),\n\n CharacterDirection::Left => (translation.x -32.0, translation.y),\n\n CharacterDirection::Right => (translation.x + 32.0, translation.y),\n\n };\n\n let mut damage_transform = Transform::default();\n\n damage_transform.set_translation_xyz(x, y, -y);\n\n let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Dynamic)\n\n .build();\n\n let physics_collider: PhysicsCollider<f32> =\n\n PhysicsColliderBuilder::from(Shape::Cuboid {\n", "file_path": "src/swordattack.rs", "rank": 4, "score": 61431.095509026156 }, { "content": "/// Assembles a walkable entity\n\n///\n\n/// Assigns the components to the EntityBuilder which are required\n\n/// to have a solid entity.\n\n///\n\n/// The name must match the sprite name in.\n\n///\n\n/// ## Examples\n\n/// ```\n\n/// use helper::create_solid;\n\n///\n\n/// create_solid(\n\n/// world.create_entity(),\n\n/// &animations,\n\n/// (300.0, 300.0),\n\n/// (-16.0, 16.0, -16.0, 16.0),\n\n/// \"hero\"\n\n/// ).build();\n\n/// ```\n\npub fn create_walkable<'a>(\n\n entity_builder: EntityBuilder<'a>,\n\n animations: &SpriteAnimationStore,\n\n (x, y): (f32, f32),\n\n name: &str,\n\n) -> EntityBuilder<'a> {\n\n let sprite_render = SpriteRender {\n\n sprite_sheet: animations.sprite_sheet_handle.clone(),\n\n sprite_number: *animations.images.get(name).unwrap_or(&0),\n\n };\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, -y);\n\n\n\n entity_builder\n\n .with(sprite_render)\n\n .with(transform)\n\n // .with(BoundingRect::new(left, right, bottom, top))\n\n // .with(Transparent)\n\n}\n", "file_path": "src/helper.rs", "rank": 5, "score": 59723.97357897811 }, { "content": "/// Assebles a solid entity\n\n///\n\n/// Assigns the components to the EntityBuilder which are required\n\n/// to have a solid enity.\n\n///\n\n/// The name must match the sprite name in.\n\n///\n\n/// ## Examples\n\n/// ```\n\n/// use helper::create_solid;\n\n///\n\n/// create_solid(\n\n/// world.create_entity(),\n\n/// &animations,\n\n/// (300.0, 300.0),\n\n/// (-16.0, 16.0, -16.0, 16.0),\n\n/// \"hero\"\n\n/// ).build();\n\n/// ```\n\npub fn create_solid<'a>(\n\n entity_builder: 
EntityBuilder<'a>,\n\n animations: &SpriteAnimationStore,\n\n (x, y): (f32, f32),\n\n name: &str,\n\n) -> EntityBuilder<'a> {\n\n let sprite_render = animations.get_sprite_render(name).unwrap();\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, -y);\n\n let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Static)\n\n .build();\n\n let physics_collider: PhysicsCollider<f32> =\n\n PhysicsColliderBuilder::from(Shape::Cuboid {\n\n half_extents: Vector3::new(16.0, 16.0, 300.0)\n\n })\n\n .build();\n\n\n\n entity_builder\n\n .with(sprite_render)\n\n .with(transform)\n\n .with(physics_body)\n\n .with(physics_collider)\n\n // .with(BoundingRect::new(left, right, bottom, top))\n\n // .with(Transparent)\n\n // .with(Solid)\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 6, "score": 59723.97357897811 }, { "content": "/// Assembles a character on the map\n\n///\n\n/// Assigns the components to the EntityBuilder which are required\n\n/// to have a moving character on the screen.\n\n///\n\n/// For the animations, it requires to have animation names following\n\n/// this pattern:\n\n/// * (name)_walk_up\n\n/// * (name)_walk_down\n\n/// * (name)_walk_left\n\n/// * (name)_walk_right\n\n///\n\n/// ## Examples\n\n/// ```\n\n/// use helper::create_character;\n\n///\n\n/// create_character(\n\n/// world.create_entity(),\n\n/// &animations,\n\n/// (300.0, 300.0),\n\n/// (-16.0, 16.0, -16.0, 16.0),\n\n/// \"hero\"\n\n/// ).build();\n\n/// ```\n\npub fn create_character<'a>(\n\n entity_builder: EntityBuilder<'a>,\n\n animations: &SpriteAnimationStore,\n\n (x, y): (f32, f32),\n\n char_name: &str,\n\n) -> EntityBuilder<'a> {\n\n println!(\"Create character start\");\n\n let animation_up = format!(\"{}_walk_up\", char_name);\n\n let animation_down = format!(\"{}_walk_down\", char_name);\n\n let animation_left = format!(\"{}_walk_left\", char_name);\n\n let animation_right = format!(\"{}_walk_right\", char_name);\n\n\n\n let mut sprite_animation = SpriteAnimation::new(\n\n animations\n\n .animations\n\n .get(&animation_up)\n\n .map(|x| x.clone())\n\n .unwrap_or(vec![0]),\n\n 0.1,\n\n );\n", "file_path": "src/helper.rs", "rank": 7, "score": 59721.49845150809 }, { "content": "pub fn create_walkable_solid<'a>(\n\n entity_builder: EntityBuilder<'a>,\n\n (x, y): (f32, f32),\n\n) -> EntityBuilder<'a> {\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, -y);\n\n let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Static)\n\n .build();\n\n let physics_collider: PhysicsCollider<f32> =\n\n PhysicsColliderBuilder::from(Shape::Cuboid {\n\n half_extents: Vector3::new(16.0, 16.0, 300.0)\n\n })\n\n .sensor(true)\n\n .build();\n\n\n\n entity_builder\n\n .with(transform)\n\n .with(physics_body)\n\n .with(physics_collider)\n\n // .with(BoundingRect::new(left, right, bottom, top))\n\n // .with(Transparent)\n\n // .with(Solid)\n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 8, "score": 57584.98137883243 }, { "content": "/// Initialise the camera.\n\nfn initialise_camera(world: &mut World) {\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(ARENA_WIDTH as f32 / 2.0, ARENA_HEIGHT as f32 / 2.0, 1000.0);\n\n\n\n world\n\n .create_entity()\n\n .with(Camera::standard_2d(ARENA_WIDTH, ARENA_HEIGHT))\n\n .with(transform)\n\n .build();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 55723.604923357336 }, { "content": "fn initialize_test_sprite(scene: &Example, world: &mut World) {\n\n 
info!(\"Loading sprites\");\n\n let sprite_animations = spriteanimationloader::load_sprites(world, \"texture\", \"tp-export.ron\");\n\n\n\n // Generate a room\n\n println!(\"Getting room: {:?}\", scene.room_coordinate);\n\n let room = scene.map.get_room(scene.room_coordinate).unwrap();\n\n\n\n for (x, y, field) in room.room_field_iterator() {\n\n let pixel_pos = (\n\n x as f32 * 32.0 + 16.0,\n\n y as f32 * 32.0 + 16.0,\n\n );\n\n \n\n match field {\n\n room::RoomField::Nothing => {},\n\n room::RoomField::Wall => {\n\n // Add a brick\n\n helper::create_solid(\n\n world.create_entity(),\n", "file_path": "src/main.rs", "rank": 10, "score": 48436.9670539491 }, { "content": "enum Direction {\n\n North, South, East, West,\n\n}\n\nimpl Distribution<Direction> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Direction {\n\n match rng.gen_range(0, 4) {\n\n 0 => Direction::North,\n\n 1 => Direction::South,\n\n 2 => Direction::East,\n\n 3 => Direction::West,\n\n\n\n // Not possible but the compiler doesn't know that.\n\n _ => Direction::North\n\n }\n\n }\n\n}\n\n\n\nimpl Direction {\n\n fn relative_pos(&self) -> (i32, i32) {\n\n match self {\n", "file_path": "src/map.rs", "rank": 11, "score": 40512.473697024405 }, { "content": "struct Example<'a, 'b> {\n\n map: map::Map<room::Room>,\n\n room_coordinate: map::Coordinate,\n\n spawn_player: Option<(i32, i32)>,\n\n\n\n dispatcher: Option<ecs::Dispatcher<'a, 'b>>,\n\n}\n\n\n\npub const ARENA_WIDTH: f32 = 640.0;\n\npub const ARENA_HEIGHT: f32 = 480.0;\n\n\n\nimpl<'a, 'b> SimpleState for Example<'a, 'b> {\n\n fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {\n\n let world = data.world;\n\n\n\n //world.register::<SpriteRender>();\n\n //world.register::<Transparent>();\n\n\n\n let app_root = application_root_dir().unwrap();\n\n //let path = format!(\"{}/resources/display_config.ron\", root_dir);\n", "file_path": "src/main.rs", "rank": 12, "score": 37647.311842987794 }, { "content": "fn main() -> amethyst::Result<()> {\n\n amethyst::start_logger(Default::default());\n\n info!(\"starting up\");\n\n \n\n let game_data = GameDataBuilder::default()\n\n .with_bundle(TransformBundle::new())?;\n\n\n\n\n\n info!(\"Generate map\");\n\n let tiles_x = ARENA_WIDTH as usize / 32;\n\n let tiles_y = ARENA_HEIGHT as usize / 32;\n\n let scene = Example {\n\n map: build_map(tiles_x, tiles_y),\n\n room_coordinate: (0, 0),\n\n spawn_player: None,\n\n dispatcher: None,\n\n };\n\n\n\n info!(\"Create game\");\n\n let mut game = Application::new(\"./\", scene, game_data)?;\n\n\n\n info!(\"Run game\");\n\n game.run();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 27667.30404666417 }, { "content": " coordinate_stack.push(coordinate);\n\n coordinate = new_coordinate;\n\n let mut new_room = RoomGeneration::default();\n\n new_room.width = width;\n\n new_room.height = height;\n\n direction.reverse().set_exit(&mut new_room);\n\n map.add_room(coordinate, new_room);\n\n }\n\n }\n\n coordinate_stack\n\n}\n\nimpl DungeonGen {\n\n \n\n\n\n pub fn generate(&self, rng: &mut impl Rng, width: usize, height: usize) -> Map<RoomGeneration> {\n\n let mut map = Map::new();\n\n let mut coordinate_stack: Vec<(i32, i32)> = Vec::new();\n\n let mut coordinate = (0, 0);\n\n let mut room = RoomGeneration::default();\n\n room.width = width;\n", "file_path": "src/map.rs", "rank": 22, "score": 23.27820493544079 }, { "content": "}\n\n\n\n/*impl Default for RoomExitSystem {\n\n fn default() -> Self {\n\n RoomExitSystem {\n\n reader: None\n\n }\n\n 
}\n\n}*/\n\nimpl RoomExitSystem {\n\n pub fn new(world: &mut World) -> Self {\n\n <Self as System<'_>>::SystemData::setup(world);\n\n let reader = world.fetch_mut::<ProximityEvents>().register_reader();\n\n RoomExitSystem {\n\n reader\n\n }\n\n }\n\n}\n\n\n\nimpl<'s> System<'s> for RoomExitSystem {\n", "file_path": "src/roomexit.rs", "rank": 23, "score": 17.997214654809625 }, { "content": "use amethyst::core::shrev::{ReaderId};\n\nuse amethyst::ecs::{Write, ReadStorage, System, Read};\n\nuse amethyst::ecs::{Component, VecStorage};\n\nuse amethyst::core::transform::Transform;\n\nuse amethyst::prelude::*;\n\nuse amethyst::ecs::SystemData;\n\n\n\n\n\nuse specs_physics::events::{ProximityEvent, ProximityEvents};\n\nuse crate::room::DestRoom;\n\nuse crate::room;\n\n\n\nimpl Component for DestRoom {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\npub struct PerformRoomExit(pub room::DestRoom, pub (i32, i32));\n\n\n\npub struct RoomExitSystem {\n\n reader: ReaderId<ProximityEvent>\n", "file_path": "src/roomexit.rs", "rank": 24, "score": 15.881197180322733 }, { "content": " room.height = height;\n\n map.add_room(coordinate, room);\n\n for _ in 0..self.splits {\n\n let mut new_stack = generate_corridor(&mut map, rng, width, height, \n\n self.corridor_length - coordinate_stack.len() as u32, coordinate);\n\n coordinate_stack.append(&mut new_stack);\n\n println!(\"Stack size before: {}\", coordinate_stack.len());\n\n for _ in 0..rng.gen_range(0, coordinate_stack.len() - 1) {\n\n coordinate_stack.pop();\n\n }\n\n coordinate = *coordinate_stack.last().unwrap_or(&(0, 0));\n\n println!(\"Stack size after: {}\", coordinate_stack.len());\n\n }\n\n map\n\n }\n\n}\n\n\n\nimpl Map<RoomGeneration> {\n\n pub fn generate_map(&self, rng: &mut impl Rng) -> Map<Room> {\n\n let mut map = Map::new();\n\n for key in self.rooms.keys() {\n\n let value = self.rooms.get(key).unwrap();\n\n map.add_room(*key, value.generate_room(rng));\n\n }\n\n map\n\n }\n\n}\n", "file_path": "src/map.rs", "rank": 25, "score": 15.295815780841231 }, { "content": "use std::collections::BTreeMap;\n\nuse crate::room::{Room, RoomGeneration};\n\nuse rand::prelude::*;\n\nuse rand::distributions::Standard;\n\n\n\npub type Coordinate = (i32, i32);\n\n\n\n#[derive(Clone)]\n\npub struct Map<T> {\n\n rooms: BTreeMap<Coordinate, T>\n\n}\n\n\n\npub type RoomMap = Map<Room>;\n\n\n\nimpl<T> Map<T> {\n\n pub fn new() -> Map<T> {\n\n Map {\n\n rooms: BTreeMap::new()\n\n }\n\n }\n", "file_path": "src/map.rs", "rank": 26, "score": 14.747699136005503 }, { "content": "\n\n pub fn add_room(&mut self, coordinate: Coordinate, room: T) {\n\n self.rooms.insert(coordinate, room);\n\n }\n\n\n\n pub fn get_room(&self, coordinate: Coordinate) -> Option<&T> {\n\n self.rooms.get(&coordinate)\n\n }\n\n\n\n pub fn has_room(&self, coordinate: Coordinate) -> bool {\n\n self.rooms.contains_key(&coordinate)\n\n }\n\n\n\n pub fn get_room_mut(&mut self, coordinate: Coordinate) -> Option<&mut T> {\n\n self.rooms.get_mut(&coordinate)\n\n }\n\n\n\n pub fn get_room_or_insert(&mut self, coordinate: Coordinate, f: impl FnOnce() -> T) -> &mut T {\n\n self.rooms.entry(coordinate).or_insert_with(f)\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct DungeonGen {\n\n pub corridor_length: u32,\n\n pub splits: u32,\n\n}\n\n\n", "file_path": "src/map.rs", "rank": 27, "score": 13.95756075375151 }, { "content": " \"bush\",\n\n ).with(damage::Destroyable { health: 2.0 })\n\n .build();\n\n },\n\n room::RoomField::Player => {\n\n if let None = scene.spawn_player {\n\n helper::create_character(\n\n 
world.create_entity(),\n\n &sprite_animations,\n\n pixel_pos,\n\n \"healer\",\n\n )\n\n .with(charactermove::UserMove)\n\n // .with(damage::Destroyer { damage: 1.0})\n\n .build();\n\n }\n\n },\n\n room::RoomField::Exit(direction) => {\n\n helper::create_walkable_solid(\n\n world.create_entity(),\n", "file_path": "src/main.rs", "rank": 28, "score": 13.913306485486205 }, { "content": " pub pause: bool,\n\n}\n\n\n\nimpl SpriteAnimation {\n\n pub fn new(keys: Vec<usize>, speed: f32) -> Self {\n\n SpriteAnimation {\n\n index: 0,\n\n keys,\n\n speed,\n\n time: 0.0,\n\n pause: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Component for SpriteAnimation {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// System to handle sprite animation.\n", "file_path": "src/spriteanimation.rs", "rank": 29, "score": 13.813841951043461 }, { "content": " pub width: u32,\n\n pub height: u32,\n\n pub offset: Option<(f32, f32)>,\n\n}\n\n\n\n/// RON file definition.\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct AnimationData {\n\n pub texture_path: String,\n\n pub texture_width: u32,\n\n pub texture_height: u32,\n\n pub sprites: Vec<SpriteDefinition>,\n\n pub animations: BTreeMap<String, Vec<usize>>,\n\n pub images: BTreeMap<String, usize>,\n\n}\n\n\n\nimpl Default for AnimationData {\n\n fn default() -> Self {\n\n AnimationData {\n\n texture_path: String::new(),\n", "file_path": "src/spriteanimationloader.rs", "rank": 30, "score": 12.899774298629929 }, { "content": " type SystemData = (\n\n Read<'s, ProximityEvents>,\n\n ReadStorage<'s, DestRoom>,\n\n Write<'s, Option<PerformRoomExit>>,\n\n ReadStorage<'s, Transform>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (channel, destrooms, mut perform_room_exit, transforms,): Self::SystemData,\n\n ) {\n\n for collision in channel.read(&mut self.reader) {\n\n let solid_entity = collision.collider1;\n\n if let Some(exit) = destrooms.get(solid_entity) {\n\n if let Some(_transform) = transforms.get(solid_entity) {\n\n let position = exit.spawn_point();\n\n *perform_room_exit = Some(PerformRoomExit(*exit, position));\n\n }\n\n }\n\n let solid_entity = collision.collider2;\n\n if let Some(exit) = destrooms.get(solid_entity) {\n\n if let Some(_transform) = transforms.get(solid_entity) {\n\n let position = exit.spawn_point();\n\n *perform_room_exit = Some(PerformRoomExit(*exit, position));\n\n }\n\n }\n\n }\n\n }\n\n}", "file_path": "src/roomexit.rs", "rank": 31, "score": 12.64471505394269 }, { "content": " Direction::North => (0, -1),\n\n Direction::South => (0, 1),\n\n Direction::East => (1, 0),\n\n Direction::West => (-1, 0),\n\n }\n\n }\n\n\n\n fn add(&self, coordinates: (i32, i32)) -> (i32, i32) {\n\n let relative_pos = self.relative_pos();\n\n (relative_pos.0 + coordinates.0, relative_pos.1 + coordinates.1)\n\n }\n\n\n\n fn set_exit(&self, room_gen: &mut RoomGeneration) {\n\n match self {\n\n Direction::North => room_gen.exit_north = true,\n\n Direction::South => room_gen.exit_south = true,\n\n Direction::East => room_gen.exit_east = true,\n\n Direction::West => room_gen.exit_west = true,\n\n }\n\n }\n", "file_path": "src/map.rs", "rank": 32, "score": 12.496410131316694 }, { "content": "use amethyst::{\n\n core::Transform,\n\n ecs::{Component, DenseVecStorage, Join, System, WriteStorage, ReadStorage},\n\n};\n\nuse specs_physics::PhysicsBody;\n\nuse nalgebra::{Point3};\n\nuse nalgebra::distance;\n\nuse specs_physics::nphysics::algebra::Force3;\n\n\n\npub struct RadialForceField {\n\n strength: f32,\n\n}\n\n\n\nimpl Component for RadialForceField {\n\n type Storage = 
DenseVecStorage<Self>;\n\n}\n\n\n\nimpl RadialForceField {\n\n pub fn new(strength: f32) -> Self {\n\n RadialForceField { strength }\n", "file_path": "src/forces.rs", "rank": 33, "score": 12.3835878658278 }, { "content": "\n\n if let Some(dispatcher) = self.dispatcher.as_mut() {\n\n dispatcher.dispatch(&game_state.world);\n\n }\n\n\n\n\n\n let reset_all = {\n\n let mut perform_room_exits = game_state.world.fetch_mut::<Option<roomexit::PerformRoomExit>>();\n\n if let Some(PerformRoomExit(dest_room, spawn_coordinates)) = &*perform_room_exits {\n\n let room_coordinate = dest_room.to_absolute_coordinates(self.room_coordinate);\n\n println!(\"New coordinate: {:?}\", room_coordinate);\n\n self.room_coordinate = room_coordinate;\n\n self.spawn_player = Some(*spawn_coordinates);\n\n *perform_room_exits = None;\n\n \n\n true\n\n } else {\n\n false\n\n }\n\n };\n\n if reset_all {\n\n game_state.world.delete_all();\n\n initialise_camera(game_state.world);\n\n initialize_test_sprite(self, game_state.world);\n\n }\n\n SimpleTrans::None\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 12.253948201502073 }, { "content": " &sprite_animations,\n\n pixel_pos,\n\n \"brick\",\n\n ).build();\n\n },\n\n room::RoomField::Stone => {\n\n // Add a stone\n\n helper::create_solid(\n\n world.create_entity(),\n\n &sprite_animations,\n\n pixel_pos,\n\n \"stones\",\n\n ).build();\n\n },\n\n room::RoomField::Bush => {\n\n // Add a bush\n\n helper::create_solid(\n\n world.create_entity(),\n\n &sprite_animations,\n\n pixel_pos,\n", "file_path": "src/main.rs", "rank": 35, "score": 11.52581720217135 }, { "content": " texture_width: 0,\n\n texture_height: 0,\n\n sprites: Vec::new(),\n\n animations: BTreeMap::new(),\n\n images: BTreeMap::new(),\n\n }\n\n }\n\n}\n\n\n\n/// Stores all animations and sprites and sprites which can be used\n\n/// ingame.\n\npub struct SpriteAnimationStore {\n\n pub sprite_sheet_handle: SpriteSheetHandle,\n\n pub animations: BTreeMap<String, Vec<usize>>,\n\n pub images: BTreeMap<String, usize>,\n\n}\n\n\n\nimpl SpriteAnimationStore {\n\n pub fn get_sprite_render(&self, name: &str) -> Option<SpriteRender> {\n\n self.images.get(name)\n", "file_path": "src/spriteanimationloader.rs", "rank": 36, "score": 10.953320799582826 }, { "content": " CharacterDirection::Right => \"right\",\n\n CharacterDirection::Up => \"up\",\n\n CharacterDirection::Down => \"down\",\n\n }\n\n }\n\n}\n\n\n\n/// Component which holds information for characters.\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct CharacterMeta {\n\n pub direction: CharacterDirection,\n\n pub moving: bool,\n\n}\n\n\n\nimpl CharacterMeta {\n\n /// Creatre a new character meta which is not warlking.\n\n pub fn new(direction: CharacterDirection) -> Self {\n\n CharacterMeta {\n\n direction,\n\n moving: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Component for CharacterMeta {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n", "file_path": "src/charactermeta.rs", "rank": 37, "score": 10.584627511721777 }, { "content": "pub struct SpriteAnimationSystem;\n\n\n\nimpl<'s> System<'s> for SpriteAnimationSystem {\n\n type SystemData = (\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, SpriteAnimation>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut sprite_renders, mut sprite_animations, time): Self::SystemData) {\n\n for (mut sprite_render, mut sprite_animation) in\n\n (&mut sprite_renders, &mut sprite_animations).join()\n\n {\n\n if !sprite_animation.pause {\n\n sprite_animation.time += time.delta_seconds();\n\n while 
sprite_animation.time > sprite_animation.speed {\n\n sprite_animation.index =\n\n (sprite_animation.index + 1) % (sprite_animation.keys.len() as u32);\n\n sprite_render.sprite_number =\n\n sprite_animation.keys[sprite_animation.index as usize];\n\n sprite_animation.time -= sprite_animation.speed;\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/spriteanimation.rs", "rank": 38, "score": 10.280987439378368 }, { "content": "pub struct Destroyable {\n\n pub health: f32\n\n}\n\nimpl Component for Destroyable {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n// \n\npub struct DestroySystem {\n\n reader: Option<ReaderId<ProximityEvent>>\n\n}\n\nimpl Default for DestroySystem {\n\n fn default() -> Self {\n\n DestroySystem {\n\n reader: None\n\n }\n\n }\n\n}\n\nimpl<'s> System<'s> for DestroySystem {\n\n type SystemData = (\n", "file_path": "src/damage.rs", "rank": 39, "score": 10.170469591808136 }, { "content": " pub fn new(speed: f32) -> Self {\n\n CharacterMove { speed }\n\n }\n\n}\n\n\n\nimpl Component for CharacterMove {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// Component which lets the user control the entity.\n\npub struct UserMove;\n\nimpl Component for UserMove {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// System to handle user input and set the speed.\n\npub struct CharacterMoveSystem {\n\n attack_released: bool\n\n}\n\nimpl Default for CharacterMoveSystem {\n", "file_path": "src/charactermove.rs", "rank": 40, "score": 9.749176150554833 }, { "content": " ReadStorage<'s, CharacterMove>,\n\n );\n\n\n\n fn run(&mut self, (simple_enemies, transforms, mut physics, charactermoves): Self::SystemData) {\n\n /* Identify character position */\n\n let mut character = None;\n\n for (transform, charactermove) in (&transforms, &charactermoves).join() {\n\n character = Some(transform);\n\n }\n\n \n\n /* Let the character walk. */\n\n for (mut )\n\n }\n\n}\n", "file_path": "src/simpleenemy.rs", "rank": 41, "score": 9.445596517591138 }, { "content": "//! Module contains the meta informatormation for characters\n\n//!\n\n//! Meta information is the direction the character looks at\n\n//! and if the character is moving.\n\n\n\nuse amethyst::ecs::{Component, DenseVecStorage};\n\n\n\n/// Direction on a 2D map.\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub enum CharacterDirection {\n\n Left,\n\n Right,\n\n Up,\n\n Down,\n\n}\n\n\n\nimpl CharacterDirection {\n\n pub fn as_str(&self) -> &'static str {\n\n match self {\n\n CharacterDirection::Left => \"left\",\n", "file_path": "src/charactermeta.rs", "rank": 42, "score": 9.420766912051615 }, { "content": "//! 
Performs a sword attack\n\n\n\nuse amethyst::{\n\n core::timing::Time,\n\n ecs::{Component, DenseVecStorage, Entities, Join, System, WriteStorage, Read},\n\n};\n\n\n\npub struct DelayedRemove {\n\n pub current: f32,\n\n pub end: f32,\n\n}\n\nimpl Component for DelayedRemove {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\nimpl DelayedRemove {\n\n pub fn new(end: f32) -> Self {\n\n DelayedRemove { current: 0.0, end }\n\n }\n\n}\n\n\n", "file_path": "src/delayedremove.rs", "rank": 43, "score": 9.380995553752955 }, { "content": " }\n\n}\n\n\n\npub struct ForceSystem;\n\n\n\nimpl<'s> System<'s> for ForceSystem {\n\n type SystemData = (\n\n WriteStorage<'s, PhysicsBody<f32>>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, RadialForceField>,\n\n );\n\n\n\n fn run(&mut self, (mut physics_bodies, transforms, radial_force_fields): Self::SystemData) {\n\n for (force_transform, radial_force_field) in (&transforms, &radial_force_fields).join() {\n\n for (body_transform, physics) in (&transforms, &mut physics_bodies).join() {\n\n let body_position = \n\n Point3::from(*(body_transform.translation()));\n\n let force_position = \n\n Point3::from(*(force_transform.translation()));\n\n let dist : f32 = distance(&force_position, &body_position);\n", "file_path": "src/forces.rs", "rank": 44, "score": 9.362658140867701 }, { "content": "//! Support to handle animations for sprites.\n\n\n\nuse amethyst::{\n\n core::timing::Time,\n\n ecs::prelude::{Join, Read, System, WriteStorage},\n\n ecs::{Component, DenseVecStorage},\n\n renderer::SpriteRender,\n\n};\n\n\n\n/// Component which holds a sprite animation\n\n///\n\n/// This includes the sprite indices for the animation, the\n\n/// animation speed, if the animation should pause and other\n\n/// information required to draw a sprite animation.\n\n#[derive(Default)]\n\npub struct SpriteAnimation {\n\n pub index: u32,\n\n pub keys: Vec<usize>,\n\n pub speed: f32,\n\n pub time: f32,\n", "file_path": "src/spriteanimation.rs", "rank": 45, "score": 9.308819857934566 }, { "content": "impl Component for CharacterAnimation {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// System to set the animations based on the CharacterMeta\n\npub struct CharacterAnimationSystem;\n\nimpl<'s> System<'s> for CharacterAnimationSystem {\n\n type SystemData = (\n\n WriteStorage<'s, CharacterAnimation>,\n\n ReadStorage<'s, CharacterMeta>,\n\n WriteStorage<'s, SpriteAnimation>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (mut character_animations, character_metas, mut sprite_animations): Self::SystemData,\n\n ) {\n\n for (mut character_animation, character_meta, mut sprite_animation) in (\n\n &mut character_animations,\n\n &character_metas,\n", "file_path": "src/characteranimation.rs", "rank": 46, "score": 8.75387987722022 }, { "content": "//! 
ECS to set required animations\n\n//!\n\n//!\n\n\n\nuse amethyst::ecs::{Component, DenseVecStorage};\n\nuse amethyst::ecs::{Join, ReadStorage, System, WriteStorage};\n\n\n\nuse crate::charactermeta::CharacterDirection;\n\nuse crate::charactermeta::CharacterMeta;\n\nuse crate::spriteanimation::SpriteAnimation;\n\n\n\n/// Component which contains the sprite animations.\n\npub struct CharacterAnimation {\n\n pub prev_character_meta: crate::charactermeta::CharacterMeta,\n\n pub walk_up_animation: Vec<usize>,\n\n pub walk_down_animation: Vec<usize>,\n\n pub walk_left_animation: Vec<usize>,\n\n pub walk_right_animation: Vec<usize>,\n\n}\n\n\n", "file_path": "src/characteranimation.rs", "rank": 47, "score": 8.640670921550036 }, { "content": " RenderToWindow::from_config_path(display_config_path).unwrap()\n\n .with_clear([0.34, 0.36, 0.52, 1.0]),\n\n )\n\n .with_plugin(RenderFlat2D::default())\n\n .build(world, &mut dispatcher_builder).unwrap();\n\n \n\n let mut dispatcher = dispatcher_builder\n\n .with_pool((*world.read_resource::<ArcThreadPool>()).clone())\n\n .build();\n\n dispatcher.setup(world);\n\n self.dispatcher = Some(dispatcher);\n\n\n\n info!(\"Initialize camera\");\n\n initialise_camera(world);\n\n info!(\"Initialize sprites\");\n\n initialize_test_sprite(self, world);\n\n }\n\n\n\n fn update(&mut self, game_state: &mut StateData<GameData>) -> SimpleTrans {\n\n use crate::roomexit::PerformRoomExit;\n", "file_path": "src/main.rs", "rank": 48, "score": 8.534736123315463 }, { "content": "//! Handle damages\n\n\n\nuse amethyst::{\n\n ecs::{ReadStorage, System, WriteStorage, Component, DenseVecStorage, Entities, Write},\n\n};\n\nuse specs_physics::events::{ProximityEvent, ProximityEvents};\n\nuse amethyst::core::shrev::{ReaderId};\n\n\n\n\n\n\n\n\n\n/// Destroys entities which are destroyable.\n\npub struct Destroyer {\n\n pub damage: f32\n\n}\n\nimpl Component for Destroyer {\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// Will be destroyed if collides with a Destroyer.\n", "file_path": "src/damage.rs", "rank": 49, "score": 8.244914739773714 }, { "content": "use amethyst::{\n\n prelude::*,\n\n core::{Transform},\n\n ecs::{Component, NullStorage, Join, ParJoin, Read, Write, ReadStorage, System, WriteStorage, Entities},\n\n};\n\nuse crate::physics::Physics;\n\nuse crate::charactermove::CharacterMove;\n\n\n\n#[derive(Default)]\n\npub struct SimpleEnemy;\n\nimpl Component for SimpleEnemy {\n\n type Storage = NullStorage<Self>;\n\n}\n\n\n\npub struct SimpleEnemySystem;\n\nimpl<'s> System<'s> for SimpleEnemySystem {\n\n type SystemData = (\n\n ReadStorage<'s, SimpleEnemy>,\n\n ReadStorage<'s, Transform>,\n\n WriteStorage<'s, Physics>,\n", "file_path": "src/simpleenemy.rs", "rank": 50, "score": 8.218334194464006 }, { "content": "use amethyst::{\n\n core::Transform,\n\n core::timing::Time,\n\n ecs::{Entities, System, WriteStorage, Read, ReadExpect},\n\n renderer::SpriteRender,\n\n};\n\nuse specs_physics::{PhysicsBody, PhysicsBodyBuilder, nphysics::object::BodyStatus};\n\nuse crate::spriteanimationloader::SpriteAnimationStore;\n\nuse crate::delayedremove::DelayedRemove;\n\nuse rand;\n\nuse rand::Rng;\n\n\n\npub struct SpawnParticleSystem {\n\n pub average_part_spawn: f32,\n\n pub min_x: f32,\n\n pub max_x: f32,\n\n pub min_y: f32,\n\n pub max_y: f32,\n\n pub lifespan: f32,\n\n}\n", "file_path": "src/randomparticles.rs", "rank": 51, "score": 7.579825407840744 }, { "content": " ReadStorage<'s, Destroyer>,\n\n WriteStorage<'s, Destroyable>,\n\n Entities<'s>,\n\n Write<'s, 
ProximityEvents>,\n\n );\n\n\n\n fn run(&mut self, (destroyers, mut destroyables, entities, mut channel): Self::SystemData) { \n\n if let None = self.reader {\n\n self.reader = Some(channel.register_reader());\n\n }\n\n \n\n if let Some(reader) = &mut self.reader {\n\n for collision in channel.read(reader) {\n\n \n\n if let (Some(destroyable), Some(destroyer)) = (destroyables.get_mut(collision.collider1), destroyers.get(collision.collider2)) {\n\n let collider = collision.collider1.clone();\n\n destroyable.health -= destroyer.damage;\n\n if destroyable.health < 0.0 {\n\n if let Err(error) = entities.delete(collider) {\n\n warn!(\"Couldn't remove entity {} with zero health: {}\",\n", "file_path": "src/damage.rs", "rank": 52, "score": 7.422784558543826 }, { "content": " &mut sprite_animations,\n\n )\n\n .join()\n\n {\n\n if character_animation.prev_character_meta != *character_meta {\n\n character_animation.prev_character_meta = character_meta.clone();\n\n let new_animation = match character_meta.direction {\n\n CharacterDirection::Up => character_animation.walk_up_animation.clone(),\n\n CharacterDirection::Down => character_animation.walk_down_animation.clone(),\n\n CharacterDirection::Left => character_animation.walk_left_animation.clone(),\n\n CharacterDirection::Right => character_animation.walk_right_animation.clone(),\n\n };\n\n sprite_animation.index = 0;\n\n sprite_animation.keys = new_animation;\n\n sprite_animation.pause = !character_meta.moving;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/characteranimation.rs", "rank": 53, "score": 7.367390135753751 }, { "content": " &texture_storage,\n\n )\n\n };\n\n let mut sprites = Vec::with_capacity(animations.sprites.len());\n\n for sprite in animations.sprites {\n\n let offset = if let Some((offset_x, offset_y)) = sprite.offset {\n\n [offset_x, offset_y]\n\n } else {\n\n [0.5; 2]\n\n };\n\n sprites.push(Sprite::from_pixel_values(\n\n animations.texture_width,\n\n animations.texture_height,\n\n sprite.width,\n\n sprite.height,\n\n sprite.x,\n\n sprite.y,\n\n offset,\n\n false,\n\n false\n", "file_path": "src/spriteanimationloader.rs", "rank": 54, "score": 7.1295244390756505 }, { "content": "};\n\nuse specs_physics:: {\n\n systems::*,\n\n};\n\n\n\n\n\n\n\npub mod characteranimation;\n\npub mod charactermeta;\n\npub mod charactermove;\n\npub mod damage;\n\npub mod delayedremove;\n\npub mod helper;\n\npub mod spriteanimation;\n\npub mod spriteanimationloader;\n\npub mod swordattack;\n\npub mod room;\n\npub mod map;\n\npub mod roomexit;\n\npub mod forces;\n\npub mod randomparticles;\n\n// pub mod simpleenemy;\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 6.846780029565421 }, { "content": "pub struct DelayedRemoveSystem;\n\nimpl<'s> System<'s> for DelayedRemoveSystem {\n\n type SystemData = (\n\n Read<'s, Time>,\n\n Entities<'s>,\n\n WriteStorage<'s, DelayedRemove>,\n\n );\n\n\n\n fn run(&mut self, (time, entities, mut delayed_removes): Self::SystemData) {\n\n for (delayed_remove, entity) in (&mut delayed_removes, &entities).join() {\n\n delayed_remove.current += time.delta_seconds();\n\n if delayed_remove.current > delayed_remove.end {\n\n info!(\"Delayed remove of {}\", entity.id());\n\n if let Err(error) = entities.delete(entity) {\n\n warn!(\"Delayed remove of {} failed: {}\", entity.id(), error);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/delayedremove.rs", "rank": 56, "score": 6.8381773190894055 }, { "content": " .map(|index| SpriteRender {\n\n sprite_sheet: self.sprite_sheet_handle.clone(),\n\n sprite_number: 
*index\n\n })\n\n }\n\n}\n\n\n\n/// Use an AnimationData and create animations and sprite images based on sprite names.\n\n///\n\n/// If a name ends with underscores followed by numbers it is treated as part of an animations.\n\n/// In this case it will add it to the animations, otherwise it will simply store the index in\n\n/// the images tree map under that name.\n", "file_path": "src/spriteanimationloader.rs", "rank": 57, "score": 6.610647691414866 }, { "content": "\n\nimpl<'s> System<'s> for SpawnParticleSystem {\n\n type SystemData = (\n\n Read<'s, Time>,\n\n WriteStorage<'s, PhysicsBody<f32>>,\n\n WriteStorage<'s, Transform>,\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, DelayedRemove>,\n\n ReadExpect<'s, SpriteAnimationStore>,\n\n Entities<'s>,\n\n );\n\n\n\n fn run(&mut self, (\n\n time, \n\n mut physics_bodies, \n\n mut transforms, \n\n mut sprite_render, \n\n mut delayed_removes,\n\n sprite_animation_store, \n\n entities): Self::SystemData) {\n", "file_path": "src/randomparticles.rs", "rank": 58, "score": 6.302674988820476 }, { "content": "//! ECS to handle character movement and input from the user\n\n\n\nuse amethyst::core::Transform;\n\nuse amethyst::ecs::{Component, DenseVecStorage, LazyUpdate};\n\nuse amethyst::ecs::{Join, Read, ReadStorage, System, WriteStorage, ReadExpect};\n\nuse amethyst::input::{InputHandler, StringBindings};\n\nuse specs_physics::PhysicsBody;\n\nuse specs_physics::nphysics::algebra::Velocity3;\n\n\n\nuse crate::charactermeta::{CharacterDirection, CharacterMeta};\n\nuse crate::swordattack::sword_attack;\n\nuse crate::spriteanimationloader::SpriteAnimationStore;\n\n\n\n/// Ability to let the character move.\n\npub struct CharacterMove {\n\n pub speed: f32,\n\n}\n\n\n\nimpl CharacterMove {\n\n /// Create a new CharacterMove which contains the given speed.\n", "file_path": "src/charactermove.rs", "rank": 59, "score": 6.2246088217794595 }, { "content": "//! 
Load animations and sprites from a ron file.\n\n\n\nuse amethyst::{\n\n assets::{AssetStorage, Loader},\n\n config::Config,\n\n prelude::*,\n\n renderer::{ImageFormat, Sprite, SpriteSheet, /*SpriteSheetHandle,*/ Texture, /*TextureMetadata*/\n\n sprite::SpriteSheetHandle, SpriteRender,\n\n },\n\n};\n\nuse regex::Regex;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::BTreeMap;\n\n\n\n/// Definition of one sprite in the RON file.\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct SpriteDefinition {\n\n pub name: String,\n\n pub x: u32,\n\n pub y: u32,\n", "file_path": "src/spriteanimationloader.rs", "rank": 60, "score": 5.448288154964173 }, { "content": " {\n\n let mut velocity_x = 0.0;\n\n let mut velocity_y = 0.0;\n\n let mut movement = false;\n\n if input.axis_value(\"player_move_x\").unwrap() > 0.0 {\n\n character_meta.direction = CharacterDirection::Right;\n\n character_meta.moving = true;\n\n velocity_x += character_move.speed;\n\n movement = true;\n\n }\n\n if input.axis_value(\"player_move_x\").unwrap() < 0.0 {\n\n character_meta.direction = CharacterDirection::Left;\n\n character_meta.moving = true;\n\n velocity_x -= character_move.speed;\n\n movement = true;\n\n }\n\n if input.axis_value(\"player_move_y\").unwrap() > 0.0 {\n\n character_meta.direction = CharacterDirection::Up;\n\n character_meta.moving = true;\n\n velocity_y += character_move.speed;\n", "file_path": "src/charactermove.rs", "rank": 61, "score": 5.079743780465826 }, { "content": " &mut self,\n\n (\n\n mut character_meta,\n\n mut physics_body,\n\n character_moves,\n\n user_moves,\n\n transforms,\n\n input,\n\n lazy_update,\n\n sprite_animation_store,\n\n ): Self::SystemData,\n\n ) {\n\n for (character_meta, physics_body, character_move, _, transform) in (\n\n &mut character_meta,\n\n &mut physics_body,\n\n &character_moves,\n\n &user_moves,\n\n &transforms,\n\n )\n\n .join()\n", "file_path": "src/charactermove.rs", "rank": 62, "score": 5.036297085108167 }, { "content": " pixel_pos,\n\n )\n\n .with(direction)\n\n // .with(damage::Destroyer { damage: 1.0})\n\n .build();\n\n },\n\n }\n\n }\n\n if let Some(player_coordinate) = scene.spawn_player {\n\n info!(\"Setting player coordinates\");\n\n let pixel_pos = (\n\n player_coordinate.0 as f32 * 32.0 + 16.0,\n\n player_coordinate.1 as f32 * 32.0 + 16.0,\n\n );\n\n helper::create_character(\n\n world.create_entity(),\n\n &sprite_animations,\n\n pixel_pos,\n\n \"healer\",\n\n )\n\n .with(charactermove::UserMove)\n\n // .with(damage::Destroyer { damage: 1.0})\n\n .build();\n\n }\n\n world.insert(sprite_animations);\n\n info!(\"Room setup complete\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 5.001888311690481 }, { "content": " movement = true;\n\n }\n\n if input.axis_value(\"player_move_y\").unwrap() < 0.0 {\n\n character_meta.direction = CharacterDirection::Down;\n\n character_meta.moving = true;\n\n velocity_y -= character_move.speed;\n\n movement = true;\n\n } \n\n if !movement {\n\n character_meta.moving = false;\n\n }\n\n physics_body.velocity = Velocity3::linear(velocity_x, velocity_y, 0.0);\n\n if input.action_is_down(\"attack\").unwrap() {\n\n if self.attack_released {\n\n self.attack_released = false;\n\n let transform: Transform = transform.clone();\n\n let direction: CharacterDirection = character_meta.direction.clone();\n\n let sprite_name = format!(\"sword-attack-{}\", direction.as_str());\n\n let sprite = sprite_animation_store.get_sprite_render(&sprite_name).unwrap();\n\n lazy_update.exec_mut(move |world| {\n", 
"file_path": "src/charactermove.rs", "rank": 64, "score": 4.776164349903117 }, { "content": "//! Contains helper functions\n\n\n\nuse crate::characteranimation::CharacterAnimation;\n\nuse crate::charactermeta::CharacterDirection;\n\nuse crate::charactermeta::CharacterMeta;\n\nuse crate::charactermove::CharacterMove;\n\nuse crate::spriteanimation::SpriteAnimation;\n\nuse crate::spriteanimationloader::SpriteAnimationStore;\n\nuse crate::forces::RadialForceField;\n\nuse amethyst::{\n\n prelude::*,\n\n core::transform::Transform,\n\n ecs::world::EntityBuilder,\n\n renderer::{SpriteRender},\n\n};\n\nuse specs_physics::{PhysicsBodyBuilder, PhysicsBody,\n\n nphysics::object::BodyStatus,\n\n nalgebra::{Vector3},\n\n PhysicsColliderBuilder,\n\n PhysicsCollider,\n", "file_path": "src/helper.rs", "rank": 65, "score": 4.732327967281879 }, { "content": " let delta = time.delta_seconds();\n\n let mut rng = rand::prelude::thread_rng();\n\n let random_number: f32 = rng.gen();\n\n let probability_to_spawn = delta / self.average_part_spawn;\n\n if random_number < probability_to_spawn {\n\n let entity = entities.create();\n\n\n\n let mut transform = Transform::default();\n\n let x_pos = rng.gen::<f32>() * (self.max_x - self.min_x) + self.min_x;\n\n let y_pos = rng.gen::<f32>() * (self.max_y - self.min_y) + self.min_y;\n\n transform.set_translation_xyz(x_pos, y_pos, 0.0);\n\n transforms.insert(entity, transform).unwrap();\n\n\n\n let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Dynamic)\n\n .lock_rotations(true)\n\n .build();\n\n physics_bodies.insert(entity, physics_body).unwrap();\n\n\n\n let sprite = sprite_animation_store.get_sprite_render(\"particle\").unwrap();\n\n sprite_render.insert(entity, sprite).unwrap();\n\n\n\n let delayed_remove = DelayedRemove::new(self.lifespan);\n\n delayed_removes.insert(entity, delayed_remove).unwrap();\n\n }\n\n }\n\n}", "file_path": "src/randomparticles.rs", "rank": 66, "score": 4.470300848591364 }, { "content": " .animations\n\n .get(&animation_right)\n\n .map(|x| x.clone())\n\n .unwrap_or(vec![0]),\n\n };\n\n let sprite_render = SpriteRender {\n\n sprite_sheet: animations.sprite_sheet_handle.clone(),\n\n sprite_number: 0,\n\n };\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, -y);\n\n\n\n let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Dynamic)\n\n .lock_rotations(true)\n\n .build();\n\n let physics_collider: PhysicsCollider<f32> =\n\n PhysicsColliderBuilder::from(Shape::Cuboid {\n\n half_extents: Vector3::new(13.0, 13.0, 300.0)\n\n })\n\n .angular_prediction(0.0)\n", "file_path": "src/helper.rs", "rank": 67, "score": 3.794150092140494 }, { "content": " collider.id(), error);\n\n }\n\n }\n\n }\n\n if let (Some(destroyable), Some(destroyer)) = (destroyables.get_mut(collision.collider2), destroyers.get(collision.collider1)) {\n\n info!(\"Damage Collision\");\n\n let collider = collision.collider2.clone();\n\n destroyable.health -= destroyer.damage;\n\n if destroyable.health < 0.0 {\n\n if let Err(error) = entities.delete(collider) {\n\n warn!(\"Couldn't remove entity {} with zero health: {}\",\n\n collider.id(), error);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/damage.rs", "rank": 68, "score": 3.6709845500240053 }, { "content": " fn default() -> Self {\n\n CharacterMoveSystem {\n\n attack_released: true\n\n }\n\n }\n\n}\n\n\n\nimpl<'s> System<'s> for CharacterMoveSystem {\n\n type SystemData = (\n\n WriteStorage<'s, CharacterMeta>,\n\n WriteStorage<'s, 
PhysicsBody<f32>>,\n\n ReadStorage<'s, CharacterMove>,\n\n ReadStorage<'s, UserMove>,\n\n ReadStorage<'s, Transform>,\n\n Read<'s, InputHandler<StringBindings>>,\n\n Read<'s, LazyUpdate>,\n\n ReadExpect<'s, SpriteAnimationStore>,\n\n );\n\n\n\n fn run(\n", "file_path": "src/charactermove.rs", "rank": 69, "score": 3.3342853788562383 }, { "content": " sprite_animation.pause = true;\n\n let character_meta = CharacterMeta::new(CharacterDirection::Down);\n\n let character_animation = CharacterAnimation {\n\n prev_character_meta: character_meta.clone(),\n\n walk_up_animation: animations\n\n .animations\n\n .get(&animation_up)\n\n .map(|x| x.clone())\n\n .unwrap_or(vec![0]),\n\n walk_down_animation: animations\n\n .animations\n\n .get(&animation_down)\n\n .map(|x| x.clone())\n\n .unwrap_or(vec![0]),\n\n walk_left_animation: animations\n\n .animations\n\n .get(&animation_left)\n\n .map(|x| x.clone())\n\n .unwrap_or(vec![0]),\n\n walk_right_animation: animations\n", "file_path": "src/helper.rs", "rank": 70, "score": 2.997991734965993 }, { "content": " .with(SyncParametersToPhysicsSystem::<f32>::default(),\n\n \"sync_gravity_to_physics_system\",\n\n &[],\n\n )\n\n .with(PhysicsStepperSystem::<f32>::default(),\n\n \"physics_stepper_system\",\n\n &[\n\n \"sync_bodies_to_physics_system\",\n\n \"sync_colliders_to_physics_system\",\n\n \"sync_gravity_to_physics_system\",\n\n ],\n\n )\n\n .with(SyncBodiesFromPhysicsSystem::<f32, Transform>::default(),\n\n \"sync_bodies_from_physics_system\",\n\n &[\"physics_stepper_system\"],\n\n )\n\n .with(roomexit::RoomExitSystem::new(world), \"roomexit\", &[\"sync_bodies_from_physics_system\"])\n\n .with(damage::DestroySystem::default(), \"destroy\", &[\"sync_bodies_from_physics_system\"]);\n\n RenderingBundle::<DefaultBackend>::new()\n\n .with_plugin(\n", "file_path": "src/main.rs", "rank": 71, "score": 2.8779793755105167 }, { "content": "use crate::{\n\n charactermeta::CharacterDirection, damage::Destroyer, delayedremove::DelayedRemove,\n\n};\n\nuse specs_physics::{PhysicsBodyBuilder, PhysicsBody,\n\n nphysics::object::BodyStatus,\n\n nalgebra::{Vector3},\n\n PhysicsColliderBuilder,\n\n PhysicsCollider,\n\n colliders::Shape,\n\n};\n\nuse amethyst::{core::Transform, ecs::world::World, prelude::*, renderer::SpriteRender};\n\n\n", "file_path": "src/swordattack.rs", "rank": 72, "score": 2.826946190137275 }, { "content": "# License of the code\n\n\n\nMIT License\n\n\n\nCopyright (c) 2019 Simon Goller\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n\n\n\n\n# Images\n\nThe female healer sprites were provided by Antifaria on opengamesart.org.\n\nIt can be found here: https://opengameart.org/content/antifareas-rpg-sprite-set-1-enlarged-w-transparent-background-fixed\n\n\n\nThanks to the autor since I have terrible drawing skills :).\n", "file_path": "LICENSE.md", "rank": 73, "score": 2.7099579198374246 }, { "content": " ));\n\n }\n\n let sprite_sheet = SpriteSheet {\n\n texture: texture_handle,\n\n sprites,\n\n };\n\n\n\n let sprite_sheet_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n loader.load_from_data(\n\n sprite_sheet,\n\n (),\n\n &world.read_resource::<AssetStorage<SpriteSheet>>(),\n\n )\n\n };\n\n\n\n SpriteAnimationStore {\n\n sprite_sheet_handle,\n\n animations: animations.animations.clone(),\n\n images: animations.images.clone(),\n\n }\n\n}\n", "file_path": "src/spriteanimationloader.rs", "rank": 74, "score": 2.580295199757064 }, { "content": "\n\n fn reverse(&self) -> Direction {\n\n match self {\n\n Direction::North => Direction::South,\n\n Direction::South => Direction::North,\n\n Direction::East => Direction::West,\n\n Direction::West => Direction::East,\n\n }\n\n }\n\n}\n", "file_path": "src/map.rs", "rank": 75, "score": 2.4820243133089352 }, { "content": " let binding_path = app_root.join(\"resources/binding_config.ron\");\n\n let display_config_path = app_root.join(\"resources/display_config.ron\");\n\n let input_bundle =\n\n InputBundle::<StringBindings>::new().with_bindings_from_file(binding_path).unwrap();\n\n\n\n let mut dispatcher_builder = ecs::DispatcherBuilder::new();\n\n input_bundle.build(world, &mut dispatcher_builder).unwrap();\n\n //.with_bundle(input_bundle)?\n\n //.with(physics::PhysicsSystem, \"physics\", &[])\n\n let mut dispatcher_builder =\n\n dispatcher_builder.with(delayedremove::DelayedRemoveSystem, \"delayed_remove\", &[])\n\n .with(\n\n spriteanimation::SpriteAnimationSystem,\n\n \"sprite_animation\",\n\n &[],\n\n )\n\n .with(randomparticles::SpawnParticleSystem {\n\n average_part_spawn: 0.01,\n\n min_x: 0.0,\n\n max_x: 640.0,\n", "file_path": "src/main.rs", "rank": 76, "score": 2.34257733356398 }, { "content": "extern crate amethyst;\n\nextern crate nalgebra as na;\n\nextern crate regex;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate specs_physics;\n\n\n\nuse amethyst::{\n\n input::{InputBundle, StringBindings},\n\n core::transform::{Transform, TransformBundle},\n\n prelude::*,\n\n renderer::{\n\n Camera, RenderToWindow, RenderFlat2D, RenderingBundle,\n\n types::DefaultBackend,\n\n },\n\n utils::application_root_dir,\n\n ecs,\n\n core::bundle::SystemBundle,\n\n core::ArcThreadPool,\n", "file_path": "src/main.rs", "rank": 77, "score": 2.196849110135132 }, { "content": " // Do not apply force to yourself\n\n if dist > 0.1 {\n\n let applied_force_abs = radial_force_field.strength / dist / dist;\n\n let force_vector = (body_position - force_position)\n\n .normalize() * applied_force_abs; \n\n let force = Force3::linear(force_vector);\n\n physics.apply_external_force(&force);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/forces.rs", "rank": 78, "score": 2.0738307857258373 }, { "content": " .build();\n\n\n\n println!(\"Create character end\");\n\n\n\n entity_builder\n\n 
.with(sprite_render)\n\n .with(transform)\n\n .with(sprite_animation)\n\n //.with(Transparent)\n\n .with(CharacterMove::new(128.0))\n\n .with(character_meta)\n\n .with(character_animation)\n\n // .with(Physics::new())\n\n // .with(BoundingRect::new(left, right, bottom, top))\n\n .with(physics_body)\n\n .with(physics_collider)\n\n .with(RadialForceField::new(20000000.0))\n\n \n\n}\n\n\n", "file_path": "src/helper.rs", "rank": 79, "score": 1.9858944979918798 }, { "content": " colliders::Shape,\n\n};\n\n/// Assembles a character on the map\n\n///\n\n/// Assigns the components to the EntityBuilder which are required\n\n/// to have a moving character on the screen.\n\n///\n\n/// For the animations, it requires to have animation names following\n\n/// this pattern:\n\n/// * (name)_walk_up\n\n/// * (name)_walk_down\n\n/// * (name)_walk_left\n\n/// * (name)_walk_right\n\n///\n\n/// ## Examples\n\n/// ```\n\n/// use helper::create_character;\n\n///\n\n/// create_character(\n\n/// world.create_entity(),\n\n/// &animations,\n\n/// (300.0, 300.0),\n\n/// (-16.0, 16.0, -16.0, 16.0),\n\n/// \"hero\"\n\n/// ).build();\n\n/// ```\n", "file_path": "src/helper.rs", "rank": 80, "score": 1.7757816854989579 } ]
Rust
part_8/src/render.rs
CroPo/roguelike-tutorial-2018
6ceceb445087ef42ed988574ef5ac04e341d9889
use tcod::console::{Console, Root, blit, Offscreen}; use map_objects::map::GameMap; use tcod::Map; use ecs::Ecs; use ecs::component::Position; use ecs::component::Render; use ecs::id::EntityId; use tcod::Color; use tcod::colors; use tcod::BackgroundFlag; use tcod::TextAlignment; use ecs::component::Actor; use message::MessageLog; use std::rc::Rc; use textwrap::wrap; use ecs::component::Name; use ecs::component::Inventory; use game_states::GameState; #[derive(PartialEq, Eq, PartialOrd, Ord)] pub enum RenderOrder { Corpse = 1, Item = 2, Actor = 3, } pub fn render_all(ecs: &Ecs, map: &mut GameMap, fov_map: &Map, game_state: &GameState, console: &mut Offscreen, panel: &mut Offscreen, root_console: &mut Root, bar_width: i32, panel_y: i32, log_panel: &MessagePanel, mouse_pos: (i32, i32)) { map.draw(console, fov_map); let component_ids = ecs.get_all_ids::<Render>(); let mut ids_filtered: Vec<&EntityId> = component_ids.iter().filter(|id| { if let Some(p) = ecs.get_component::<Position>(**id) { fov_map.is_in_fov(p.position.0, p.position.1) } else { false } }).collect(); ids_filtered.sort_by(|id_a, id_b| { let comp_a = ecs.get_component::<Render>(**id_a).unwrap(); let comp_b = ecs.get_component::<Render>(**id_b).unwrap(); comp_a.order.cmp(&comp_b.order) }); ids_filtered.iter().for_each(|id| { let c = ecs.get_component::<Render>(**id).unwrap(); c.draw(ecs, console) }); blit(console, (0, 0), (console.width(), console.height()), root_console, (0, 0), 1.0, 1.0); panel.set_default_foreground(colors::LIGHT_GREY); panel.set_default_background(colors::BLACK); panel.clear(); panel.print_ex(1, 0, BackgroundFlag::None, TextAlignment::Left, get_names_under_mouse(ecs, fov_map, mouse_pos)); if let Some(p) = ecs.get_component::<Actor>(ecs.player_entity_id) { panel.set_default_background(colors::BLACK); render_bar(panel, (1, 1), bar_width, "HP", p.hp, p.max_hp, colors::RED, colors::DARK_RED); } log_panel.render(panel); blit(panel, (0, 0), (panel.width(), panel.height()), root_console, (0, panel_y), 1.0, 1.0); if *game_state == GameState::ShowInventory || *game_state == GameState::ShowInventoryDrop { inventory_menu(root_console, ecs, "Inventory", 50, console.width(), console.height()); } root_console.flush() } pub fn clear_all(ecs: &Ecs, console: &mut Console) { ecs.get_all::<Position>().iter().for_each(|(e, _)| { let render_component = ecs.get_component::<Render>(*e); match render_component { Some(r) => { r.clear(ecs, console) } None => () } }); } pub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: i32, max: i32, bar_color: Color, back_color: Color) { let filled_width = (value as f64 / max as f64 * width as f64).round() as i32; panel.set_default_background(back_color); panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen); if filled_width > 0 { panel.set_default_background(bar_color); panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen) } panel.set_default_foreground(colors::WHITE); panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None, TextAlignment::Center, format!("{}: {}/{}", name, value, max)); } fn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String { let mut names = vec![]; ecs.get_all::<Position>().iter().filter(|(_, p)| { p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1 && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1) }).for_each(|(id, _)| { if let Some(n) = ecs.get_component::<Name>(*id) { names.push(n.name.clone()); } }); names.join(",") } pub fn selection_menu(console: &mut Root, title: 
&str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) { let header_height = console.get_height_rect(0, 0, width, screen_height, title); let height = header_height + options.len() as i32; let mut menu_panel = Offscreen::new(width, height); menu_panel.set_default_foreground(colors::WHITE); menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title); let mut y = header_height; let mut letter_index = 'a' as u8; for option in options { let text = format!("({}) {}", letter_index as char, option); menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text); y+=1; letter_index+=1; } let x = screen_width / 2 - width / 2; let y = screen_height / 2 - height / 2; blit(&menu_panel, (0, 0), (width, height), console, (x, y), 1.0, 1.0); } pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) { if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) { let items = if inventory.items.len() == 0 { vec!["Inventory is empty".to_string()] } else { inventory.items.iter().filter(|item_id|{ ecs.has_component::<Name>(**item_id) }).map(|item_id| { ecs.get_component::<Name>(*item_id).unwrap().name.clone() }).collect() }; selection_menu(console, title, items, width, screen_width, screen_height); } } pub struct MessagePanel { pos: (i32, i32), dimensions: (i32, i32), log: Rc<MessageLog>, } impl MessagePanel { pub fn new(pos: (i32, i32), dimensions: (i32, i32), log: Rc<MessageLog>) -> MessagePanel { MessagePanel { pos, dimensions, log, } } pub fn render(&self, panel: &mut Offscreen) { let mut total_lines = 0; 'l: for m in self.log.messages().iter().rev() { let lines = wrap(&m.text, self.dimensions.0 as usize); panel.set_default_foreground(m.color); for l in lines { panel.print_ex(self.pos.0, self.pos.1 + total_lines, BackgroundFlag::None, TextAlignment::Left, l.to_string()); total_lines += 1; if self.pos.1 + total_lines > self.dimensions.1 { break 'l; } } }; } }
use tcod::console::{Console, Root, blit, Offscreen}; use map_objects::map::GameMap; use tcod::Map; use ecs::Ecs; use ecs::component::Position; use ecs::component::Render; use ecs::id::EntityId; use tcod::Color; use tcod::colors; use tcod::BackgroundFlag; use tcod::TextAlignment; use ecs::component::Actor; use message::MessageLog; use std::rc::Rc; use textwrap::wrap; use ecs::component::Name; use ecs::component::Inventory; use game_states::GameState; #[derive(PartialEq, Eq, PartialOrd, Ord)] pub enum RenderOrder { Corpse = 1, Item = 2, Actor = 3, } pub fn render_all(ecs: &Ecs, map: &mut GameMap, fov_map: &Map, game_state: &GameState, console: &mut Offscreen, panel: &mut Offscreen, root_console: &mut Root, bar_width: i32, panel_y: i32, log_panel: &MessagePanel, mouse_pos: (i32, i32)) { map.draw(console, fov_map); let component_ids = ecs.get_all_ids::<Render>(); let mut ids_filtered: Vec<&EntityId> = component_ids.iter().filter(|id| { if let Some(p) = ecs.get_component::<Position>(**id) { fov_map.is_in_fov(p.position.0, p.position.1) } else { false } }).collect(); ids_filtered.sort_by(|id_a, id_b| { let comp_a = ecs.get_component::<Render>(**id_a).unwrap(); let comp_b = ecs.get_component::<Render>(**id_b).unwrap(); comp_a.order.cmp(&comp_b.order) }); ids_filtered.iter().for_each(|id| { let c = ecs.get_component::<Render>(**id).unwrap(); c.draw(ecs, console) }); blit(console, (0, 0), (console.width(), console.height()), root_console, (0, 0), 1.0, 1.0); panel.set_default_foreground(colors::LIGHT_GREY); panel.set_default_background(colors::BLACK); panel.clear(); panel.print_ex(1, 0, BackgroundFlag::None, TextAlignment::Left, get_names_under_mouse(ecs, fov_map, mouse_pos)); if let Some(p) = ecs.get_component::<Actor>(ecs.player_entity_id) { panel.set_default_background(colors::BLACK); render_bar(panel, (1, 1), bar_width, "HP", p.hp, p.max_hp, colors::RED, colors::DARK_RED); } log_panel.render(panel); blit(panel, (0, 0), (panel.width(), panel.height()), root_console, (0, panel_y), 1.0, 1.0); if *game_state == GameState::ShowInventory || *game_state == GameState::ShowInventoryDrop { inventory_menu(root_console, ecs, "Inventory", 50, console.width(), console.height()); } root_console.flush() } pub fn clear_all(ecs: &Ecs, console: &mut Console) { ecs.get_all::<Position>().iter().for_each(|(e, _)| { let render_component = ecs.get_component::<Render>(*e); match render_component { Some(r) => { r.clear(ecs, console) } None => () } }); } pub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: i32, max: i32, bar_color: Color, back_color: Color) { let filled_width = (value as f64 / max as f64 * width as f64).round() as i32; panel.set_default_background(back_color); panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen); if filled_width > 0 { panel.set_default_background(bar_color); panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen) } panel.set_default_foreground(colors::WHITE); panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None, TextAlignment::Center, format!("{}: {}/{}", name, value, max)); } fn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String { let mut names = vec![]; ecs.get_all::<Position>().iter().filter(|(_, p)| { p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1 && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1) }).for_each(|(id, _)| { if let Some(n) = ecs.get_component::<Name>(*id) { names.push(n.name.clone()); } }); names.join(",") } pub fn selection_menu(console: &mut Root, title: 
&str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) { let header_height = console.get_height_rect(0, 0, width, screen_height, title); let height = header_height + options.len() as i32; let mut menu_panel = Offscreen::new(width, height); menu_panel.set_default_foreground(colors::WHITE); menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title); let mut y = header_height; let mut letter_index = 'a' as u8; for option in options { let text = forma
pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) { if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) { let items = if inventory.items.len() == 0 { vec!["Inventory is empty".to_string()] } else { inventory.items.iter().filter(|item_id|{ ecs.has_component::<Name>(**item_id) }).map(|item_id| { ecs.get_component::<Name>(*item_id).unwrap().name.clone() }).collect() }; selection_menu(console, title, items, width, screen_width, screen_height); } } pub struct MessagePanel { pos: (i32, i32), dimensions: (i32, i32), log: Rc<MessageLog>, } impl MessagePanel { pub fn new(pos: (i32, i32), dimensions: (i32, i32), log: Rc<MessageLog>) -> MessagePanel { MessagePanel { pos, dimensions, log, } } pub fn render(&self, panel: &mut Offscreen) { let mut total_lines = 0; 'l: for m in self.log.messages().iter().rev() { let lines = wrap(&m.text, self.dimensions.0 as usize); panel.set_default_foreground(m.color); for l in lines { panel.print_ex(self.pos.0, self.pos.1 + total_lines, BackgroundFlag::None, TextAlignment::Left, l.to_string()); total_lines += 1; if self.pos.1 + total_lines > self.dimensions.1 { break 'l; } } }; } }
t!("({}) {}", letter_index as char, option); menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text); y+=1; letter_index+=1; } let x = screen_width / 2 - width / 2; let y = screen_height / 2 - height / 2; blit(&menu_panel, (0, 0), (width, height), console, (x, y), 1.0, 1.0); }
function_block-function_prefixed
[ { "content": "/// Display a selection menu of various options\n\npub fn selection_menu(console: &mut Root, title: &str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) {\n\n let header_height = console.get_height_rect(0, 0, width, screen_height, title);\n\n let height = header_height + options.len() as i32;\n\n let mut menu_panel = Offscreen::new(width, height);\n\n\n\n menu_panel.set_default_foreground(colors::WHITE);\n\n menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title);\n\n\n\n let mut y = header_height;\n\n let mut letter_index = 'a' as u8;\n\n\n\n for option in options {\n\n let text = format!(\"({}) {}\", letter_index as char, option);\n\n menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text);\n\n y+=1;\n\n letter_index+=1;\n\n }\n\n\n\n let x = screen_width / 2 - width / 2;\n\n let y = screen_height / 2 - height / 2;\n\n\n\n blit(&menu_panel, (0, 0),\n\n (width, height),\n\n console, (x, y),\n\n 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_9/src/render.rs", "rank": 0, "score": 554610.3014461827 }, { "content": "/// Display a selection menu of various options\n\npub fn selection_menu(console: &mut Root, title: &str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) {\n\n let header_height = console.get_height_rect(0, 0, width, screen_height, title);\n\n let height = header_height + options.len() as i32;\n\n let mut menu_panel = Offscreen::new(width, height);\n\n\n\n menu_panel.set_default_foreground(colors::WHITE);\n\n menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title);\n\n\n\n let mut y = header_height;\n\n let mut letter_index = 'a' as u8;\n\n\n\n for option in options {\n\n let text = format!(\"({}) {}\", letter_index as char, option);\n\n menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text);\n\n y += 1;\n\n letter_index += 1;\n\n }\n\n\n\n let x = screen_width / 2 - width / 2;\n\n let y = screen_height / 2 - height / 2;\n\n\n\n blit(&menu_panel, (0, 0),\n\n (width, height),\n\n console, (x, y),\n\n 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 1, "score": 554610.3014461825 }, { "content": "/// Display a selection menu of various options\n\npub fn selection_menu(console: &mut Root, title: &str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) {\n\n let header_height = console.get_height_rect(0, 0, width, screen_height, title);\n\n let height = header_height + options.len() as i32;\n\n let mut menu_panel = Offscreen::new(width, height);\n\n\n\n menu_panel.set_default_foreground(colors::WHITE);\n\n menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title);\n\n\n\n let mut y = header_height;\n\n let mut letter_index = 'a' as u8;\n\n\n\n for option in options {\n\n let text = format!(\"({}) {}\", letter_index as char, option);\n\n menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text);\n\n y += 1;\n\n letter_index += 1;\n\n }\n\n\n\n let x = screen_width / 2 - width / 2;\n\n let y = screen_height / 2 - height / 2;\n\n\n\n blit(&menu_panel, (0, 0),\n\n (width, height),\n\n console, (x, y),\n\n 1.0, 1.0);\n\n}\n\n\n\n\n", "file_path": "part_13/src/render.rs", "rank": 2, "score": 554610.3014461825 }, { "content": "/// Display a selection menu of various options\n\npub fn selection_menu(console: &mut Root, title: &str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) {\n\n let 
header_height = console.get_height_rect(0, 0, width, screen_height, title);\n\n let height = header_height + options.len() as i32;\n\n let mut menu_panel = Offscreen::new(width, height);\n\n\n\n menu_panel.set_default_foreground(colors::WHITE);\n\n menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title);\n\n\n\n let mut y = header_height;\n\n let mut letter_index = 'a' as u8;\n\n\n\n for option in options {\n\n let text = format!(\"({}) {}\", letter_index as char, option);\n\n menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text);\n\n y += 1;\n\n letter_index += 1;\n\n }\n\n\n\n let x = screen_width / 2 - width / 2;\n\n let y = screen_height / 2 - height / 2;\n\n\n\n blit(&menu_panel, (0, 0),\n\n (width, height),\n\n console, (x, y),\n\n 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 3, "score": 554610.3014461824 }, { "content": "/// Display a selection menu of various options\n\npub fn selection_menu(console: &mut Root, title: &str, options: Vec<String>, width: i32, screen_width: i32, screen_height: i32) {\n\n let header_height = console.get_height_rect(0, 0, width, screen_height, title);\n\n let height = header_height + options.len() as i32;\n\n let mut menu_panel = Offscreen::new(width, height);\n\n\n\n menu_panel.set_default_foreground(colors::WHITE);\n\n menu_panel.print_rect_ex(0, 0, width, height, BackgroundFlag::None, TextAlignment::Left, title);\n\n\n\n let mut y = header_height;\n\n let mut letter_index = 'a' as u8;\n\n\n\n for option in options {\n\n let text = format!(\"({}) {}\", letter_index as char, option);\n\n menu_panel.print_ex(0, y, BackgroundFlag::None, TextAlignment::Left, text);\n\n y+=1;\n\n letter_index+=1;\n\n }\n\n\n\n let x = screen_width / 2 - width / 2;\n\n let y = screen_height / 2 - height / 2;\n\n\n\n blit(&menu_panel, (0, 0),\n\n (width, height),\n\n console, (x, y),\n\n 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_10/src/render.rs", "rank": 5, "score": 554610.3014461827 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: i32, max: i32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": "part_7/src/render.rs", "rank": 6, "score": 549127.020347334 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: u32, max: u32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, 
BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": "part_10/src/render.rs", "rank": 8, "score": 534521.8544152355 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: u32, max: u32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": "part_13/src/render.rs", "rank": 9, "score": 534521.8544152353 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: u32, max: u32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": "part_9/src/render.rs", "rank": 10, "score": 534521.8544152355 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: u32, max: u32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 11, "score": 534521.8544152355 }, { "content": "/// Render a bar to graphically represent a value\n\npub fn render_bar(panel: &mut Offscreen, pos: (i32, i32), width: i32, name: &str, value: u32, max: u32, bar_color: Color, back_color: Color) {\n\n let filled_width = (value as f64 / max as f64 * width as f64).round() as i32;\n\n\n\n panel.set_default_background(back_color);\n\n panel.rect(pos.0, pos.1, width, 1, false, BackgroundFlag::Screen);\n\n\n\n if filled_width > 0 {\n\n panel.set_default_background(bar_color);\n\n panel.rect(pos.0, pos.1, filled_width, 1, false, BackgroundFlag::Screen)\n\n }\n\n\n\n panel.set_default_foreground(colors::WHITE);\n\n panel.print_ex(pos.0 + width / 2, pos.1, BackgroundFlag::None,\n\n TextAlignment::Center, format!(\"{}: {}/{}\", name, value, max));\n\n}\n\n\n", "file_path": 
"part_11/src/render.rs", "rank": 12, "score": 534521.8544152353 }, { "content": "pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n let items = if inventory.items.len() == 0 {\n\n vec![\"Inventory is empty\".to_string()]\n\n } else {\n\n inventory.items.iter().filter(|item_id| {\n\n ecs.has_component::<Name>(**item_id)\n\n }).map(|item_id| {\n\n ecs.get_component::<Name>(*item_id).unwrap().name.clone()\n\n }).collect()\n\n };\n\n\n\n selection_menu(console, title, items, width, screen_width, screen_height);\n\n }\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 13, "score": 516321.13965815573 }, { "content": "pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n\n\n let items = if inventory.items.len() == 0 {\n\n vec![\"Inventory is empty\".to_string()]\n\n } else {\n\n inventory.items.iter().filter(|item_id|{\n\n ecs.has_component::<Name>(**item_id)\n\n }).map(|item_id| {\n\n ecs.get_component::<Name>(*item_id).unwrap().name.clone()\n\n }).collect()\n\n };\n\n\n\n selection_menu(console, title, items, width, screen_width, screen_height);\n\n }\n\n}\n\n\n\n\n\npub struct MessagePanel {\n", "file_path": "part_10/src/render.rs", "rank": 14, "score": 516321.1396581558 }, { "content": "pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n\n\n let items = if inventory.items.len() == 0 {\n\n vec![\"Inventory is empty\".to_string()]\n\n } else {\n\n inventory.items.iter().filter(|item_id|{\n\n ecs.has_component::<Name>(**item_id)\n\n }).map(|item_id| {\n\n ecs.get_component::<Name>(*item_id).unwrap().name.clone()\n\n }).collect()\n\n };\n\n\n\n selection_menu(console, title, items, width, screen_width, screen_height);\n\n }\n\n}\n\n\n\n\n\npub struct MessagePanel {\n", "file_path": "part_9/src/render.rs", "rank": 15, "score": 516321.13965815585 }, { "content": "pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n let items = if inventory.items.len() == 0 {\n\n vec![\"Inventory is empty\".to_string()]\n\n } else {\n\n inventory.items.iter().filter(|item_id| {\n\n ecs.has_component::<Name>(**item_id)\n\n }).map(|item_id| {\n\n ecs.get_component::<Name>(*item_id).unwrap().name.clone()\n\n }).collect()\n\n };\n\n\n\n selection_menu(console, title, items, width, screen_width, screen_height);\n\n }\n\n}\n\n\n", "file_path": "part_13/src/render.rs", "rank": 17, "score": 516321.1396581558 }, { "content": "pub fn inventory_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n let items = if inventory.items.len() == 0 {\n\n vec![\"Inventory is empty\".to_string()]\n\n } else {\n\n inventory.items.iter().filter(|item_id| {\n\n ecs.has_component::<Name>(**item_id)\n\n }).map(|item_id| {\n\n ecs.get_component::<Name>(*item_id).unwrap().name.clone()\n\n }).collect()\n\n };\n\n\n\n selection_menu(console, title, items, width, screen_width, 
screen_height);\n\n }\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 18, "score": 516321.1396581558 }, { "content": "pub fn equipment_menu(console: &mut Root, ecs: &Ecs, title: &str, width: i32, screen_width: i32, screen_height: i32) {\n\n if let Some(inventory) = ecs.get_component::<Inventory>(ecs.player_entity_id) {\n\n if let Some(equipment) = ecs.get_component::<Equipment>(ecs.player_entity_id) {\n\n\n\n let equippable : Vec<&EntityId> = inventory.items.iter().filter(|item_id| {\n\n ecs.has_component::<Equippable>(**item_id)\n\n }).collect();\n\n\n\n let items = if equippable.len() == 0 {\n\n vec![\"No equippable items in inventory\".to_string()]\n\n } else {\n\n equippable.iter().filter(|item_id| {\n\n ecs.has_component::<Name>(***item_id)\n\n }).map(|item_id| {\n\n let mut item_name = ecs.get_component::<Name>(**item_id).unwrap().name.clone();\n\n\n\n let equippable = ecs.get_component::<Equippable>(**item_id).unwrap();\n\n\n\n match equippable.slot {\n\n EquipmentSlot::Armor => item_name += &format!(\" [+{} HP]\", equippable.bonus_max_hp),\n", "file_path": "part_13/src/render.rs", "rank": 19, "score": 490514.36838308134 }, { "content": "pub fn character_screen(console: &mut Root, ecs: &Ecs, width: i32, height: i32, screen_width: i32, screen_height: i32) {\n\n\n\n let mut panel = Offscreen::new(width, height);\n\n panel.set_default_foreground(colors::WHITE);\n\n\n\n let mut text_row = 3;\n\n\n\n panel.print_rect_ex(0, 1, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n \"Character Information\");\n\n\n\n if let Some(l) = ecs.get_component::<Level>(ecs.player_entity_id) {\n\n panel.print_rect_ex(0, text_row, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Level: {}\", l.level));\n\n panel.print_rect_ex(0, text_row+1, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Total XP: {}\", l.xp_total));\n\n panel.print_rect_ex(0, text_row+2, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"XP to next Level: {}\", l.xp_to_level(l.level as i32 + 1)));\n\n\n\n text_row = 7;\n\n }\n", "file_path": "part_13/src/render.rs", "rank": 20, "score": 459553.62833749305 }, { "content": "pub fn character_screen(console: &mut Root, ecs: &Ecs, width: i32, height: i32, screen_width: i32, screen_height: i32) {\n\n\n\n let mut panel = Offscreen::new(width, height);\n\n panel.set_default_foreground(colors::WHITE);\n\n\n\n let mut text_row = 3;\n\n\n\n panel.print_rect_ex(0, 1, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n \"Character Information\");\n\n\n\n if let Some(l) = ecs.get_component::<Level>(ecs.player_entity_id) {\n\n panel.print_rect_ex(0, text_row, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Level: {}\", l.level));\n\n panel.print_rect_ex(0, text_row+1, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Total XP: {}\", l.xp_total));\n\n panel.print_rect_ex(0, text_row+2, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"XP to next Level: {}\", l.xp_to_level(l.level as i32 + 1)));\n\n\n\n text_row = 7;\n\n }\n", "file_path": "part_11/src/render.rs", "rank": 21, "score": 459553.62833749293 }, { "content": "pub fn character_screen(console: &mut Root, ecs: &Ecs, width: i32, height: i32, screen_width: i32, screen_height: i32) {\n\n\n\n let mut panel = Offscreen::new(width, height);\n\n panel.set_default_foreground(colors::WHITE);\n\n\n\n let mut text_row = 3;\n\n\n\n panel.print_rect_ex(0, 1, width, height, 
BackgroundFlag::None, TextAlignment::Left,\n\n \"Character Information\");\n\n\n\n if let Some(l) = ecs.get_component::<Level>(ecs.player_entity_id) {\n\n panel.print_rect_ex(0, text_row, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Level: {}\", l.level));\n\n panel.print_rect_ex(0, text_row+1, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"Total XP: {}\", l.xp_total));\n\n panel.print_rect_ex(0, text_row+2, width, height, BackgroundFlag::None, TextAlignment::Left,\n\n format!(\"XP to next Level: {}\", l.xp_to_level(l.level as i32 + 1)));\n\n\n\n text_row = 7;\n\n }\n", "file_path": "part_12/src/render.rs", "rank": 22, "score": 459553.62833749293 }, { "content": "fn message_box(console: &mut Root, title: &str, screen_width: i32, screen_height: i32) {\n\n selection_menu(console, title, vec![], 24, screen_width, screen_height);\n\n}\n\n\n", "file_path": "part_10/src/render.rs", "rank": 23, "score": 427974.67746962665 }, { "content": "fn message_box(console: &mut Root, title: &str, screen_width: i32, screen_height: i32) {\n\n selection_menu(console, title, vec![], 24, screen_width, screen_height);\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 24, "score": 427974.6774696267 }, { "content": "fn message_box(console: &mut Root, title: &str, screen_width: i32, screen_height: i32) {\n\n selection_menu(console, title, vec![], 24, screen_width, screen_height);\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 25, "score": 427974.6774696267 }, { "content": "fn message_box(console: &mut Root, title: &str, screen_width: i32, screen_height: i32) {\n\n selection_menu(console, title, vec![], 24, screen_width, screen_height);\n\n}\n\n\n", "file_path": "part_13/src/render.rs", "rank": 26, "score": 427974.6774696267 }, { "content": "pub fn level_up_menu(console: &mut Root, ecs: &Ecs, screen_width: i32, screen_height: i32) {\n\n if let Some(actor) = ecs.get_component::<Actor>(ecs.player_entity_id) {\n\n let items =\n\n vec![\n\n format!(\"+20 Maximum Health (currently: {})\", actor.max_hp),\n\n format!(\"+1 Attack Power (currently: {})\", actor.power),\n\n format!(\"+1 Defense (currently: {})\", actor.defense),\n\n ];\n\n\n\n selection_menu(console, \"Level Up! Select a stat to raise:\", items, 40, screen_width, screen_height);\n\n }\n\n}\n\n\n\npub struct MessagePanel {\n\n pos: (i32, i32),\n\n dimensions: (i32, i32),\n\n log: Rc<MessageLog>,\n\n}\n\n\n\nimpl MessagePanel {\n", "file_path": "part_12/src/render.rs", "rank": 27, "score": 422092.55399861047 }, { "content": "pub fn level_up_menu(console: &mut Root, ecs: &Ecs, screen_width: i32, screen_height: i32) {\n\n if let Some(actor) = ecs.get_component::<Actor>(ecs.player_entity_id) {\n\n let items =\n\n vec![\n\n format!(\"+20 Maximum Health (currently: {})\", actor.max_hp),\n\n format!(\"+1 Attack Power (currently: {})\", actor.power),\n\n format!(\"+1 Defense (currently: {})\", actor.defense),\n\n ];\n\n\n\n selection_menu(console, \"Level Up! 
Select a stat to raise:\", items, 40, screen_width, screen_height);\n\n }\n\n}\n\n\n\npub struct MessagePanel {\n\n pos: (i32, i32),\n\n dimensions: (i32, i32),\n\n log: Rc<MessageLog>,\n\n}\n\n\n\nimpl MessagePanel {\n", "file_path": "part_11/src/render.rs", "rank": 28, "score": 422092.55399861047 }, { "content": "pub fn level_up_menu(console: &mut Root, ecs: &Ecs, screen_width: i32, screen_height: i32) {\n\n if let Some(actor) = ecs.get_component::<Actor>(ecs.player_entity_id) {\n\n let items =\n\n vec![\n\n format!(\"+20 Maximum Health (currently: {})\", actor.max_hp(ecs)),\n\n format!(\"+1 Attack Power (currently: {})\", actor.power(ecs)),\n\n format!(\"+1 Defense (currently: {})\", actor.defense(ecs)),\n\n ];\n\n\n\n selection_menu(console, \"Level Up! Select a stat to raise:\", items, 40, screen_width, screen_height);\n\n }\n\n}\n\n\n\npub struct MessagePanel {\n\n pos: (i32, i32),\n\n dimensions: (i32, i32),\n\n log: Rc<MessageLog>,\n\n}\n\n\n\nimpl MessagePanel {\n", "file_path": "part_13/src/render.rs", "rank": 29, "score": 422092.5539986104 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(ecs: &Ecs, map: &mut GameMap, fov_map: &Map, fov_recompute: bool, console: &mut Offscreen, root_console: &mut Root) {\n\n map.draw(console, fov_map, fov_recompute);\n\n\n\n\n\n let component_ids = ecs.get_all_ids::<Render>();\n\n let mut ids_filtered: Vec<&EntityId> = component_ids.iter().filter(|id| {\n\n if let Some(p) = ecs.get_component::<Position>(**id) {\n\n fov_map.is_in_fov(p.position.0, p.position.1)\n\n } else {\n\n false\n\n }\n\n }).collect();\n\n ids_filtered.sort_by(|id_a, id_b|{\n\n\n\n let comp_a = ecs.get_component::<Render>(**id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(**id_b).unwrap();\n\n\n\n comp_a.order.cmp(&comp_b.order)\n\n });\n\n ids_filtered.iter().for_each(|id| {\n", "file_path": "part_6/src/render.rs", "rank": 30, "score": 416835.49663826905 }, { "content": "pub fn render_all(objs: &Vec<Entity>, map: &mut GameMap, fov_map: &Map, fov_recompute: bool, console: &mut Offscreen, root_console: &mut Root) {\n\n map.draw(console, fov_map, fov_recompute);\n\n\n\n for obj in objs.iter().filter(|e| fov_map.is_in_fov(e.pos.0, e.pos.1) ) {\n\n obj.draw(console);\n\n }\n\n\n\n blit(console, (0, 0),\n\n (console.width(), console.height()),\n\n root_console, (0, 0),\n\n 1.0, 1.0)\n\n}\n\n\n", "file_path": "part_5/src/render.rs", "rank": 31, "score": 415474.1429750852 }, { "content": "pub fn render_all<T: Render, U: Render>(objs: &Vec<T>, map: &U, console: &mut Root, screen_width: i32, screen_height: i32) {\n\n\n\n let mut offscreen = Box::new(Offscreen::new(screen_width, screen_height));\n\n\n\n map.draw(&mut offscreen);\n\n\n\n for obj in objs {\n\n obj.draw(&mut offscreen);\n\n }\n\n\n\n console::blit(&offscreen,\n\n (0, 0),\n\n (screen_width, screen_height),\n\n console,\n\n (0, 0), 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_2/src/render.rs", "rank": 32, "score": 404159.14634101785 }, { "content": "pub fn render_all<T: Render, U: Render>(objs: &Vec<T>, map: &U, console: &mut Root, screen_width: i32, screen_height: i32) {\n\n\n\n let mut offscreen = Box::new(Offscreen::new(screen_width, screen_height));\n\n\n\n map.draw(&mut offscreen);\n\n\n\n for obj in objs {\n\n obj.draw(&mut offscreen);\n\n }\n\n\n\n console::blit(&offscreen,\n\n (0, 0),\n\n (screen_width, screen_height),\n\n console,\n\n (0, 0), 1.0, 1.0);\n\n}\n\n\n", "file_path": "part_3/src/render.rs", "rank": 
33, "score": 404159.14634101785 }, { "content": "pub fn render_all<T: Render>(objs: &Vec<T>, map: &mut GameMap, fov_map: &Map, fov_recompute: bool, console: &mut Root) {\n\n\n\n map.draw(console, fov_map, fov_recompute);\n\n\n\n for obj in objs {\n\n obj.draw(console);\n\n }\n\n}\n\n\n", "file_path": "part_4/src/render.rs", "rank": 34, "score": 350937.1758722189 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n let mut ids_filtered: Vec<EntityId> = ecs.get_all::<Position>().iter().filter(|(id, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1\n\n && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n && ecs.has_component::<Render>(**id)\n\n }).map(|(id, _)| {\n\n id.clone()\n\n }).collect();\n\n\n\n ids_filtered.sort_by(|id_a, id_b| {\n\n let comp_a = ecs.get_component::<Render>(*id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(*id_b).unwrap();\n\n\n\n comp_b.order.cmp(&comp_a.order)\n\n });\n\n\n\n ids_filtered.iter().for_each(|id| {\n\n names.push(generate_entity_text(ecs, *id));\n\n });\n\n\n\n names.join(\"; \")\n\n}\n\n\n", "file_path": "part_13/src/render.rs", "rank": 36, "score": 323507.7934262394 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n let mut ids_filtered: Vec<EntityId> = ecs.get_all::<Position>().iter().filter(|(id, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1\n\n && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n && ecs.has_component::<Render>(**id)\n\n }).map(|(id, _)| {\n\n id.clone()\n\n }).collect();\n\n\n\n ids_filtered.sort_by(|id_a, id_b| {\n\n let comp_a = ecs.get_component::<Render>(*id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(*id_b).unwrap();\n\n\n\n comp_b.order.cmp(&comp_a.order)\n\n });\n\n\n\n ids_filtered.iter().for_each(|id| {\n\n names.push(generate_entity_text(ecs, *id));\n\n });\n\n\n\n names.join(\"; \")\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 37, "score": 323507.7934262394 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n ecs.get_all::<Position>().iter().filter(|(_, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1 && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n }).for_each(|(id, _)| {\n\n if let Some(n) = ecs.get_component::<Name>(*id) {\n\n names.push(n.name.clone());\n\n }\n\n });\n\n\n\n names.join(\",\")\n\n}\n\n\n\n\n\npub struct MessagePanel {\n\n pos: (i32, i32),\n\n dimensions: (i32, i32),\n\n log: Rc<MessageLog>,\n\n}\n", "file_path": "part_7/src/render.rs", "rank": 38, "score": 323507.7934262394 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n ecs.get_all::<Position>().iter().filter(|(_, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1 && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n }).for_each(|(id, _)| {\n\n if let Some(n) = ecs.get_component::<Name>(*id) {\n\n names.push(n.name.clone());\n\n }\n\n });\n\n\n\n names.join(\",\")\n\n}\n\n\n", "file_path": 
"part_10/src/render.rs", "rank": 39, "score": 323507.7934262394 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n ecs.get_all::<Position>().iter().filter(|(_, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1 && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n }).for_each(|(id, _)| {\n\n if let Some(n) = ecs.get_component::<Name>(*id) {\n\n names.push(n.name.clone());\n\n }\n\n });\n\n\n\n names.join(\",\")\n\n}\n\n\n", "file_path": "part_9/src/render.rs", "rank": 40, "score": 323507.79342623946 }, { "content": "/// Get a Vec of the names of all Entities which are under the cursor.\n\nfn get_names_under_mouse(ecs: &Ecs, fov_map: &Map, mouse_pos: (i32, i32)) -> String {\n\n let mut names = vec![];\n\n\n\n let mut ids_filtered: Vec<EntityId> = ecs.get_all::<Position>().iter().filter(|(id, p)| {\n\n p.position.0 == mouse_pos.0 && p.position.1 == mouse_pos.1\n\n && fov_map.is_in_fov(mouse_pos.0, mouse_pos.1)\n\n && ecs.has_component::<Render>(**id)\n\n }).map(|(id, _)| {\n\n id.clone()\n\n }).collect();\n\n\n\n ids_filtered.sort_by(|id_a, id_b| {\n\n let comp_a = ecs.get_component::<Render>(*id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(*id_b).unwrap();\n\n\n\n comp_b.order.cmp(&comp_a.order)\n\n });\n\n\n\n ids_filtered.iter().for_each(|id| {\n\n names.push(generate_entity_text(ecs, *id));\n\n });\n\n\n\n names.join(\"; \")\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 41, "score": 323507.7934262394 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(ecs: &Ecs, map: &mut GameMap, fov_map: &Map, fov_recompute: bool,\n\n console: &mut Offscreen, panel: &mut Offscreen, root_console: &mut Root,\n\n bar_width: i32, panel_y: i32, log_panel: &MessagePanel, mouse_pos: (i32, i32)) {\n\n map.draw(console, fov_map, fov_recompute);\n\n\n\n\n\n let component_ids = ecs.get_all_ids::<Render>();\n\n let mut ids_filtered: Vec<&EntityId> = component_ids.iter().filter(|id| {\n\n if let Some(p) = ecs.get_component::<Position>(**id) {\n\n fov_map.is_in_fov(p.position.0, p.position.1)\n\n } else {\n\n false\n\n }\n\n }).collect();\n\n ids_filtered.sort_by(|id_a, id_b| {\n\n let comp_a = ecs.get_component::<Render>(**id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(**id_b).unwrap();\n\n\n\n comp_a.order.cmp(&comp_b.order)\n\n });\n", "file_path": "part_7/src/render.rs", "rank": 42, "score": 298892.5792772225 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(ecs: &Ecs, map: &mut GameMap, fov_map: &Map, game_state: &GameState,\n\n console: &mut Offscreen, panel: &mut Offscreen, root_console: &mut Root,\n\n bar_width: i32, panel_y: i32, log_panel: &MessagePanel, mouse_pos: (i32, i32)) {\n\n map.draw(console, fov_map);\n\n\n\n\n\n let component_ids = ecs.get_all_ids::<Render>();\n\n let mut ids_filtered: Vec<&EntityId> = component_ids.iter().filter(|id| {\n\n if let Some(p) = ecs.get_component::<Position>(**id) {\n\n fov_map.is_in_fov(p.position.0, p.position.1)\n\n } else {\n\n false\n\n }\n\n }).collect();\n\n ids_filtered.sort_by(|id_a, id_b| {\n\n let comp_a = ecs.get_component::<Render>(**id_a).unwrap();\n\n let comp_b = ecs.get_component::<Render>(**id_b).unwrap();\n\n\n\n comp_a.order.cmp(&comp_b.order)\n\n 
});\n", "file_path": "part_9/src/render.rs", "rank": 44, "score": 295518.08013464836 }, { "content": "/// Returns a value from a list of values by dungeon level\n\n/// The list needs to be passed as a list of tuples (min_level:i32, weight:i32)\n\n/// If no eligible weight can be found 0 will be returned\n\npub fn by_dungeon_level(chance_table: Cow<Vec<(i32, i32)>>, level: u8) -> i32 {\n\n let mut value_of_level = 0;\n\n chance_table.iter().for_each(|(value, min_level)|{\n\n if level as i32 >= *min_level {\n\n value_of_level = *value;\n\n }\n\n });\n\n value_of_level\n\n}", "file_path": "part_13/src/random_utils.rs", "rank": 45, "score": 279141.2457505871 }, { "content": "/// Returns a value from a list of values by dungeon level\n\n/// The list needs to be passed as a list of tuples (min_level:i32, weight:i32)\n\n/// If no eligible weight can be found 0 will be returned\n\npub fn by_dungeon_level(chance_table: Cow<Vec<(i32, i32)>>, level: u8) -> i32 {\n\n let mut value_of_level = 0;\n\n chance_table.iter().for_each(|(value, min_level)|{\n\n if level as i32 >= *min_level {\n\n value_of_level = *value;\n\n }\n\n });\n\n value_of_level\n\n}", "file_path": "part_12/src/random_utils.rs", "rank": 46, "score": 279141.2457505871 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_9/src/map_objects/fov.rs", "rank": 47, "score": 277294.15912344557 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_7/src/map_objects/fov.rs", "rank": 48, "score": 277294.1591234455 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_4/src/map_objects/fov.rs", "rank": 49, "score": 277294.1591234455 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_6/src/map_objects/fov.rs", "rank": 50, "score": 277294.15912344557 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_5/src/map_objects/fov.rs", "rank": 51, "score": 277294.1591234455 }, { "content": "pub fn recompute_fov(fov_map: &mut Map, position: (i32, i32), radius: i32, light_walls: bool, algorithm: FovAlgorithm) {\n\n fov_map.compute_fov(position.0, position.1, radius,light_walls,algorithm);\n\n\n\n}", "file_path": "part_8/src/map_objects/fov.rs", "rank": 52, "score": 277294.1591234455 }, { "content": "pub fn recompute_fov(ecs: &Ecs, fov_map: &mut Map, settings: &Settings) {\n\n let p = ecs.get_component::<Position>(ecs.player_entity_id).unwrap();\n\n fov_map.compute_fov(p.position.0, p.position.1,\n\n settings.fov_radius(),\n\n settings.fov_light_walls(),\n\n settings.fov_algorithm());\n\n}", "file_path": "part_12/src/map_objects/fov.rs", "rank": 53, "score": 276814.7050727054 }, { "content": "pub fn 
recompute_fov(ecs: &Ecs, fov_map: &mut Map, settings: &Settings) {\n\n let p = ecs.get_component::<Position>(ecs.player_entity_id).unwrap();\n\n fov_map.compute_fov(p.position.0, p.position.1,\n\n settings.fov_radius(),\n\n settings.fov_light_walls(),\n\n settings.fov_algorithm());\n\n}", "file_path": "part_10/src/map_objects/fov.rs", "rank": 54, "score": 276814.7050727054 }, { "content": "pub fn recompute_fov(ecs: &Ecs, fov_map: &mut Map, settings: &Settings) {\n\n let p = ecs.get_component::<Position>(ecs.player_entity_id).unwrap();\n\n fov_map.compute_fov(p.position.0, p.position.1,\n\n settings.fov_radius(),\n\n settings.fov_light_walls(),\n\n settings.fov_algorithm());\n\n}", "file_path": "part_11/src/map_objects/fov.rs", "rank": 55, "score": 276814.7050727054 }, { "content": "pub fn recompute_fov(ecs: &Ecs, fov_map: &mut Map, settings: &Settings) {\n\n let p = ecs.get_component::<Position>(ecs.player_entity_id).unwrap();\n\n fov_map.compute_fov(p.position.0, p.position.1,\n\n settings.fov_radius(),\n\n settings.fov_light_walls(),\n\n settings.fov_algorithm());\n\n}", "file_path": "part_13/src/map_objects/fov.rs", "rank": 56, "score": 276814.7050727054 }, { "content": "/// Clear all `Entity`s which got both the `Render` and the `Position` component assigned from the console\n\npub fn clear_all(ecs: &Ecs, console: &mut Console) {\n\n ecs.get_all::<Position>().iter().for_each(|(e, p)| {\n\n let render_component = ecs.get_component::<Render>(*e);\n\n match render_component {\n\n Some(r) => {\n\n r.clear(ecs, console)\n\n }\n\n None => ()\n\n }\n\n });\n\n}\n", "file_path": "part_6/src/render.rs", "rank": 57, "score": 257988.59679026864 }, { "content": "/// Clear all `Entity`s which got both the `Render` and the `Position` component assigned from the console\n\npub fn clear_all(ecs: &Ecs, console: &mut Console) {\n\n ecs.get_all::<Position>().iter().for_each(|(e, _)| {\n\n let render_component = ecs.get_component::<Render>(*e);\n\n match render_component {\n\n Some(r) => {\n\n r.clear(ecs, console)\n\n }\n\n None => ()\n\n }\n\n });\n\n}\n\n\n", "file_path": "part_9/src/render.rs", "rank": 58, "score": 257988.59679026858 }, { "content": "/// Clear all `Entity`s which got both the `Render` and the `Position` component assigned from the console\n\npub fn clear_all(ecs: &Ecs, console: &mut Console) {\n\n ecs.get_all::<Position>().iter().for_each(|(e, _)| {\n\n let render_component = ecs.get_component::<Render>(*e);\n\n match render_component {\n\n Some(r) => {\n\n r.clear(ecs, console)\n\n }\n\n None => ()\n\n }\n\n });\n\n}\n\n\n", "file_path": "part_7/src/render.rs", "rank": 60, "score": 257988.5967902686 }, { "content": "pub fn clear_all<T: Render>(objs: &Vec<T>, console: &mut Console) {\n\n for obj in objs {\n\n obj.clear(console);\n\n }\n\n}\n", "file_path": "part_4/src/render.rs", "rank": 61, "score": 248438.45376617968 }, { "content": "pub fn clear_all<T: Render>(objs: &Vec<T>,console: &mut Console) {\n\n for obj in objs {\n\n obj.clear(console);\n\n }\n\n}\n", "file_path": "part_2/src/render.rs", "rank": 62, "score": 248438.45376617968 }, { "content": "pub fn clear_all<T: Render>(objs: &Vec<T>,console: &mut Console) {\n\n for obj in objs {\n\n obj.clear(console);\n\n }\n\n}\n", "file_path": "part_3/src/render.rs", "rank": 63, "score": 248438.45376617968 }, { "content": "pub fn clear_all<T: Render>(objs: &Vec<T>, console: &mut Console) {\n\n for obj in objs {\n\n obj.clear(console);\n\n }\n\n}\n", "file_path": "part_5/src/render.rs", "rank": 64, "score": 248438.45376617968 }, { 
"content": "/// Returns a randomly selected index from a weighted list\n\npub fn random_choice_index(chances: Vec<i32>) -> usize {\n\n let mut rng = thread_rng();\n\n let random_chance = rng.gen_range(1, chances.iter().sum());\n\n\n\n let mut running_sum = 0;\n\n let mut choice = 0;\n\n\n\n for chance in chances {\n\n running_sum += chance;\n\n if random_chance <= running_sum {\n\n break\n\n }\n\n choice+=1;\n\n }\n\n choice\n\n}\n\n\n", "file_path": "part_12/src/random_utils.rs", "rank": 65, "score": 224866.76351638814 }, { "content": "/// Returns a randomly selected index from a weighted list\n\npub fn random_choice_index(chances: Vec<i32>) -> usize {\n\n let mut rng = thread_rng();\n\n let random_chance = rng.gen_range(1, chances.iter().sum());\n\n\n\n let mut running_sum = 0;\n\n let mut choice = 0;\n\n\n\n for chance in chances {\n\n running_sum += chance;\n\n if random_chance <= running_sum {\n\n break\n\n }\n\n choice+=1;\n\n }\n\n choice\n\n}\n\n\n", "file_path": "part_13/src/random_utils.rs", "rank": 66, "score": 224866.76351638814 }, { "content": "pub fn load() -> Option<JsonValue> {\n\n\n\n match OpenOptions::new().read(true).open(SAVE_FILE_NAME) {\n\n Ok(mut f) => deserialize( &mut f),\n\n _ => None\n\n }\n\n\n\n}\n\n\n", "file_path": "part_10/src/savegame.rs", "rank": 67, "score": 223678.8922067605 }, { "content": "pub fn load() -> Option<JsonValue> {\n\n\n\n match OpenOptions::new().read(true).open(SAVE_FILE_NAME) {\n\n Ok(mut f) => deserialize( &mut f),\n\n _ => None\n\n }\n\n\n\n}\n\n\n", "file_path": "part_11/src/savegame.rs", "rank": 68, "score": 223678.8922067605 }, { "content": "pub fn load() -> Option<JsonValue> {\n\n\n\n match OpenOptions::new().read(true).open(SAVE_FILE_NAME) {\n\n Ok(mut f) => deserialize( &mut f),\n\n _ => None\n\n }\n\n\n\n}\n\n\n", "file_path": "part_13/src/savegame.rs", "rank": 69, "score": 223678.8922067605 }, { "content": "pub fn load() -> Option<JsonValue> {\n\n\n\n match OpenOptions::new().read(true).open(SAVE_FILE_NAME) {\n\n Ok(mut f) => deserialize( &mut f),\n\n _ => None\n\n }\n\n\n\n}\n\n\n", "file_path": "part_12/src/savegame.rs", "rank": 70, "score": 223678.8922067605 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_5/src/map_objects/fov.rs", "rank": 71, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_13/src/map_objects/fov.rs", "rank": 72, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_7/src/map_objects/fov.rs", "rank": 73, "score": 
217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_10/src/map_objects/fov.rs", "rank": 74, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_11/src/map_objects/fov.rs", "rank": 75, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_4/src/map_objects/fov.rs", "rank": 76, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_9/src/map_objects/fov.rs", "rank": 77, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_6/src/map_objects/fov.rs", "rank": 78, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_12/src/map_objects/fov.rs", "rank": 79, "score": 217076.2818870254 }, { "content": "pub fn initialize_fov(game_map: &GameMap) -> Map {\n\n let mut fov_map = Map::new(game_map.dimensions.0, game_map.dimensions.1);\n\n\n\n for x in 0..game_map.dimensions.0 {\n\n for y in 0..game_map.dimensions.1 {\n\n let tile = game_map.get_tile(x as usize, y as usize);\n\n fov_map.set(x, y, !tile.block_sight, !tile.block_move);\n\n }\n\n }\n\n fov_map\n\n}\n\n\n", "file_path": "part_8/src/map_objects/fov.rs", "rank": 80, "score": 217076.2818870254 }, { "content": "fn move_player(player_x: &mut i32, player_y: &mut i32, movement: (i32,i32)) {\n\n *player_x += movement.0;\n\n *player_y += movement.1;\n\n}\n\n\n", "file_path": "part_1/src/main.rs", "rank": 81, "score": 211179.84419850382 }, 
{ "content": "fn deserialize(file: &mut File) -> Option<JsonValue> {\n\n let mut data = String::new();\n\n file.read_to_string(&mut data);\n\n\n\n match json::parse(&data) {\n\n Ok(parsed) => Some(parsed),\n\n Err(e) => None\n\n }\n\n}\n\n\n\n\n", "file_path": "part_11/src/savegame.rs", "rank": 82, "score": 208290.21955648198 }, { "content": "fn deserialize(file: &mut File) -> Option<JsonValue> {\n\n let mut data = String::new();\n\n file.read_to_string(&mut data);\n\n\n\n match json::parse(&data) {\n\n Ok(parsed) => Some(parsed),\n\n Err(e) => None\n\n }\n\n}\n\n\n\n\n", "file_path": "part_13/src/savegame.rs", "rank": 83, "score": 208290.21955648198 }, { "content": "fn deserialize(file: &mut File) -> Option<JsonValue> {\n\n let mut data = String::new();\n\n file.read_to_string(&mut data);\n\n\n\n match json::parse(&data) {\n\n Ok(parsed) => Some(parsed),\n\n Err(e) => None\n\n }\n\n}\n\n\n\n\n", "file_path": "part_12/src/savegame.rs", "rank": 84, "score": 208290.21955648198 }, { "content": "fn deserialize(file: &mut File) -> Option<JsonValue> {\n\n let mut data = String::new();\n\n file.read_to_string(&mut data);\n\n\n\n match json::parse(&data) {\n\n Ok(parsed) => Some(parsed),\n\n Err(e) => None\n\n }\n\n}\n\n\n\n\n", "file_path": "part_10/src/savegame.rs", "rank": 85, "score": 208290.21955648198 }, { "content": "fn generate_entity_text(ecs: &Ecs, id: EntityId) -> String {\n\n let mut name = if let Some(n) = ecs.get_component::<Name>(id) {\n\n n.name.clone()\n\n } else {\n\n format!(\"{}\", id)\n\n };\n\n\n\n if let Some(a) = ecs.get_component::<Level>(id) {\n\n name = format!(\"{},lv.{}\", name, a.level);\n\n }\n\n if let Some(c) = ecs.get_component::<Corpse>(id) {\n\n name = format!(\"{},Dead\", name);\n\n }\n\n\n\n name\n\n}\n\n\n", "file_path": "part_13/src/render.rs", "rank": 86, "score": 185075.62881389423 }, { "content": "fn generate_entity_text(ecs: &Ecs, id: EntityId) -> String {\n\n let mut name = if let Some(n) = ecs.get_component::<Name>(id) {\n\n n.name.clone()\n\n } else {\n\n format!(\"{}\", id)\n\n };\n\n\n\n if let Some(a) = ecs.get_component::<Level>(id) {\n\n name = format!(\"{},lv.{}\", name, a.level);\n\n }\n\n if let Some(c) = ecs.get_component::<Corpse>(id) {\n\n name = format!(\"{},Dead\", name);\n\n }\n\n\n\n name\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 87, "score": 185075.62881389423 }, { "content": "fn generate_entity_text(ecs: &Ecs, id: EntityId) -> String {\n\n let mut name = if let Some(n) = ecs.get_component::<Name>(id) {\n\n n.name.clone()\n\n } else {\n\n format!(\"{}\", id)\n\n };\n\n\n\n if let Some(a) = ecs.get_component::<Level>(id) {\n\n name = format!(\"{},lv.{}\", name, a.level);\n\n }\n\n if let Some(c) = ecs.get_component::<Corpse>(id) {\n\n name = format!(\"{},Dead\", name);\n\n }\n\n\n\n name\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 88, "score": 185075.62881389423 }, { "content": "pub fn handle_input(state: &GameState, event: Option<(EventFlags, Event)>) -> Option<InputAction> {\n\n if let Some(e) = event {\n\n match e {\n\n (KEY_PRESS, Event::Key(key)) => {\n\n match state {\n\n GameState::PlayersTurn => handle_keys_player_turn(key),\n\n GameState::ShowInventoryUse | GameState::ShowInventoryDrop\n\n | GameState::MainMenu | GameState::ShowQuitGameMenu => handle_keys_selection_menu(key),\n\n _ => handle_keys_default(key),\n\n }\n\n }\n\n (_, Event::Mouse(mouse)) => match state {\n\n GameState::Targeting( .. 
) => handle_mouse_targeting(mouse),\n\n _ => handle_mouse_default(mouse)\n\n },\n\n _ => None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "part_10/src/game/input.rs", "rank": 89, "score": 160716.88571450638 }, { "content": "pub fn handle_input(state: &GameState, event: Option<(EventFlags, Event)>) -> Option<InputAction> {\n\n if let Some(e) = event {\n\n match e {\n\n (KEY_PRESS, Event::Key(key)) => {\n\n match state {\n\n GameState::PlayersTurn => handle_keys_player_turn(key),\n\n GameState::ShowInventoryUse | GameState::ShowInventoryDrop\n\n | GameState::MainMenu | GameState::ShowQuitGameMenu\n\n | GameState::ShowLeveUpMenu => handle_keys_selection_menu(key),\n\n _ => handle_keys_default(key),\n\n }\n\n }\n\n (_, Event::Mouse(mouse)) => match state {\n\n GameState::Targeting( .. ) => handle_mouse_targeting(mouse),\n\n _ => handle_mouse_default(mouse)\n\n },\n\n _ => None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "part_11/src/game/input.rs", "rank": 90, "score": 160716.88571450638 }, { "content": "pub fn handle_input(state: &GameState, event: Option<(EventFlags, Event)>) -> Option<InputAction> {\n\n if let Some(e) = event {\n\n match e {\n\n (KEY_PRESS, Event::Key(key)) => {\n\n match state {\n\n GameState::PlayersTurn => handle_keys_player_turn(key),\n\n GameState::ShowInventoryUse | GameState::ShowInventoryDrop | GameState::ShowInventoryEquip\n\n | GameState::MainMenu | GameState::ShowQuitGameMenu\n\n | GameState::ShowLeveUpMenu => handle_keys_selection_menu(key),\n\n _ => handle_keys_default(key),\n\n }\n\n }\n\n (_, Event::Mouse(mouse)) => match state {\n\n GameState::Targeting( .. ) => handle_mouse_targeting(mouse),\n\n _ => handle_mouse_default(mouse)\n\n },\n\n _ => None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "part_13/src/game/input.rs", "rank": 91, "score": 160716.88571450638 }, { "content": "pub fn handle_input(state: &GameState, event: Option<(EventFlags, Event)>) -> Option<InputAction> {\n\n if let Some(e) = event {\n\n match e {\n\n (KEY_PRESS, Event::Key(key)) => {\n\n match state {\n\n GameState::PlayersTurn => handle_keys_player_turn(key),\n\n GameState::ShowInventoryUse | GameState::ShowInventoryDrop\n\n | GameState::MainMenu | GameState::ShowQuitGameMenu\n\n | GameState::ShowLeveUpMenu => handle_keys_selection_menu(key),\n\n _ => handle_keys_default(key),\n\n }\n\n }\n\n (_, Event::Mouse(mouse)) => match state {\n\n GameState::Targeting( .. 
) => handle_mouse_targeting(mouse),\n\n _ => handle_mouse_default(mouse)\n\n },\n\n _ => None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "part_12/src/game/input.rs", "rank": 92, "score": 160716.88571450638 }, { "content": "pub fn save(game: &RefMut<Game>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(SAVE_FILE_NAME).unwrap();\n\n\n\n\n\n let data = game.serialize().to_string();\n\n file.write_all(data.into_bytes().as_slice());\n\n}\n\n\n", "file_path": "part_12/src/savegame.rs", "rank": 93, "score": 152190.76886348933 }, { "content": "pub fn save(game: &RefMut<Game>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(SAVE_FILE_NAME).unwrap();\n\n\n\n\n\n let data = game.serialize().to_string();\n\n file.write_all(data.into_bytes().as_slice());\n\n}\n\n\n", "file_path": "part_11/src/savegame.rs", "rank": 94, "score": 152190.76886348933 }, { "content": "pub fn save(game: &RefMut<Game>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(SAVE_FILE_NAME).unwrap();\n\n\n\n\n\n let data = game.serialize().to_string();\n\n file.write_all(data.into_bytes().as_slice());\n\n}\n\n\n", "file_path": "part_10/src/savegame.rs", "rank": 95, "score": 152190.76886348933 }, { "content": "pub fn save(game: &RefMut<Game>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(SAVE_FILE_NAME).unwrap();\n\n\n\n\n\n let data = game.serialize().to_string();\n\n file.write_all(data.into_bytes().as_slice());\n\n}\n\n\n", "file_path": "part_13/src/savegame.rs", "rank": 96, "score": 152190.76886348933 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(engine: &Engine, game: &RefMut<Game>) {\n\n match engine.state {\n\n GameState::MainMenu => render_main_menu(&engine),\n\n _ => render_game(&engine, &game)\n\n }\n\n}\n\n\n", "file_path": "part_11/src/render.rs", "rank": 97, "score": 141261.46209714777 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(engine: &Engine, game: &RefMut<Game>) {\n\n\n\n match engine.state {\n\n GameState::MainMenu => render_main_menu(&engine),\n\n _ => render_game(&engine, &game)\n\n }\n\n}\n\n\n", "file_path": "part_10/src/render.rs", "rank": 98, "score": 141261.46209714777 }, { "content": "/// Render all `Entity`s which got both the `Render` and the `Position` component assigned onto the console\n\npub fn render_all(engine: &Engine, game: &RefMut<Game>) {\n\n match engine.state {\n\n GameState::MainMenu => render_main_menu(&engine),\n\n _ => render_game(&engine, &game)\n\n }\n\n}\n\n\n", "file_path": "part_12/src/render.rs", "rank": 99, "score": 141261.46209714777 } ]
Rust
clinkv2/src/kzg10/prover.rs
sunhuachuang/ckb-zkp
4031a458a301a87cfdea9f6c662cc8a837d6ae51
use ark_ec::PairingEngine; use ark_ff::{Field, One, ToBytes, UniformRand, Zero}; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain, Polynomial, UVPolynomial}; use ark_std::{cfg_iter, cfg_iter_mut}; use merlin::Transcript; use rand::Rng; #[cfg(feature = "parallel")] use rayon::prelude::*; use crate::{ kzg10::{Proof, ProveAssignment, ProveKey, KZG10}, r1cs::{Index, SynthesisError}, }; pub fn create_random_proof<E: PairingEngine, R: Rng>( circuit: &ProveAssignment<E>, kzg10_ck: &ProveKey<'_, E>, rng: &mut R, ) -> Result<Proof<E>, SynthesisError> { let m_io = circuit.input_assignment.len(); let m_mid = circuit.aux_assignment.len(); let n = circuit.input_assignment[0].len(); let mut transcript = Transcript::new(b"CLINKv2"); let domain: GeneralEvaluationDomain<E::Fr> = EvaluationDomain::<E::Fr>::new(n).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let domain_size = domain.size(); let mut r_q_polys = vec![]; let mut r_mid_comms = vec![]; let mut r_mid_q_values = vec![]; let mut r_mid_q_rands = vec![]; let zero = E::Fr::zero(); let one = E::Fr::one(); let hiding_bound = Some(2); for j in 0..m_io { let rj_coeffs = domain.ifft(&circuit.input_assignment[j]); let rj_poly = DensePolynomial::from_coefficients_vec(rj_coeffs); r_q_polys.push(rj_poly); } for j in 0..m_mid { let rj_coeffs = domain.ifft(&circuit.aux_assignment[j]); let mut rj_poly = DensePolynomial::from_coefficients_vec(rj_coeffs); let rho = zero; let rho_poly = DensePolynomial::from_coefficients_vec(vec![rho; 1]); let vanishing_poly = domain.vanishing_polynomial(); rj_poly += &(&rho_poly * &vanishing_poly.into()); let (rj_comm, rj_rand) = KZG10::<E>::commit(&kzg10_ck, &rj_poly, hiding_bound, Some(rng))?; r_q_polys.push(rj_poly); r_mid_comms.push(rj_comm); r_mid_q_rands.push(rj_rand); } let mut r_mid_comms_bytes = vec![]; r_mid_comms.write(&mut r_mid_comms_bytes)?; transcript.append_message(b"witness polynomial commitments", &r_mid_comms_bytes); let mut c = [0u8; 31]; transcript.challenge_bytes(b"batching challenge", &mut c); let eta = E::Fr::from_random_bytes(&c).unwrap(); let m_abc = circuit.at.len(); let mut sum_coset_ab = vec![zero; domain_size]; let mut sum_c = vec![zero; domain_size]; let mut eta_i = one; for i in 0..m_abc { let mut ai_coeffs = vec![zero; domain_size]; for (coeff, index) in (&circuit.at[i]).into_iter() { let id = match index { Index::Input(j) => *j, Index::Aux(j) => m_io + *j, }; for k in 0..r_q_polys[id].coeffs.len() { ai_coeffs[k] += &(r_q_polys[id].coeffs[k] * coeff); } } let mut ai = DensePolynomial::from_coefficients_vec(ai_coeffs); let mut bi_coeffs = vec![zero; domain_size]; for (coeff, index) in (&circuit.bt[i]).into_iter() { let id = match index { Index::Input(j) => *j, Index::Aux(j) => m_io + *j, }; for k in 0..r_q_polys[id].coeffs.len() { bi_coeffs[k] += &(r_q_polys[id].coeffs[k] * coeff); } } let mut bi = DensePolynomial::from_coefficients_vec(bi_coeffs); domain.coset_fft_in_place(&mut ai.coeffs); domain.coset_fft_in_place(&mut bi.coeffs); let coset_ab_values = domain.mul_polynomials_in_evaluation_domain(&ai, &bi); drop(ai); drop(bi); cfg_iter!(coset_ab_values) .zip(&mut sum_coset_ab) .for_each(|(coset_abij, sum_coset_ab_j)| *sum_coset_ab_j += &(eta_i * (*coset_abij))); let mut ci_values = vec![zero; domain_size]; for (coeff, index) in (&circuit.ct[i]).into_iter() { match index { Index::Input(j) => { cfg_iter_mut!(&mut ci_values) .zip(&circuit.input_assignment[*j]) .for_each(|(cij, rij)| *cij += &(*rij * coeff)); } Index::Aux(j) => { 
cfg_iter_mut!(&mut ci_values) .zip(&circuit.aux_assignment[*j]) .for_each(|(cij, rij)| *cij += &(*rij * coeff)); } }; } cfg_iter!(ci_values) .zip(&mut sum_c) .for_each(|(cij, sum_c_j)| *sum_c_j += &(eta_i * (*cij))); eta_i = eta_i * &eta; } domain.ifft_in_place(&mut sum_c); domain.coset_fft_in_place(&mut sum_c); cfg_iter_mut!(sum_coset_ab) .zip(sum_c) .for_each(|(sum_coset_ab_j, sum_coset_c_j)| *sum_coset_ab_j -= &sum_coset_c_j); domain.divide_by_vanishing_poly_on_coset_in_place(&mut sum_coset_ab); domain.coset_ifft_in_place(&mut sum_coset_ab); let q_poly = DensePolynomial::from_coefficients_vec(sum_coset_ab); let (q_comm, q_rand) = KZG10::<E>::commit(&kzg10_ck, &q_poly, hiding_bound, Some(rng))?; let mut q_comm_bytes = vec![]; q_comm.write(&mut q_comm_bytes)?; transcript.append_message(b"quotient polynomial commitments", &q_comm_bytes); let mut c = [0u8; 31]; transcript.challenge_bytes(b"random point", &mut c); let zeta = E::Fr::from_random_bytes(&c).unwrap(); r_q_polys.push(q_poly); r_mid_q_rands.push(q_rand); for j in 0..(m_mid + 1) { let value = r_q_polys[j + m_io].evaluate(&zeta); r_mid_q_values.push(value); } let opening_challenge = E::Fr::rand(rng); let r_mid_q_proof = KZG10::<E>::batch_open( &kzg10_ck, &r_q_polys[m_io..], zeta, opening_challenge, &r_mid_q_rands, )?; let proof = Proof { r_mid_comms, q_comm, r_mid_q_values, r_mid_q_proof, opening_challenge, }; Ok(proof) }
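The file_code above derives both Fiat-Shamir challenges (the batching scalar eta and the evaluation point zeta) from a Merlin transcript. Below is a minimal stand-alone sketch of that pattern; the transcript labels and the 31-byte challenge buffer are copied from the prover, while the empty commitment bytes and the println are illustrative placeholders only.

use ark_bls12_381::Fr;
use ark_ff::Field;
use merlin::Transcript;

fn main() {
    // Same pattern as create_random_proof: absorb the serialized witness
    // commitments, then squeeze 31 bytes and map them into the scalar field.
    // 31 bytes (248 bits) always encode an integer below the 255-bit
    // BLS12-381 scalar modulus, so from_random_bytes cannot return None here.
    let mut transcript = Transcript::new(b"CLINKv2");

    // Placeholder: in the prover this is the serialized `r_mid_comms` vector.
    let r_mid_comms_bytes: Vec<u8> = Vec::new();
    transcript.append_message(b"witness polynomial commitments", &r_mid_comms_bytes);

    let mut c = [0u8; 31];
    transcript.challenge_bytes(b"batching challenge", &mut c);
    let eta = Fr::from_random_bytes(&c).unwrap();
    println!("eta = {:?}", eta);
}

Since the challenges are a deterministic function of the appended commitments, a verifier replaying the same transcript recovers the same eta and zeta without extra proof elements, which is the usual Fiat-Shamir argument.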
use ark_ec::PairingEngine; use ark_ff::{Field, One, ToBytes, UniformRand, Zero}; use ark_poly::polynomial::univariate::DensePolynomial; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain, Polynomial, UVPolynomial}; use ark_std::{cfg_iter, cfg_iter_mut}; use merlin::Transcript; use rand::Rng; #[cfg(feature = "parallel")] use rayon::prelude::*; use crate::{ kzg10::{Proof, ProveAssignment, ProveKey, KZG10}, r1cs::{Index, SynthesisError}, };
pub fn create_random_proof<E: PairingEngine, R: Rng>( circuit: &ProveAssignment<E>, kzg10_ck: &ProveKey<'_, E>, rng: &mut R, ) -> Result<Proof<E>, SynthesisError> { let m_io = circuit.input_assignment.len(); let m_mid = circuit.aux_assignment.len(); let n = circuit.input_assignment[0].len(); let mut transcript = Transcript::new(b"CLINKv2"); let domain: GeneralEvaluationDomain<E::Fr> = EvaluationDomain::<E::Fr>::new(n).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let domain_size = domain.size(); let mut r_q_polys = vec![]; let mut r_mid_comms = vec![]; let mut r_mid_q_values = vec![]; let mut r_mid_q_rands = vec![]; let zero = E::Fr::zero(); let one = E::Fr::one(); let hiding_bound = Some(2); for j in 0..m_io { let rj_coeffs = domain.ifft(&circuit.input_assignment[j]); let rj_poly = DensePolynomial::from_coefficients_vec(rj_coeffs); r_q_polys.push(rj_poly); } for j in 0..m_mid { let rj_coeffs = domain.ifft(&circuit.aux_assignment[j]); let mut rj_poly = DensePolynomial::from_coefficients_vec(rj_coeffs); let rho = zero; let rho_poly = DensePolynomial::from_coefficients_vec(vec![rho; 1]); let vanishing_poly = domain.vanishing_polynomial(); rj_poly += &(&rho_poly * &vanishing_poly.into()); let (rj_comm, rj_rand) = KZG10::<E>::commit(&kzg10_ck, &rj_poly, hiding_bound, Some(rng))?; r_q_polys.push(rj_poly); r_mid_comms.push(rj_comm); r_mid_q_rands.push(rj_rand); } let mut r_mid_comms_bytes = vec![]; r_mid_comms.write(&mut r_mid_comms_bytes)?; transcript.append_message(b"witness polynomial commitments", &r_mid_comms_bytes); let mut c = [0u8; 31]; transcript.challenge_bytes(b"batching challenge", &mut c); let eta = E::Fr::from_random_bytes(&c).unwrap(); let m_abc = circuit.at.len(); let mut sum_coset_ab = vec![zero; domain_size]; let mut sum_c = vec![zero; domain_size]; let mut eta_i = one; for i in 0..m_abc { let mut ai_coeffs = vec![zero; domain_size]; for (coeff, index) in (&circuit.at[i]).into_iter() { let id = match index { Index::Input(j) => *j, Index::Aux(j) => m_io + *j, }; for k in 0..r_q_polys[id].coeffs.len() { ai_coeffs[k] += &(r_q_polys[id].coeffs[k] * coeff); } } let mut ai = DensePolynomial::from_coefficients_vec(ai_coeffs); let mut bi_coeffs = vec![zero; domain_size]; for (coeff, index) in (&circuit.bt[i]).into_iter() { let id = match index { Index::Input(j) => *j, Index::Aux(j) => m_io + *j, }; for k in 0..r_q_polys[id].coeffs.len() { bi_coeffs[k] += &(r_q_polys[id].coeffs[k] * coeff); } } let mut bi = DensePolynomial::from_coefficients_vec(bi_coeffs); domain.coset_fft_in_place(&mut ai.coeffs); domain.coset_fft_in_place(&mut bi.coeffs); let coset_ab_values = domain.mul_polynomials_in_evaluation_domain(&ai, &bi); drop(ai); drop(bi); cfg_iter!(coset_ab_values) .zip(&mut sum_coset_ab) .for_each(|(coset_abij, sum_coset_ab_j)| *sum_coset_ab_j += &(eta_i * (*coset_abij))); let mut ci_values = vec![zero; domain_size]; for (coeff, index) in (&circuit.ct[i]).into_iter() { match index { Index::Input(j) => { cfg_iter_mut!(&mut ci_values) .zip(&circuit.input_assignment[*j]) .for_each(|(cij, rij)| *cij += &(*rij * coeff)); } Index::Aux(j) => { cfg_iter_mut!(&mut ci_values) .zip(&circuit.aux_assignment[*j]) .for_each(|(cij, rij)| *cij += &(*rij * coeff)); } }; } cfg_iter!(ci_values) .zip(&mut sum_c) .for_each(|(cij, sum_c_j)| *sum_c_j += &(eta_i * (*cij))); eta_i = eta_i * &eta; } domain.ifft_in_place(&mut sum_c); domain.coset_fft_in_place(&mut sum_c); cfg_iter_mut!(sum_coset_ab) .zip(sum_c) .for_each(|(sum_coset_ab_j, sum_coset_c_j)| *sum_coset_ab_j -= &sum_coset_c_j); 
domain.divide_by_vanishing_poly_on_coset_in_place(&mut sum_coset_ab); domain.coset_ifft_in_place(&mut sum_coset_ab); let q_poly = DensePolynomial::from_coefficients_vec(sum_coset_ab); let (q_comm, q_rand) = KZG10::<E>::commit(&kzg10_ck, &q_poly, hiding_bound, Some(rng))?; let mut q_comm_bytes = vec![]; q_comm.write(&mut q_comm_bytes)?; transcript.append_message(b"quotient polynomial commitments", &q_comm_bytes); let mut c = [0u8; 31]; transcript.challenge_bytes(b"random point", &mut c); let zeta = E::Fr::from_random_bytes(&c).unwrap(); r_q_polys.push(q_poly); r_mid_q_rands.push(q_rand); for j in 0..(m_mid + 1) { let value = r_q_polys[j + m_io].evaluate(&zeta); r_mid_q_values.push(value); } let opening_challenge = E::Fr::rand(rng); let r_mid_q_proof = KZG10::<E>::batch_open( &kzg10_ck, &r_q_polys[m_io..], zeta, opening_challenge, &r_mid_q_rands, )?; let proof = Proof { r_mid_comms, q_comm, r_mid_q_values, r_mid_q_proof, opening_challenge, }; Ok(proof) }
function_block-full_function
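The context fragments below come from clinkv2's KZG10 module, which supplies the commit, batch_open, and batch_check calls the prover above is built on. The following is a minimal round-trip sketch written against only the signatures visible in those fragments; the Bls12_381 curve, the degrees, the test RNG, and the zkp_clinkv2 crate path (taken from the mimc_kzg10 example fragment) are assumptions made for illustration, not part of the original record.

use ark_bls12_381::{Bls12_381 as E, Fr};
use ark_ff::UniformRand;
use ark_poly::{polynomial::univariate::DensePolynomial, Polynomial, UVPolynomial};
use ark_std::test_rng;
use zkp_clinkv2::kzg10::KZG10;

fn main() {
    let mut rng = test_rng();

    // Universal setup and trim; `false` skips the negative G2 powers, which
    // commit/open/check do not need.
    let pp = KZG10::<E>::setup(32, false, &mut rng).unwrap();
    let (powers, vk) = KZG10::<E>::trim(&pp, 32).unwrap();

    // Commit to a few hiding polynomials, as the prover does for every r_mid
    // column and for the quotient q (hiding bound 2 matches the prover).
    let polys: Vec<DensePolynomial<Fr>> =
        (0..3).map(|_| DensePolynomial::rand(16, &mut rng)).collect();
    let mut comms = Vec::new();
    let mut rands = Vec::new();
    for p in &polys {
        let (c, r) = KZG10::<E>::commit(&powers, p, Some(2), Some(&mut rng)).unwrap();
        comms.push(c);
        rands.push(r);
    }

    // One batched opening at a single point zeta, folded with powers of an
    // opening challenge, then the matching batched check.
    let zeta = Fr::rand(&mut rng);
    let opening_challenge = Fr::rand(&mut rng);
    let values: Vec<Fr> = polys.iter().map(|p| p.evaluate(&zeta)).collect();
    let proof =
        KZG10::<E>::batch_open(&powers, &polys, zeta, opening_challenge, &rands).unwrap();
    assert!(
        KZG10::<E>::batch_check(&vk, &comms, zeta, &values, &proof, opening_challenge).unwrap()
    );
}

batch_open folds all of the polynomials into a single witness using powers of the opening challenge, which is why the Proof struct in the record above carries a single r_mid_q_proof covering every r_mid polynomial plus the quotient.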
[ { "content": "type Kzg10Proof<E> = kzg10::Proof<E>;\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 0, "score": 123476.63471316447 }, { "content": "fn skip_leading_zeros_and_convert_to_bigints<F: PrimeField>(\n\n p: &DensePolynomial<F>,\n\n) -> (usize, Vec<F::BigInt>) {\n\n let mut num_leading_zeros = 0;\n\n while p.coeffs[num_leading_zeros].is_zero() && num_leading_zeros < p.coeffs.len() {\n\n num_leading_zeros += 1;\n\n }\n\n let coeffs = convert_to_bigints(&p.coeffs[num_leading_zeros..]);\n\n (num_leading_zeros, coeffs)\n\n}\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 1, "score": 92167.76003254506 }, { "content": "type Kzg10Comm<E> = kzg10::Commitment<E>;\n\n\n\n/// The proof in Clinkv2.\n\n#[derive(Clone, Debug, Eq, PartialEq, CanonicalSerialize, CanonicalDeserialize)]\n\npub struct Proof<E: PairingEngine> {\n\n pub r_mid_comms: Vec<Kzg10Comm<E>>,\n\n pub q_comm: Kzg10Comm<E>,\n\n pub r_mid_q_values: Vec<E::Fr>,\n\n pub r_mid_q_proof: Kzg10Proof<E>,\n\n pub opening_challenge: E::Fr,\n\n}\n\n\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 2, "score": 91468.7211145301 }, { "content": "type PC = MarlinKZG10<E, DensePolynomial<Fr>>;\n", "file_path": "ckb-contracts/contracts/universal_plonk_verifier/src/entry.rs", "rank": 3, "score": 74661.75827325066 }, { "content": "//! Here we construct a polynomial commitment that enables users to commit to a\n\n//! single polynomial `p`, and then later provide an evaluation proof that\n\n//! convinces verifiers that a claimed value `v` is the true evaluation of `p`\n\n//! at a chosen point `x`. Our construction follows the template of the construction\n\n//! proposed by Kate, Zaverucha, and Goldberg ([KZG11](http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf)).\n\n//! This construction achieves extractability in the algebraic group model (AGM).\n\n\n\nuse ark_ec::{\n\n msm::{FixedBaseMSM, VariableBaseMSM},\n\n AffineCurve, PairingEngine, ProjectiveCurve,\n\n};\n\nuse ark_ff::{to_bytes, Field, One, PrimeField, ToBytes, UniformRand, Zero};\n\nuse ark_poly::{polynomial::univariate::DensePolynomial, Polynomial, UVPolynomial};\n\nuse ark_serialize::*;\n\nuse ark_std::{cfg_iter, io};\n\nuse core::marker::PhantomData;\n\nuse core::ops::{Add, AddAssign};\n\nuse rand::Rng;\n\n\n\n#[cfg(feature = \"parallel\")]\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 4, "score": 59179.36293409381 }, { "content": " /// This is the evaluation of the random polynomial at the point for which\n\n /// the evaluation proof was produced.\n\n pub random_v: Option<E::Fr>,\n\n}\n\n\n\nimpl<E: PairingEngine> Proof<E> {\n\n fn _size_in_bytes(&self) -> usize {\n\n let hiding_size = if self.random_v.is_some() {\n\n to_bytes![E::Fr::zero()].unwrap().len()\n\n } else {\n\n 0\n\n };\n\n to_bytes![E::G1Affine::zero()].unwrap().len() / 2 + hiding_size\n\n }\n\n}\n\n\n\n/// `KZG10` is an implementation of the polynomial commitment scheme of\n\n/// [Kate, Zaverucha and Goldbgerg][kzg10]\n\n///\n\n/// [kzg10]: http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 5, "score": 59174.48144837938 }, { "content": " /// For KZG10, the commitment randomness is a random polynomial.\n\n pub blinding_polynomial: DensePolynomial<E::Fr>,\n\n}\n\n\n\nimpl<E: PairingEngine> Randomness<E> {\n\n /// Does `self` provide any hiding properties to the corresponding commitment?\n\n /// `self.is_hiding() == true` only if the underlying polynomial is non-zero.\n\n #[inline]\n\n pub fn is_hiding(&self) -> bool {\n\n 
!self.blinding_polynomial.is_zero()\n\n }\n\n\n\n /// What is the degree of the hiding polynomial for a given hiding bound?\n\n #[inline]\n\n pub fn calculate_hiding_polynomial_degree(hiding_bound: usize) -> usize {\n\n hiding_bound + 1\n\n }\n\n\n\n fn empty() -> Self {\n\n Self {\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 6, "score": 59174.091406313826 }, { "content": "\n\n pub fn batch_open<'a>(\n\n powers: &Powers<'_, E>,\n\n polynomials: &[DensePolynomial<E::Fr>],\n\n point: E::Fr,\n\n opening_challenge: E::Fr,\n\n rands: &Vec<Randomness<E>>,\n\n ) -> Result<Proof<E>, Error> {\n\n let mut p = DensePolynomial::zero();\n\n let mut r = Randomness::empty();\n\n // let mut shifted_w = DensePolynomial::zero();\n\n // let mut shifted_r = Randomness::empty();\n\n // let mut shifted_r_witness = DensePolynomial::zero();\n\n\n\n let mut challenge_j = E::Fr::one();\n\n\n\n for (polynomial, rand) in polynomials.into_iter().zip(rands) {\n\n Self::check_degree_is_within_bounds(polynomial.degree(), powers.size())?;\n\n // compute challenge^j and challenge^{j+1}.\n\n //let challenge_j = opening_challenge.pow([2 * j as u64]);\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 7, "score": 59173.86098311165 }, { "content": " //let witness_time = start_timer!(|| \"Computing random witness polynomial\");\n\n let random_witness_polynomial = random_p / &divisor;\n\n //end_timer!(witness_time);\n\n Some(random_witness_polynomial)\n\n } else {\n\n None\n\n };\n\n\n\n Ok((witness_polynomial, random_witness_polynomial))\n\n }\n\n\n\n pub(crate) fn open_with_witness_polynomial<'a>(\n\n powers: &Powers<'_, E>,\n\n point: E::Fr,\n\n randomness: &Randomness<E>,\n\n witness_polynomial: &DensePolynomial<E::Fr>,\n\n hiding_witness_polynomial: Option<&DensePolynomial<E::Fr>>,\n\n ) -> Result<Proof<E>, Error> {\n\n Self::check_degree_is_too_large(witness_polynomial.degree(), powers.size())?;\n\n let (num_leading_zeros, witness_coeffs) =\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 8, "score": 59172.42497466907 }, { "content": "pub struct KZG10<E: PairingEngine> {\n\n _engine: PhantomData<E>,\n\n}\n\n\n\nimpl<E: PairingEngine> KZG10<E> {\n\n /// Constructs public parameters when given as input the maximum degree `degree`\n\n /// for the polynomial commitment scheme.\n\n pub fn setup<R: Rng>(\n\n max_degree: usize,\n\n produce_g2_powers: bool,\n\n rng: &mut R,\n\n ) -> Result<UniversalParams<E>, Error> {\n\n if max_degree < 1 {\n\n return Err(Error::DegreeIsZero);\n\n }\n\n //let setup_time = start_timer!(|| format!(\"KZG10::Setup with degree {}\", max_degree));\n\n let beta = E::Fr::rand(rng);\n\n let g = E::G1Projective::rand(rng);\n\n let gamma_g = E::G1Projective::rand(rng);\n\n let h = E::G2Projective::rand(rng);\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 9, "score": 59170.644039820145 }, { "content": " }\n\n }\n\n\n\n pub(crate) fn check_hiding_bound(\n\n hiding_poly_degree: usize,\n\n num_powers: usize,\n\n ) -> Result<(), Error> {\n\n if hiding_poly_degree == 0 {\n\n Err(Error::HidingBoundIsZero)\n\n } else if hiding_poly_degree >= num_powers {\n\n // The above check uses `>=` because committing to a hiding poly with\n\n // degree `hiding_poly_degree` requires `hiding_poly_degree + 1`\n\n // powers.\n\n Err(Error::HidingBoundToolarge {\n\n hiding_poly_degree,\n\n num_powers,\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 10, "score": 59170.408043229756 }, { "content": "use rayon::prelude::*;\n\n\n\nuse 
crate::*;\n\n\n\n/// `UniversalParams` are the universal parameters for the KZG10 scheme.\n\n#[derive(Derivative)]\n\n#[derivative(Default(bound = \"\"), Clone(bound = \"\"), Debug(bound = \"\"))]\n\npub struct UniversalParams<E: PairingEngine> {\n\n /// Group elements of the form `{ \\beta^i G }`, where `i` ranges from 0 to `degree`.\n\n pub powers_of_g: Vec<E::G1Affine>,\n\n /// Group elements of the form `{ \\beta^i \\gamma G }`, where `i` ranges from 0 to `degree`.\n\n pub powers_of_gamma_g: Vec<E::G1Affine>,\n\n /// The generator of G2.\n\n pub h: E::G2Affine,\n\n /// \\beta times the above generator of G2.\n\n pub beta_h: E::G2Affine,\n\n /// Group elements of the form `{ \\beta^i G2 }`, where `i` ranges from `0` to `-degree`.\n\n pub prepared_neg_powers_of_h: Option<Vec<E::G2Prepared>>,\n\n /// The generator of G2, prepared for use in pairings.\n\n #[derivative(Debug = \"ignore\")]\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 11, "score": 59170.220070134426 }, { "content": " #[inline]\n\n fn add_assign(&mut self, (f, other): (E::Fr, &'a Randomness<E>)) {\n\n self.blinding_polynomial += (f, &other.blinding_polynomial);\n\n }\n\n}\n\n\n\n/// `Proof` is an evaluation proof that is output by `KZG10::open`.\n\n#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]\n\n#[derivative(\n\n Default(bound = \"\"),\n\n Hash(bound = \"\"),\n\n Clone(bound = \"\"),\n\n Copy(bound = \"\"),\n\n Debug(bound = \"\"),\n\n PartialEq(bound = \"\"),\n\n Eq(bound = \"\")\n\n)]\n\npub struct Proof<E: PairingEngine> {\n\n /// This is a commitment to the witness polynomial; see [KZG10] for more details.\n\n pub w: E::G1Affine,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 12, "score": 59169.64212591166 }, { "content": " skip_leading_zeros_and_convert_to_bigints(&witness_polynomial);\n\n\n\n //let witness_comm_time = start_timer!(|| \"Computing commitment to witness polynomial\");\n\n let mut w = VariableBaseMSM::multi_scalar_mul(\n\n &powers.powers_of_g[num_leading_zeros..],\n\n &witness_coeffs,\n\n );\n\n //end_timer!(witness_comm_time);\n\n\n\n let random_v = if let Some(hiding_witness_polynomial) = hiding_witness_polynomial {\n\n let blinding_p = &randomness.blinding_polynomial;\n\n //let blinding_eval_time = start_timer!(|| \"Evaluating random polynomial\");\n\n let blinding_evaluation = blinding_p.evaluate(&point);\n\n //end_timer!(blinding_eval_time);\n\n\n\n let random_witness_coeffs = convert_to_bigints(&hiding_witness_polynomial.coeffs);\n\n // let witness_comm_time =\n\n // start_timer!(|| \"Computing commitment to random witness polynomial\");\n\n w += &VariableBaseMSM::multi_scalar_mul(\n\n &powers.powers_of_gamma_g,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 13, "score": 59169.60506798795 }, { "content": " /// The label of the missing polynomial.\n\n label: String,\n\n },\n\n\n\n /// The LHS of the equation is empty.\n\n MissingLHS {\n\n /// The label of the equation.\n\n label: String,\n\n },\n\n\n\n /// The provided polynomial was meant to be hiding, but `rng` was `None`.\n\n MissingRng,\n\n\n\n /// The degree provided in setup was too small; degree 0 polynomials\n\n /// are not supported.\n\n DegreeIsZero,\n\n\n\n /// The degree of the polynomial passed to `commit` or `open`\n\n /// was too large.\n\n TooManyCoefficients {\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 14, "score": 59169.32681341397 }, { "content": " // ));\n\n\n\n let (num_leading_zeros, plain_coeffs) =\n\n skip_leading_zeros_and_convert_to_bigints(&polynomial);\n\n\n\n //let 
msm_time = start_timer!(|| \"MSM to compute commitment to plaintext poly\");\n\n let mut commitment = VariableBaseMSM::multi_scalar_mul(\n\n &powers.powers_of_g[num_leading_zeros..],\n\n &plain_coeffs,\n\n );\n\n //end_timer!(msm_time);\n\n\n\n let mut randomness = Randomness::empty();\n\n if let Some(hiding_degree) = hiding_bound {\n\n let mut rng = rng.ok_or(Error::MissingRng)?;\n\n // let sample_random_poly_time = start_timer!(|| format!(\n\n // \"Sampling a random polynomial of degree {}\",\n\n // hiding_degree\n\n // ));\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 15, "score": 59169.11353799469 }, { "content": " blinding_polynomial: DensePolynomial::zero(),\n\n }\n\n }\n\n\n\n fn rand<R: Rng>(hiding_bound: usize, _: bool, rng: &mut R) -> Self {\n\n let mut randomness = Randomness::empty();\n\n let hiding_poly_degree = Self::calculate_hiding_polynomial_degree(hiding_bound);\n\n randomness.blinding_polynomial = DensePolynomial::rand(hiding_poly_degree, rng);\n\n randomness\n\n }\n\n}\n\n\n\nimpl<'a, E: PairingEngine> Add<&'a Randomness<E>> for Randomness<E> {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn add(mut self, other: &'a Self) -> Self {\n\n self.blinding_polynomial += &other.blinding_polynomial;\n\n self\n\n }\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 16, "score": 59169.02421721992 }, { "content": "\n\n /// Compute witness polynomial.\n\n ///\n\n /// The witness polynomial w(x) the quotient of the division (p(x) - p(z)) / (x - z)\n\n /// Observe that this quotient does not change with z because\n\n /// p(z) is the remainder term. We can therefore omit p(z) when computing the quotient.\n\n pub fn compute_witness_polynomial(\n\n p: &DensePolynomial<E::Fr>,\n\n point: E::Fr,\n\n randomness: &Randomness<E>,\n\n ) -> Result<(DensePolynomial<E::Fr>, Option<DensePolynomial<E::Fr>>), Error> {\n\n let divisor = DensePolynomial::from_coefficients_vec(vec![-point, E::Fr::one()]);\n\n\n\n //let witness_time = start_timer!(|| \"Computing witness polynomial\");\n\n let witness_polynomial = p / &divisor;\n\n //end_timer!(witness_time);\n\n\n\n let random_witness_polynomial = if randomness.is_hiding() {\n\n let random_p = &randomness.blinding_polynomial;\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 17, "score": 59168.9750406682 }, { "content": " &random_witness_coeffs,\n\n );\n\n //end_timer!(witness_comm_time);\n\n Some(blinding_evaluation)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(Proof {\n\n w: w.into_affine(),\n\n random_v,\n\n })\n\n }\n\n\n\n /// On input a polynomial `p` and a point `point`, outputs a proof for the same.\n\n // pub(crate) fn open<'a>(\n\n pub fn open<'a>(\n\n powers: &Powers<'_, E>,\n\n p: &DensePolynomial<E::Fr>,\n\n point: E::Fr,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 18, "score": 59168.741497663985 }, { "content": " f,\n\n \"`QuerySet` refers to polynomial \\\"{}\\\", but `Evaluations` does not contain an evaluation for it.\",\n\n label\n\n ),\n\n Error::MissingLHS { label } => {\n\n write!(f, \"Equation \\\"{}\\\" does not have a LHS.\", label)\n\n },\n\n Error::MissingRng => write!(f, \"hiding commitments require `Some(rng)`\"),\n\n Error::DegreeIsZero => write!(\n\n f,\n\n \"this scheme does not support committing to degree 0 polynomials\"\n\n ),\n\n Error::TooManyCoefficients {\n\n num_coefficients,\n\n num_powers,\n\n } => write!(\n\n f,\n\n \"the number of coefficients in the polynomial ({:?}) is greater than\\\n\n the maximum number of powers in `Powers` ({:?})\",\n\n num_coefficients, num_powers\n", 
"file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 19, "score": 59168.26542865842 }, { "content": " /// The number of coefficients in the polynomial.\n\n num_coefficients: usize,\n\n /// The maximum number of powers provided in `Powers`.\n\n num_powers: usize,\n\n },\n\n\n\n /// The hiding bound was not `None`, but the hiding bound was zero.\n\n HidingBoundIsZero,\n\n\n\n /// The hiding bound was too large for the given `Powers`.\n\n HidingBoundToolarge {\n\n /// The hiding bound\n\n hiding_poly_degree: usize,\n\n /// The number of powers.\n\n num_powers: usize,\n\n },\n\n\n\n /// The degree provided to `trim` was too large.\n\n TrimmingDegreeTooLarge,\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 20, "score": 59167.95488460059 }, { "content": " pub prepared_h: E::G2Prepared,\n\n /// \\beta times the above generator of G2, prepared for use in pairings.\n\n #[derivative(Debug = \"ignore\")]\n\n pub prepared_beta_h: E::G2Prepared,\n\n}\n\n\n\nimpl<E: PairingEngine> UniversalParams<E> {\n\n fn _max_degree(&self) -> usize {\n\n self.powers_of_g.len() - 1\n\n }\n\n}\n\n\n\n/// `Powers` is used to commit to and create evaluation proofs for a given\n\n/// polynomial.\n\n#[derive(Derivative)]\n\n#[derivative(\n\n Default(bound = \"\"),\n\n Hash(bound = \"\"),\n\n Clone(bound = \"\"),\n\n Debug(bound = \"\")\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 21, "score": 59167.84757772843 }, { "content": " /// The provided `enforced_degree_bounds` was `Some<&[]>`.\n\n EmptyDegreeBounds,\n\n\n\n /// The provided equation contained multiple polynomials, of which least one\n\n /// had a strict degree bound.\n\n EquationHasDegreeBounds(String),\n\n\n\n /// The required degree bound is not supported by ck/vk\n\n UnsupportedDegreeBound(usize),\n\n\n\n /// The degree bound for the `index`-th polynomial passed to `commit`, `open`\n\n /// or `check` was incorrect, that is, `degree_bound >= poly_degree` or\n\n /// `degree_bound <= max_degree`.\n\n IncorrectDegreeBound {\n\n /// Degree of the polynomial.\n\n poly_degree: usize,\n\n /// Degree bound.\n\n degree_bound: usize,\n\n /// Maximum supported degree.\n\n supported_degree: usize,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 22, "score": 59167.81394206696 }, { "content": " num_powers: usize,\n\n ) -> Result<(), Error> {\n\n // if num_coefficients < 1 {\n\n // Err(Error::DegreeIsZero)\n\n // } else {\n\n Self::check_degree_is_too_large(num_coefficients, num_powers)\n\n // }\n\n }\n\n\n\n pub(crate) fn check_degree_is_too_large(\n\n num_coefficients: usize,\n\n num_powers: usize,\n\n ) -> Result<(), Error> {\n\n if num_coefficients > num_powers {\n\n Err(Error::TooManyCoefficients {\n\n num_coefficients,\n\n num_powers,\n\n })\n\n } else {\n\n Ok(())\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 23, "score": 59166.99315857248 }, { "content": "\n\n fn accumulate_commitments_and_values<'a>(\n\n _vk: &VerifierKey<E>,\n\n commitments: &[Commitment<E>],\n\n values: &[E::Fr],\n\n opening_challenge: E::Fr,\n\n ) -> Result<(E::G1Projective, E::Fr), Error> {\n\n //let acc_time = start_timer!(|| \"Accumulating commitments and values\");\n\n let mut combined_comm = E::G1Projective::zero();\n\n let mut combined_value = E::Fr::zero();\n\n let mut challenge_i = E::Fr::one();\n\n for (commitment, value) in commitments.into_iter().zip(values) {\n\n combined_comm += &commitment.0.mul(challenge_i);\n\n combined_value += &(*value * &challenge_i);\n\n challenge_i *= &opening_challenge.square();\n\n }\n\n\n\n //end_timer!(acc_time);\n\n 
Ok((combined_comm, combined_value))\n\n }\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 24, "score": 59166.82953057544 }, { "content": " pub fn batch_check_to_mul_values<R: Rng>(\n\n vk: &VerifierKey<E>,\n\n commitments: &[Commitment<E>],\n\n points: &[E::Fr],\n\n values: &[E::Fr],\n\n proofs: &[Proof<E>],\n\n rng: &mut R,\n\n ) -> Result<bool, Error> {\n\n // let check_time =\n\n // start_timer!(|| format!(\"Checking {} evaluation proofs\", commitments.len()));\n\n let g = vk.g.into_projective();\n\n let gamma_g = vk.gamma_g.into_projective();\n\n\n\n let mut total_c = <E::G1Projective>::zero();\n\n let mut total_w = <E::G1Projective>::zero();\n\n\n\n //let combination_time = start_timer!(|| \"Combining commitments and proofs\");\n\n let mut randomizer = E::Fr::one();\n\n // Instead of multiplying g and gamma_g in each turn, we simply accumulate\n\n // their coefficients and perform a final multiplication at the end.\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 25, "score": 59166.72525347697 }, { "content": "impl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Commitment<E>)> for Commitment<E> {\n\n #[inline]\n\n fn add_assign(&mut self, (f, other): (E::Fr, &'a Commitment<E>)) {\n\n let mut other = other.0.mul(f.into_repr());\n\n other.add_assign_mixed(&self.0);\n\n self.0 = other.into();\n\n }\n\n}\n\n\n\n/// `Randomness` hides the polynomial inside a commitment. It is output by `KZG10::commit`.\n\n#[derive(Derivative)]\n\n#[derivative(\n\n Default(bound = \"\"),\n\n Hash(bound = \"\"),\n\n Clone(bound = \"\"),\n\n Debug(bound = \"\"),\n\n PartialEq(bound = \"\"),\n\n Eq(bound = \"\")\n\n)]\n\npub struct Randomness<E: PairingEngine> {\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 26, "score": 59166.564713968335 }, { "content": "impl<E: PairingEngine> Eq for VerifierKey<E> {}\n\n\n\n/// `Commitment` commits to a polynomial. 
It is output by `KZG10::commit`.\n\n#[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]\n\n#[derivative(\n\n Default(bound = \"\"),\n\n Hash(bound = \"\"),\n\n Clone(bound = \"\"),\n\n Copy(bound = \"\"),\n\n Debug(bound = \"\"),\n\n PartialEq(bound = \"\"),\n\n Eq(bound = \"\")\n\n)]\n\npub struct Commitment<E: PairingEngine>(\n\n /// The commitment is a group element.\n\n pub E::G1Affine,\n\n);\n\n\n\nimpl<E: PairingEngine> Commitment<E> {\n\n #[inline]\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 27, "score": 59166.564713968335 }, { "content": "\n\n //let to_affine_time = start_timer!(|| \"Converting results to affine for pairing\");\n\n let affine_points = E::G1Projective::batch_normalization_into_affine(&[-total_w, total_c]);\n\n let (total_w, total_c) = (affine_points[0], affine_points[1]);\n\n //end_timer!(to_affine_time);\n\n\n\n //let pairing_time = start_timer!(|| \"Performing product of pairings\");\n\n let result = E::product_of_pairings(&[\n\n (total_w.into(), vk.prepared_beta_h.clone()),\n\n (total_c.into(), vk.prepared_h.clone()),\n\n ])\n\n .is_one();\n\n //end_timer!(pairing_time);\n\n //end_timer!(check_time, || format!(\"Result: {}\", result));\n\n Ok(result)\n\n }\n\n\n\n // Functions for checking errors\n\n pub(crate) fn check_degree_is_within_bounds(\n\n num_coefficients: usize,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 28, "score": 59165.8229333891 }, { "content": " beta_h: pp.beta_h,\n\n prepared_h: pp.prepared_h.clone(),\n\n prepared_beta_h: pp.prepared_beta_h.clone(),\n\n };\n\n Ok((powers, vk))\n\n }\n\n\n\n /// Outputs a commitment to `polynomial`.\n\n pub fn commit<R: Rng>(\n\n powers: &Powers<'_, E>,\n\n polynomial: &DensePolynomial<E::Fr>,\n\n hiding_bound: Option<usize>,\n\n rng: Option<&mut R>,\n\n ) -> Result<(Commitment<E>, Randomness<E>), Error> {\n\n Self::check_degree_is_within_bounds(polynomial.degree(), powers.size())?;\n\n\n\n // let commit_time = start_timer!(|| format!(\n\n // \"Committing to polynomial of degree {} with hiding_bound: {:?}\",\n\n // polynomial.degree(),\n\n // hiding_bound,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 29, "score": 59165.268620627074 }, { "content": " /// Index of the offending polynomial.\n\n label: String,\n\n },\n\n\n\n /// The inputs to `commit`, `open` or `verify` had incorrect lengths.\n\n IncorrectInputLength(String),\n\n\n\n /// The commitment was generated incorrectly, tampered with, or doesn't support the polynomial.\n\n MalformedCommitment(String),\n\n}\n\n\n\nimpl core::fmt::Display for Error {\n\n fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {\n\n match self {\n\n Error::MissingPolynomial { label } => write!(\n\n f,\n\n \"`QuerySet` refers to polynomial \\\"{}\\\", but it was not provided.\",\n\n label\n\n ),\n\n Error::MissingEvaluation { label } => write!(\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 30, "score": 59164.995295991044 }, { "content": " p += (challenge_j, polynomial);\n\n r += (challenge_j, rand);\n\n\n\n challenge_j *= &opening_challenge.square();\n\n }\n\n\n\n //let proof_time = start_timer!(|| \"Creating proof for unshifted polynomials\");\n\n let proof = Self::open(powers, &p, point, &r)?;\n\n let w = proof.w.into_projective();\n\n let random_v = proof.random_v;\n\n //end_timer!(proof_time);\n\n\n\n Ok(Proof {\n\n w: w.into_affine(),\n\n random_v,\n\n })\n\n }\n\n\n\n /// Verifies that `value` is the evaluation at `point` of the polynomial\n\n /// committed inside `comm`.\n", "file_path": 
"clinkv2/src/kzg10/kzg10.rs", "rank": 31, "score": 59164.88380810329 }, { "content": " rand: &Randomness<E>,\n\n ) -> Result<Proof<E>, Error> {\n\n Self::check_degree_is_within_bounds(p.degree(), powers.size())?;\n\n //let open_time = start_timer!(|| format!(\"Opening polynomial of degree {}\", p.degree()));\n\n\n\n //let witness_time = start_timer!(|| \"Computing witness polynomials\");\n\n let (witness_poly, hiding_witness_poly) = Self::compute_witness_polynomial(p, point, rand)?;\n\n //end_timer!(witness_time);\n\n\n\n let proof = Self::open_with_witness_polynomial(\n\n powers,\n\n point,\n\n rand,\n\n &witness_poly,\n\n hiding_witness_poly.as_ref(),\n\n );\n\n\n\n //end_timer!(open_time);\n\n proof\n\n }\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 32, "score": 59164.879521206116 }, { "content": " }\n\n}\n\n\n\n/// `VerifierKey` is used to check evaluation proofs for a given commitment.\n\n#[derive(Derivative)]\n\n#[derivative(Default(bound = \"\"), Clone(bound = \"\"), Debug(bound = \"\"))]\n\npub struct VerifierKey<E: PairingEngine> {\n\n /// The generator of G1.\n\n pub g: E::G1Affine,\n\n /// The generator of G1 that is used for making a commitment hiding.\n\n pub gamma_g: E::G1Affine,\n\n /// The generator of G2.\n\n pub h: E::G2Affine,\n\n /// \\beta times the above generator of G2.\n\n pub beta_h: E::G2Affine,\n\n /// The generator of G2, prepared for use in pairings.\n\n #[derivative(Debug = \"ignore\")]\n\n pub prepared_h: E::G2Prepared,\n\n /// \\beta times the above generator of G2, prepared for use in pairings.\n\n #[derivative(Debug = \"ignore\")]\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 33, "score": 59164.65525873333 }, { "content": " fn _empty() -> Self {\n\n Commitment(E::G1Affine::zero())\n\n }\n\n\n\n fn _has_degree_bound(&self) -> bool {\n\n false\n\n }\n\n\n\n fn _size_in_bytes(&self) -> usize {\n\n to_bytes![E::G1Affine::zero()].unwrap().len() / 2\n\n }\n\n}\n\n\n\nimpl<E: PairingEngine> ToBytes for Commitment<E> {\n\n #[inline]\n\n fn write<W: io::Write>(&self, mut writer: W) -> io::Result<()> {\n\n self.0.write(&mut writer)\n\n }\n\n}\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 34, "score": 59164.27700150646 }, { "content": "}\n\n\n\nimpl<'a, E: PairingEngine> Add<(E::Fr, &'a Randomness<E>)> for Randomness<E> {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn add(mut self, other: (E::Fr, &'a Randomness<E>)) -> Self {\n\n self += other;\n\n self\n\n }\n\n}\n\n\n\nimpl<'a, E: PairingEngine> AddAssign<&'a Randomness<E>> for Randomness<E> {\n\n #[inline]\n\n fn add_assign(&mut self, other: &'a Self) {\n\n self.blinding_polynomial += &other.blinding_polynomial;\n\n }\n\n}\n\n\n\nimpl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Randomness<E>)> for Randomness<E> {\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 35, "score": 59164.08292597099 }, { "content": " f,\n\n \"the eqaution \\\"{}\\\" contained degree-bounded polynomials\",\n\n e\n\n ),\n\n Error::UnsupportedDegreeBound(bound) => write!(\n\n f,\n\n \"the degree bound ({:?}) is not supported by the parameters\",\n\n bound,\n\n ),\n\n Error::IncorrectDegreeBound {\n\n poly_degree,\n\n degree_bound,\n\n supported_degree,\n\n label,\n\n } => write!(\n\n f,\n\n \"the degree bound ({:?}) for the polynomial {} \\\n\n (having degree {:?}) is greater than the maximum \\\n\n supported degree ({:?})\",\n\n degree_bound, label, poly_degree, supported_degree\n\n ),\n\n Error::IncorrectInputLength(err) => write!(f, \"{}\", err),\n\n Error::MalformedCommitment(err) => write!(f, 
\"{}\", err)\n\n }\n\n }\n\n}\n\n\n\n//impl ark_std::io::Error for Error {}\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 36, "score": 59163.88038180261 }, { "content": " let mut g_multiplier = E::Fr::zero();\n\n let mut gamma_g_multiplier = E::Fr::zero();\n\n for (((c, z), v), proof) in commitments.iter().zip(points).zip(values).zip(proofs) {\n\n let w = proof.w;\n\n let mut temp = w.mul(*z);\n\n temp.add_assign_mixed(&c.0);\n\n let c = temp;\n\n g_multiplier += &(randomizer * v);\n\n if let Some(random_v) = proof.random_v {\n\n gamma_g_multiplier += &(randomizer * &random_v);\n\n }\n\n total_c += &c.mul(randomizer.into());\n\n total_w += &w.mul(randomizer.into());\n\n // We don't need to sample randomizers from the full field,\n\n // only from 128-bit strings.\n\n randomizer = u128::rand(rng).into();\n\n }\n\n total_c -= &g.mul(g_multiplier.into());\n\n total_c -= &gamma_g.mul(gamma_g_multiplier.into());\n\n //end_timer!(combination_time);\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 37, "score": 59163.75936787952 }, { "content": " randomness = Randomness::rand(hiding_degree, false, &mut rng);\n\n Self::check_hiding_bound(\n\n randomness.blinding_polynomial.degree(),\n\n powers.powers_of_gamma_g.len(),\n\n )?;\n\n //end_timer!(sample_random_poly_time);\n\n }\n\n\n\n let random_ints = convert_to_bigints(&randomness.blinding_polynomial.coeffs);\n\n //let msm_time = start_timer!(|| \"MSM to compute commitment to random poly\");\n\n let random_commitment =\n\n VariableBaseMSM::multi_scalar_mul(&powers.powers_of_gamma_g, random_ints.as_slice())\n\n .into_affine();\n\n //end_timer!(msm_time);\n\n\n\n commitment.add_assign_mixed(&random_commitment);\n\n\n\n //end_timer!(commit_time);\n\n Ok((Commitment(commitment.into()), randomness))\n\n }\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 38, "score": 59163.67573751042 }, { "content": " let prepared_neg_powers_of_h = if produce_g2_powers {\n\n let mut neg_powers_of_beta = vec![E::Fr::one()];\n\n let mut cur = E::Fr::one() / &beta;\n\n for _ in 0..max_degree {\n\n neg_powers_of_beta.push(cur);\n\n cur /= &beta;\n\n }\n\n\n\n let neg_h_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, h);\n\n let neg_powers_of_h = FixedBaseMSM::multi_scalar_mul::<E::G2Projective>(\n\n scalar_bits,\n\n window_size,\n\n &neg_h_table,\n\n &neg_powers_of_beta,\n\n );\n\n\n\n let affines = E::G2Projective::batch_normalization_into_affine(&neg_powers_of_h);\n\n Some(affines.into_iter().map(|a| a.into()).collect())\n\n } else {\n\n None\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 39, "score": 59163.59992848745 }, { "content": " ),\n\n Error::HidingBoundIsZero => write!(\n\n f,\n\n \"this scheme does not support non-`None` hiding bounds that are 0\"\n\n ),\n\n Error::HidingBoundToolarge {\n\n hiding_poly_degree,\n\n num_powers,\n\n } => write!(\n\n f,\n\n \"the degree of the hiding poly ({:?}) is not less than the maximum number of powers in `Powers` ({:?})\",\n\n hiding_poly_degree, num_powers\n\n ),\n\n Error::TrimmingDegreeTooLarge => {\n\n write!(f, \"the degree provided to `trim` was too large\")\n\n }\n\n Error::EmptyDegreeBounds => {\n\n write!(f, \"provided `enforced_degree_bounds` was `Some<&[]>`\")\n\n }\n\n Error::EquationHasDegreeBounds(e) => write!(\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 40, "score": 59162.693254489815 }, { "content": "\n\n let mut powers_of_beta = vec![E::Fr::one()];\n\n\n\n let mut cur = beta;\n\n for _ in 0..max_degree {\n\n powers_of_beta.push(cur);\n\n cur *= &beta;\n\n 
}\n\n\n\n let window_size = FixedBaseMSM::get_mul_window_size(max_degree + 1);\n\n\n\n let scalar_bits = E::Fr::size_in_bits();\n\n //let g_time = start_timer!(|| \"Generating powers of G\");\n\n let g_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, g);\n\n let powers_of_g = FixedBaseMSM::multi_scalar_mul::<E::G1Projective>(\n\n scalar_bits,\n\n window_size,\n\n &g_table,\n\n &powers_of_beta,\n\n );\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 41, "score": 59162.46629567226 }, { "content": " pub prepared_beta_h: E::G2Prepared,\n\n}\n\n\n\nimpl<E: PairingEngine> CanonicalSerialize for VerifierKey<E> {\n\n #[inline]\n\n fn serialize<W: io::Write>(&self, mut writer: W) -> Result<(), SerializationError> {\n\n self.g.serialize(&mut writer)?;\n\n self.gamma_g.serialize(&mut writer)?;\n\n self.h.serialize(&mut writer)?;\n\n self.beta_h.serialize(&mut writer)\n\n }\n\n\n\n #[inline]\n\n fn serialized_size(&self) -> usize {\n\n self.g.serialized_size()\n\n + self.gamma_g.serialized_size()\n\n + self.h.serialized_size()\n\n + self.beta_h.serialized_size()\n\n }\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 42, "score": 59159.08570504944 }, { "content": " }\n\n\n\n pub fn trim(\n\n pp: &UniversalParams<E>,\n\n mut supported_degree: usize,\n\n ) -> Result<(Powers<'_, E>, VerifierKey<E>), Error> {\n\n if supported_degree == 1 {\n\n supported_degree += 1;\n\n }\n\n let powers_of_g = pp.powers_of_g[..=supported_degree].to_vec();\n\n let powers_of_gamma_g = pp.powers_of_gamma_g[..=supported_degree].to_vec();\n\n\n\n let powers = Powers {\n\n powers_of_g: Cow::Owned(powers_of_g),\n\n powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),\n\n };\n\n let vk = VerifierKey {\n\n g: pp.powers_of_g[0],\n\n gamma_g: pp.powers_of_gamma_g[0],\n\n h: pp.h,\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 43, "score": 59159.08570504944 }, { "content": " };\n\n\n\n //end_timer!(prepared_neg_powers_of_h_time);\n\n\n\n let beta_h = h.mul(beta.into()).into_affine();\n\n let h = h.into_affine();\n\n let prepared_h = h.into();\n\n let prepared_beta_h = beta_h.into();\n\n\n\n let pp = UniversalParams {\n\n powers_of_g,\n\n powers_of_gamma_g,\n\n h,\n\n beta_h,\n\n prepared_neg_powers_of_h,\n\n prepared_h,\n\n prepared_beta_h,\n\n };\n\n //end_timer!(setup_time);\n\n Ok(pp)\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 44, "score": 59159.08570504944 }, { "content": " powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),\n\n })\n\n }\n\n\n\n #[inline]\n\n fn deserialize_unchecked<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let powers_of_g = Vec::<E::G1Affine>::deserialize_unchecked(&mut reader)?;\n\n let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize_unchecked(&mut reader)?;\n\n\n\n Ok(Powers {\n\n powers_of_g: Cow::Owned(powers_of_g),\n\n powers_of_gamma_g: Cow::Owned(powers_of_gamma_g),\n\n })\n\n }\n\n}\n\n\n\nimpl<E: PairingEngine> Powers<'_, E> {\n\n /// The number of powers in `self`.\n\n pub fn size(&self) -> usize {\n\n self.powers_of_g.len()\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 45, "score": 59159.08570504944 }, { "content": "\n\nimpl<'a, E: PairingEngine> CanonicalDeserialize for Powers<'a, E> {\n\n #[inline]\n\n fn deserialize<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let powers_of_g = Vec::<E::G1Affine>::deserialize(&mut reader)?;\n\n let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize(&mut reader)?;\n\n\n\n Ok(Powers {\n\n powers_of_g: Cow::Owned(powers_of_g),\n\n powers_of_gamma_g: 
Cow::Owned(powers_of_gamma_g),\n\n })\n\n }\n\n\n\n #[inline]\n\n fn deserialize_uncompressed<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let powers_of_g = Vec::<E::G1Affine>::deserialize_uncompressed(&mut reader)?;\n\n let powers_of_gamma_g = Vec::<E::G1Affine>::deserialize_uncompressed(&mut reader)?;\n\n\n\n Ok(Powers {\n\n powers_of_g: Cow::Owned(powers_of_g),\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 46, "score": 59159.08570504944 }, { "content": " fn uncompressed_size(&self) -> usize {\n\n self.g.uncompressed_size()\n\n + self.gamma_g.uncompressed_size()\n\n + self.h.uncompressed_size()\n\n + self.beta_h.uncompressed_size()\n\n }\n\n}\n\n\n\nimpl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {\n\n #[inline]\n\n fn deserialize<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let g = E::G1Affine::deserialize(&mut reader)?;\n\n let gamma_g = E::G1Affine::deserialize(&mut reader)?;\n\n let h = E::G2Affine::deserialize(&mut reader)?;\n\n let beta_h = E::G2Affine::deserialize(&mut reader)?;\n\n\n\n Ok(VerifierKey {\n\n g, gamma_g, h, beta_h,\n\n prepared_h: Default::default(),\n\n prepared_beta_h: Default::default(),\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 47, "score": 59159.08570504944 }, { "content": " #[inline]\n\n fn serialize_uncompressed<W: io::Write>(\n\n &self,\n\n mut writer: W,\n\n ) -> Result<(), SerializationError> {\n\n self.g.serialize_uncompressed(&mut writer)?;\n\n self.gamma_g.serialize_uncompressed(&mut writer)?;\n\n self.h.serialize_uncompressed(&mut writer)?;\n\n self.beta_h.serialize_uncompressed(&mut writer)\n\n }\n\n\n\n #[inline]\n\n fn serialize_unchecked<W: io::Write>(&self, mut writer: W) -> Result<(), SerializationError> {\n\n self.g.serialize_unchecked(&mut writer)?;\n\n self.gamma_g.serialize_unchecked(&mut writer)?;\n\n self.h.serialize_unchecked(&mut writer)?;\n\n self.beta_h.serialize_unchecked(&mut writer)\n\n }\n\n\n\n #[inline]\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 48, "score": 59159.08570504944 }, { "content": "\n\n pub fn batch_check<'a>(\n\n vk: &VerifierKey<E>,\n\n commitments: &[Commitment<E>],\n\n point: E::Fr,\n\n values: &[E::Fr],\n\n proof: &Proof<E>,\n\n opening_challenge: E::Fr,\n\n ) -> Result<bool, Error> {\n\n //let check_time = start_timer!(|| \"Checking evaluations\");\n\n let (combined_comm, combined_value) =\n\n Self::accumulate_commitments_and_values(vk, commitments, values, opening_challenge)?;\n\n let combined_comm = Commitment(combined_comm.into());\n\n let result = Self::check(vk, &combined_comm, point, combined_value, proof)?;\n\n //end_timer!(check_time);\n\n Ok(result)\n\n }\n\n\n\n /// Check that each `proof_i` in `proofs` is a valid proof of evaluation for\n\n /// `commitment_i` at `point_i`.\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 49, "score": 59159.08570504944 }, { "content": " let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?;\n\n let h = E::G2Affine::deserialize_unchecked(&mut reader)?;\n\n let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?;\n\n\n\n Ok(VerifierKey {\n\n g, gamma_g, h, beta_h,\n\n prepared_h: Default::default(),\n\n prepared_beta_h: Default::default(),\n\n })\n\n }\n\n}\n\n\n\nimpl<E: PairingEngine> PartialEq for VerifierKey<E> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.g == other.g\n\n && self.gamma_g == other.gamma_g\n\n && self.h == other.h\n\n && self.beta_h == other.beta_h\n\n }\n\n}\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 50, "score": 
59159.08570504944 }, { "content": " pub fn check(\n\n vk: &VerifierKey<E>,\n\n comm: &Commitment<E>,\n\n point: E::Fr,\n\n value: E::Fr,\n\n proof: &Proof<E>,\n\n ) -> Result<bool, Error> {\n\n //let check_time = start_timer!(|| \"Checking evaluation\");\n\n let mut inner = comm.0.into_projective() - &vk.g.into_projective().mul(value.into());\n\n if let Some(random_v) = proof.random_v {\n\n inner -= &vk.gamma_g.mul(random_v);\n\n }\n\n let lhs = E::pairing(inner, vk.h);\n\n\n\n let inner = vk.beta_h.into_projective() - &vk.h.mul(point);\n\n let rhs = E::pairing(proof.w, inner);\n\n\n\n //end_timer!(check_time, || format!(\"Result: {}\", lhs == rhs));\n\n Ok(lhs == rhs)\n\n }\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 51, "score": 59159.08570504944 }, { "content": " //end_timer!(g_time);\n\n //let gamma_g_time = start_timer!(|| \"Generating powers of gamma * G\");\n\n let gamma_g_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, gamma_g);\n\n let mut powers_of_gamma_g = FixedBaseMSM::multi_scalar_mul::<E::G1Projective>(\n\n scalar_bits,\n\n window_size,\n\n &gamma_g_table,\n\n &powers_of_beta,\n\n );\n\n // Add an additional power of gamma_g, because we want to be able to support\n\n // up to D queries.\n\n powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta.into()));\n\n //end_timer!(gamma_g_time);\n\n\n\n let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g);\n\n let powers_of_gamma_g =\n\n E::G1Projective::batch_normalization_into_affine(&powers_of_gamma_g);\n\n\n\n // let prepared_neg_powers_of_h_time =\n\n // start_timer!(|| \"Generating negative powers of h in G2\");\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 52, "score": 59159.08570504944 }, { "content": " })\n\n }\n\n\n\n #[inline]\n\n fn deserialize_uncompressed<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let g = E::G1Affine::deserialize_uncompressed(&mut reader)?;\n\n let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?;\n\n let h = E::G2Affine::deserialize_uncompressed(&mut reader)?;\n\n let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?;\n\n\n\n Ok(VerifierKey {\n\n g, gamma_g, h, beta_h,\n\n prepared_h: Default::default(),\n\n prepared_beta_h: Default::default(),\n\n })\n\n }\n\n\n\n #[inline]\n\n fn deserialize_unchecked<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let g = E::G1Affine::deserialize_unchecked(&mut reader)?;\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 53, "score": 59159.08570504944 }, { "content": " #[inline]\n\n fn serialize_uncompressed<W: io::Write>(\n\n &self,\n\n mut writer: W,\n\n ) -> Result<(), SerializationError> {\n\n self.powers_of_g.serialize_uncompressed(&mut writer)?;\n\n self.powers_of_gamma_g.serialize_uncompressed(&mut writer)\n\n }\n\n\n\n #[inline]\n\n fn serialize_unchecked<W: io::Write>(&self, mut writer: W) -> Result<(), SerializationError> {\n\n self.powers_of_g.serialize_unchecked(&mut writer)?;\n\n self.powers_of_gamma_g.serialize_unchecked(&mut writer)\n\n }\n\n\n\n #[inline]\n\n fn uncompressed_size(&self) -> usize {\n\n self.powers_of_g.uncompressed_size() + self.powers_of_gamma_g.uncompressed_size()\n\n }\n\n}\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 54, "score": 59159.08570504944 }, { "content": ")]\n\npub struct Powers<'a, E: PairingEngine> {\n\n /// Group elements of the form `β^i G`, for different values of `i`.\n\n pub powers_of_g: Cow<'a, [E::G1Affine]>,\n\n /// Group elements of the form `β^i γG`, 
for different values of `i`.\n\n pub powers_of_gamma_g: Cow<'a, [E::G1Affine]>,\n\n}\n\n\n\nimpl<'a, E: PairingEngine> CanonicalSerialize for Powers<'a, E> {\n\n #[inline]\n\n fn serialize<W: io::Write>(&self, mut writer: W) -> Result<(), SerializationError> {\n\n self.powers_of_g.serialize(&mut writer)?;\n\n self.powers_of_gamma_g.serialize(&mut writer)\n\n }\n\n\n\n #[inline]\n\n fn serialized_size(&self) -> usize {\n\n self.powers_of_g.serialized_size() + self.powers_of_gamma_g.serialized_size()\n\n }\n\n\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 55, "score": 59159.08570504944 }, { "content": "fn convert_to_bigints<F: PrimeField>(p: &[F]) -> Vec<F::BigInt> {\n\n //let to_bigint_time = start_timer!(|| \"Converting polynomial coeffs to bigints\");\n\n let coeffs = cfg_iter!(p).map(|s| s.into_repr()).collect::<Vec<_>>();\n\n //end_timer!(to_bigint_time);\n\n coeffs\n\n}\n\n\n\n/// The error type for `DensePolynomialCommitment`.\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// The query set contains a label for a polynomial that was not provided as\n\n /// input to the `PC::open`.\n\n MissingPolynomial {\n\n /// The label of the missing polynomial.\n\n label: String,\n\n },\n\n\n\n /// `Evaluations` does not contain an evaluation for the polynomial labelled\n\n /// `label` at a particular query.\n\n MissingEvaluation {\n", "file_path": "clinkv2/src/kzg10/kzg10.rs", "rank": 56, "score": 50606.856819040855 }, { "content": "use ark_ff::{One, Zero};\n\nuse zkp_curve::Curve;\n\nuse zkp_r1cs::Index;\n\n\n\nuse crate::Vec;\n\n\n\n// ~eq(x, rx)\n", "file_path": "spartan/src/polynomial.rs", "rank": 57, "score": 50309.55867004327 }, { "content": "pub fn to_labeled<F: Field>(label: &str, poly: DensePolynomial<F>) -> LabeledPolynomial<F> {\n\n LabeledPolynomial::new(label.to_string(), poly, None, None)\n\n}\n", "file_path": "plonk/src/utils.rs", "rank": 58, "score": 49536.65334766968 }, { "content": "pub fn scalar_mul<F: Field>(poly: &DensePolynomial<F>, scalar: &F) -> DensePolynomial<F> {\n\n if poly.is_zero() || scalar.is_zero() {\n\n return DensePolynomial::zero();\n\n }\n\n let coeffs: Vec<_> = cfg_iter!(poly.coeffs)\n\n .map(|coeff| *scalar * coeff)\n\n .collect();\n\n DensePolynomial::from_coefficients_vec(coeffs)\n\n}\n\n\n", "file_path": "plonk/src/utils.rs", "rank": 59, "score": 48850.30181497539 }, { "content": "use ark_ec::{\n\n msm::{FixedBaseMSM, VariableBaseMSM},\n\n AffineCurve, PairingEngine, ProjectiveCurve,\n\n};\n\nuse ark_ff::{One, PrimeField, UniformRand};\n\nuse ark_poly::{\n\n polynomial::univariate::DensePolynomial as Polynomial, Polynomial as BasePoly, UVPolynomial,\n\n};\n\nuse ark_std::cfg_iter;\n\nuse core::marker::PhantomData;\n\nuse rand::RngCore;\n\n\n\n#[cfg(feature = \"parallel\")]\n\nuse rayon::prelude::*;\n\n\n\nuse crate::Vec;\n\n\n\nuse crate::pc::data_structures::*;\n\n\n\n/// KZG10 implements KZG10 polynomial commitment scheme,\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 61, "score": 48575.31946203696 }, { "content": "use ark_ec::PairingEngine;\n\nuse ark_ff::{Field, One, ToBytes, Zero};\n\nuse ark_poly::{EvaluationDomain, GeneralEvaluationDomain, Polynomial};\n\nuse merlin::Transcript;\n\n\n\nuse crate::{\n\n kzg10::{Proof, VerifyAssignment, VerifyKey, KZG10},\n\n r1cs::{Index, SynthesisError},\n\n Vec,\n\n};\n\n\n", "file_path": "clinkv2/src/kzg10/verifier.rs", "rank": 62, "score": 48574.86949392706 }, { "content": "use ark_ec::PairingEngine;\n\nuse ark_ff::Field;\n\nuse ark_serialize::*;\n\n\n\npub mod kzg10;\n\npub mod prover;\n\npub mod 
verifier;\n\n\n\npub use kzg10::KZG10;\n\npub use prover::create_random_proof;\n\npub use verifier::verify_proof;\n\npub type VerifyKey<E> = kzg10::VerifierKey<E>;\n\npub type ProveKey<'a, E> = kzg10::Powers<'a, E>;\n\n\n\nuse crate::{String, Vec};\n\n\n\nuse super::r1cs::{ConstraintSystem, Index, LinearCombination, SynthesisError, Variable};\n\n\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 63, "score": 48562.59331938661 }, { "content": " Some(rand_p)\n\n } else {\n\n None\n\n };\n\n (witness_polynomial, hiding_witness_polynomial)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ark_bls12_381::Bls12_381;\n\n use ark_std::test_rng;\n\n\n\n fn kzg10_template<E: PairingEngine>() -> Result<(), Error> {\n\n let rng = &mut test_rng();\n\n\n\n let degree = loop {\n\n let degree = usize::rand(rng) % 20;\n\n if degree >= 2 {\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 64, "score": 48561.77376989414 }, { "content": "use ark_bls12_381::{Bls12_381 as E, Fr};\n\nuse ark_ff::{Field, One};\n\nuse rand::prelude::*;\n\nuse std::time::{Duration, Instant};\n\nuse zkp_clinkv2::kzg10::{\n\n create_random_proof, verify_proof, ProveAssignment, VerifyAssignment, KZG10,\n\n};\n\nuse zkp_clinkv2::r1cs::{ConstraintSynthesizer, ConstraintSystem, SynthesisError};\n\n\n\n// We're going to use the BN-256 pairing-friendly elliptic curve.\n\n\n\n// We'll use these interfaces to construct our circuit.\n\n\n\nconst MIMC_ROUNDS: usize = 5;\n\nconst SAMPLES: usize = 8; //1048576//131070;//1048570;//131070;//16380;//16380;//16384\n\n\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 65, "score": 48561.51764064916 }, { "content": " fn check_hiding_bound(hiding_bound: usize, hiding_powers: usize) -> Result<(), Error> {\n\n if hiding_bound == 0 {\n\n Err(Error::HidingBoundIsZero)\n\n } else if hiding_bound > hiding_powers {\n\n Err(Error::HidingBoundTooLarge)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn compute_witness_polynomial(\n\n p: &Polynomial<E::Fr>,\n\n point: E::Fr,\n\n rand: &Rand<E::Fr>,\n\n ) -> (Polynomial<E::Fr>, Option<Polynomial<E::Fr>>) {\n\n let divisor = Polynomial::from_coefficients_vec(vec![-point, E::Fr::one()]);\n\n let witness_polynomial = p / &divisor;\n\n\n\n let hiding_witness_polynomial = if rand.is_hiding() {\n\n let rand_p = &rand.blinding_polynomial / &divisor;\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 66, "score": 48559.6966854486 }, { "content": " Ok(())\n\n }\n\n }\n\n\n\n fn skip_leading_zeros_and_convert_to_bigints<F: PrimeField>(\n\n p: &Polynomial<F>,\n\n ) -> (usize, Vec<F::BigInt>) {\n\n let mut num_leading_zeros = 0;\n\n while p.coeffs[num_leading_zeros].is_zero() && num_leading_zeros < p.coeffs.len() {\n\n num_leading_zeros += 1;\n\n }\n\n let coeffs = Self::convert_to_bigints(&p.coeffs[num_leading_zeros..]);\n\n (num_leading_zeros, coeffs)\n\n }\n\n\n\n fn convert_to_bigints<F: PrimeField>(p: &[F]) -> Vec<F::BigInt> {\n\n let coeffs = cfg_iter!(p).map(|s| s.into_repr()).collect();\n\n coeffs\n\n }\n\n\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 70, "score": 48556.53976766889 }, { "content": " break degree;\n\n }\n\n };\n\n\n\n let pp = KZG10::<E>::setup(degree, rng)?;\n\n let (ck, vk) = KZG10::<E>::trim(&pp, degree / 2)?;\n\n let p = loop {\n\n let p = Polynomial::rand(degree / 2, rng);\n\n if p.degree() > 0 {\n\n break p;\n\n }\n\n };\n\n let hiding_bound = Some(1);\n\n let powers = ck.powers();\n\n let (c, r) = KZG10::<E>::commit(&powers, &p, hiding_bound, Some(rng))?;\n\n let point = E::Fr::rand(rng);\n\n let value = 
p.evaluate(&point);\n\n let proof = KZG10::<E>::open(&powers, &p, point, &r)?;\n\n assert!(KZG10::<E>::check(&vk, &c, point, value, &proof)?);\n\n\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 71, "score": 48556.47573463127 }, { "content": " }\n\n Ok((Comm(comm.into()), rand))\n\n }\n\n\n\n pub fn open(\n\n ck: &Powers<'_, E>,\n\n p: &Polynomial<E::Fr>,\n\n point: E::Fr,\n\n rand: &Rand<E::Fr>,\n\n ) -> Result<Proof<E>, Error> {\n\n let max_degree = ck.powers_of_g.len();\n\n Self::check_degree_is_within_bounds(p.degree(), max_degree)?;\n\n\n\n let (poly, rand_poly) = Self::compute_witness_polynomial(p, point, rand);\n\n let (num_leading_zeros, witness_coeffs) =\n\n Self::skip_leading_zeros_and_convert_to_bigints(&poly);\n\n let mut w = VariableBaseMSM::multi_scalar_mul(\n\n &ck.powers_of_g[num_leading_zeros..],\n\n &witness_coeffs,\n\n );\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 72, "score": 48556.408918666195 }, { "content": "\n\n let domain: GeneralEvaluationDomain<E::Fr> =\n\n EvaluationDomain::<E::Fr>::new(n).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;\n\n\n\n //let domain_size = domain.size();\n\n\n\n let mut r_io_values = vec![];\n\n let lag_values = domain.evaluate_all_lagrange_coefficients(zeta);\n\n for j in 0..m_io {\n\n let mut rj_value = zero;\n\n for i in 0..io[j].len() {\n\n rj_value += &(lag_values[i] * &io[j][i]);\n\n }\n\n r_io_values.push(rj_value);\n\n }\n\n\n\n let vanishing_poly = domain.vanishing_polynomial();\n\n let vanishing_value = vanishing_poly.evaluate(&zeta);\n\n\n\n let mut ab_c = zero;\n", "file_path": "clinkv2/src/kzg10/verifier.rs", "rank": 73, "score": 48556.05792587186 }, { "content": " xl: Some(xl),\n\n xr: Some(xr),\n\n constants: &constants,\n\n };\n\n c.generate_constraints(&mut prover_pa, i).unwrap();\n\n }\n\n }\n\n let one = vec![Fr::one(); n];\n\n io.push(one);\n\n io.push(output);\n\n\n\n println!(\"Create prove...\");\n\n // Create a clinkv2 proof with our parameters.\n\n let proof = create_random_proof(&prover_pa, &kzg10_ck, rng).unwrap();\n\n let prove_time = prove_start.elapsed();\n\n\n\n // Verifier\n\n println!(\"Start verify prepare...\");\n\n let verify_start = Instant::now();\n\n\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 74, "score": 48555.89854553792 }, { "content": " let mut eta_i = one;\n\n\n\n for i in 0..m_abc {\n\n let mut ai = zero;\n\n for (coeff, index) in (&circuit.at[i]).into_iter() {\n\n match index {\n\n Index::Input(j) => ai += &(r_io_values[*j] * coeff),\n\n Index::Aux(j) => ai += &(proof.r_mid_q_values[*j] * coeff),\n\n }\n\n }\n\n\n\n let mut bi = zero;\n\n for (coeff, index) in (&circuit.bt[i]).into_iter() {\n\n match index {\n\n Index::Input(j) => bi += &(r_io_values[*j] * coeff),\n\n Index::Aux(j) => bi += &(proof.r_mid_q_values[*j] * coeff),\n\n }\n\n }\n\n\n\n let mut ci = zero;\n", "file_path": "clinkv2/src/kzg10/verifier.rs", "rank": 75, "score": 48555.844486576185 }, { "content": " ck: &Powers<'_, E>,\n\n p: &Polynomial<E::Fr>,\n\n hiding_bound: Option<usize>,\n\n rng: Option<&mut R>,\n\n ) -> Result<(Comm<E>, Rand<E::Fr>), Error> {\n\n Self::check_degree_is_within_bounds(p.degree(), ck.supported_degree())?;\n\n let (num_leading_zeros, coeffs) = Self::skip_leading_zeros_and_convert_to_bigints(p);\n\n let mut comm =\n\n VariableBaseMSM::multi_scalar_mul(&ck.powers_of_g[num_leading_zeros..], &coeffs);\n\n\n\n let mut rand = Rand::<E::Fr>::empty();\n\n if let Some(hiding_degree) = hiding_bound {\n\n let mut rng = rng.ok_or(Error::MissingRng)?;\n\n 
Self::check_hiding_bound(hiding_degree, ck.size())?;\n\n rand = Rand::rand(hiding_degree, &mut rng);\n\n let rand_coeffs = Self::convert_to_bigints(&rand.blinding_polynomial.coeffs);\n\n let rand_commitment =\n\n VariableBaseMSM::multi_scalar_mul(&ck.powers_of_gamma_g, &rand_coeffs)\n\n .into_affine();\n\n comm.add_assign_mixed(&rand_commitment);\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 77, "score": 48555.12422538612 }, { "content": " let eta = E::Fr::from_random_bytes(&c).unwrap();\n\n\n\n let mut q_comm_bytes = vec![];\n\n proof.q_comm.write(&mut q_comm_bytes)?;\n\n transcript.append_message(b\"quotient polynomial commitments\", &q_comm_bytes);\n\n\n\n c = [0u8; 31];\n\n transcript.challenge_bytes(b\"random point\", &mut c);\n\n let zeta = E::Fr::from_random_bytes(&c).unwrap();\n\n\n\n let r_mid_q_comms = [&proof.r_mid_comms, &[proof.q_comm][..]].concat();\n\n\n\n assert!(KZG10::<E>::batch_check(\n\n &kzg10_vk,\n\n &r_mid_q_comms,\n\n zeta,\n\n &proof.r_mid_q_values,\n\n &proof.r_mid_q_proof,\n\n proof.opening_challenge\n\n )?);\n", "file_path": "clinkv2/src/kzg10/verifier.rs", "rank": 78, "score": 48554.895101504844 }, { "content": "/// which optionally enables hiding following Marlin's specification\n\npub struct KZG10<E: PairingEngine> {\n\n _engine: PhantomData<E>,\n\n}\n\n\n\nimpl<E: PairingEngine> KZG10<E> {\n\n pub fn setup<R: RngCore>(max_degree: usize, rng: &mut R) -> Result<UniversalParams<E>, Error> {\n\n let beta = E::Fr::rand(rng);\n\n let g = E::G1Projective::rand(rng);\n\n let gamma_g = E::G1Projective::rand(rng);\n\n let h = E::G2Projective::rand(rng);\n\n\n\n let mut powers_of_beta = vec![E::Fr::one()];\n\n let mut cur = beta;\n\n for _ in 0..max_degree {\n\n powers_of_beta.push(cur);\n\n cur *= &beta;\n\n }\n\n\n\n let window_size = FixedBaseMSM::get_mul_window_size(max_degree + 1);\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 80, "score": 48554.03673419513 }, { "content": " Ok(())\n\n }\n\n\n\n #[test]\n\n fn kzg10_test() {\n\n for _ in 0..20 {\n\n kzg10_template::<Bls12_381>().expect(\"test failed for Bls12_381\");\n\n }\n\n }\n\n}\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 82, "score": 48552.69102550325 }, { "content": " where\n\n A: FnOnce() -> AR,\n\n AR: Into<String>,\n\n LA: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n LB: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n LC: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n {\n\n let num_constraints = self.num_constraints();\n\n\n\n self.at.push(Vec::new());\n\n self.bt.push(Vec::new());\n\n self.ct.push(Vec::new());\n\n\n\n push_constraints(a(LinearCombination::zero()), &mut self.at, num_constraints);\n\n push_constraints(b(LinearCombination::zero()), &mut self.bt, num_constraints);\n\n push_constraints(c(LinearCombination::zero()), &mut self.ct, num_constraints);\n\n }\n\n\n\n fn push_namespace<NR, N>(&mut self, _: N)\n\n where\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 83, "score": 48551.63182894985 }, { "content": "\n\n #[inline]\n\n fn enforce<A, AR, LA, LB, LC>(&mut self, _: A, a: LA, b: LB, c: LC)\n\n where\n\n A: FnOnce() -> AR,\n\n AR: Into<String>,\n\n LA: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n LB: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n LC: FnOnce(LinearCombination<E::Fr>) -> LinearCombination<E::Fr>,\n\n {\n\n let num_constraints = self.num_constraints();\n\n\n\n self.at.push(Vec::new());\n\n self.bt.push(Vec::new());\n\n self.ct.push(Vec::new());\n\n\n\n 
push_constraints(a(LinearCombination::zero()), &mut self.at, num_constraints);\n\n push_constraints(b(LinearCombination::zero()), &mut self.bt, num_constraints);\n\n push_constraints(c(LinearCombination::zero()), &mut self.ct, num_constraints);\n\n }\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 84, "score": 48551.57378374908 }, { "content": " e\n\n });\n\n let tmp = cs.alloc(\n\n || \"tmp\",\n\n || tmp_value.ok_or(SynthesisError::AssignmentMissing),\n\n index,\n\n )?;\n\n\n\n if index == 0 {\n\n cs.enforce(\n\n || \"tmp = (xL + Ci)^2\",\n\n |lc| lc + xl + (self.constants[i], CS::one()),\n\n |lc| lc + xl + (self.constants[i], CS::one()),\n\n |lc| lc + tmp,\n\n );\n\n }\n\n\n\n // new_xL = xR + (xL + Ci)^3\n\n // new_xL = xR + tmp * (xL + Ci)\n\n // new_xL - xR = tmp * (xL + Ci)\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 85, "score": 48551.5473694233 }, { "content": " // Two-demension vector\n\n pub input_assignment: Vec<Vec<E::Fr>>,\n\n pub aux_assignment: Vec<Vec<E::Fr>>,\n\n\n\n pub(crate) io_cur: usize,\n\n pub(crate) aux_cur: usize,\n\n}\n\n\n\nimpl<E: PairingEngine> Default for ProveAssignment<E> {\n\n fn default() -> ProveAssignment<E> {\n\n ProveAssignment {\n\n at: vec![],\n\n bt: vec![],\n\n ct: vec![],\n\n input_assignment: vec![],\n\n aux_assignment: vec![],\n\n io_cur: 0usize,\n\n aux_cur: 0usize,\n\n }\n\n }\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 87, "score": 48551.45953836067 }, { "content": "}\n\n\n\npub struct VerifyAssignment<E: PairingEngine> {\n\n // Constraints\n\n pub at: Vec<Vec<(E::Fr, Index)>>,\n\n pub bt: Vec<Vec<(E::Fr, Index)>>,\n\n pub ct: Vec<Vec<(E::Fr, Index)>>,\n\n\n\n // Assignments of variables\n\n // Two-demension vector\n\n pub input_assignment: Vec<Vec<E::Fr>>,\n\n pub aux_assignment: Vec<Vec<E::Fr>>,\n\n\n\n pub(crate) io_cur: usize,\n\n pub(crate) aux_cur: usize,\n\n}\n\n\n\nimpl<E: PairingEngine> Default for VerifyAssignment<E> {\n\n fn default() -> VerifyAssignment<E> {\n\n VerifyAssignment {\n", "file_path": "clinkv2/src/kzg10/mod.rs", "rank": 89, "score": 48551.22835990992 }, { "content": " )?\n\n };\n\n\n\n if index == 0 {\n\n cs.enforce(\n\n || \"new_xL = xR + (xL + Ci)^3\",\n\n |lc| lc + tmp,\n\n |lc| lc + xl + (self.constants[i], CS::one()),\n\n |lc| lc + new_xl - xr,\n\n );\n\n }\n\n\n\n // xR = xL\n\n xr = xl;\n\n xr_value = xl_value;\n\n\n\n // xL = new_xL\n\n xl = new_xl;\n\n xl_value = new_xl_value;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 90, "score": 48550.556366244804 }, { "content": " let mut verifier_pa = VerifyAssignment::<E>::default();\n\n\n\n // Create an instance of our circuit (with the witness)\n\n let verify_c = MiMCDemo {\n\n xl: None,\n\n xr: None,\n\n constants: &constants,\n\n };\n\n verify_c\n\n .generate_constraints(&mut verifier_pa, 0usize)\n\n .unwrap();\n\n\n\n println!(\"Start verify...\");\n\n\n\n // Check the proof\n\n assert!(verify_proof(&verifier_pa, &kzg10_vk, &proof, &io).unwrap());\n\n\n\n let verify_time = verify_start.elapsed();\n\n\n\n // Compute time\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 91, "score": 48550.41789475471 }, { "content": "\n\n let rand_v = if let Some(rand_poly) = rand_poly {\n\n let blinding_evaluation = rand.blinding_polynomial.evaluate(&point);\n\n let blinding_witness_coeffs = Self::convert_to_bigints(&rand_poly.coeffs);\n\n w +=\n\n &VariableBaseMSM::multi_scalar_mul(&ck.powers_of_gamma_g, &blinding_witness_coeffs);\n\n Some(blinding_evaluation)\n\n } else {\n\n None\n\n 
};\n\n\n\n Ok(Proof {\n\n w: w.into_affine(),\n\n rand_v,\n\n })\n\n }\n\n\n\n pub fn check(\n\n vk: &VerifierKey<E>,\n\n comm: &Comm<E>,\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 92, "score": 48550.407915765005 }, { "content": " point: E::Fr,\n\n value: E::Fr,\n\n proof: &Proof<E>,\n\n ) -> Result<bool, Error> {\n\n let mut u = comm.0.into_projective() - &(vk.g).mul(value);\n\n if let Some(rand_v) = proof.rand_v {\n\n u -= &vk.gamma_g.mul(rand_v);\n\n }\n\n let v = vk.beta_h.into_projective() - &(vk.h).mul(point);\n\n let lhs = E::pairing(u, vk.h);\n\n let rhs = E::pairing(proof.w, v);\n\n Ok(lhs == rhs)\n\n }\n\n\n\n fn check_degree_is_within_bounds(degree: usize, powers: usize) -> Result<(), Error> {\n\n if degree < 1 {\n\n Err(Error::DegreeIsZero)\n\n } else if degree > powers {\n\n Err(Error::DegreeOutOfBound)\n\n } else {\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 94, "score": 48550.05214421282 }, { "content": " let prepared_beta_h = beta_h.into();\n\n\n\n let pp = UniversalParams {\n\n powers_of_g,\n\n powers_of_gamma_g,\n\n h,\n\n beta_h,\n\n prepared_h,\n\n prepared_beta_h,\n\n };\n\n Ok(pp)\n\n }\n\n\n\n pub fn trim(\n\n pp: &UniversalParams<E>,\n\n supported_degree: usize,\n\n ) -> Result<(CommitterKey<E>, VerifierKey<E>), Error> {\n\n let max_degree = pp.max_degree();\n\n if supported_degree > max_degree {\n\n return Err(Error::TrimmingDegreeTooLarge);\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 95, "score": 48546.55234174803 }, { "content": " || xl_value.ok_or(SynthesisError::AssignmentMissing),\n\n index,\n\n )?;\n\n\n\n // Allocate the second component of the preimage.\n\n let mut xr_value = self.xr;\n\n let mut xr = cs.alloc(\n\n || \"preimage xr\",\n\n || xr_value.ok_or(SynthesisError::AssignmentMissing),\n\n index,\n\n )?;\n\n\n\n for i in 0..MIMC_ROUNDS {\n\n // xL, xR := xR + (xL + Ci)^3, xL\n\n let cs = &mut cs.ns(|| format!(\"round {}\", i));\n\n\n\n // tmp = (xL + Ci)^2\n\n let tmp_value = xl_value.map(|mut e| {\n\n e.add_assign(&self.constants[i]);\n\n e.square_in_place();\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 96, "score": 48546.55234174803 }, { "content": " }\n\n let powers_of_g = pp.powers_of_g[..=supported_degree].to_vec();\n\n let powers_of_gamma_g = pp.powers_of_gamma_g[..=supported_degree].to_vec();\n\n let vk = VerifierKey::<E> {\n\n g: powers_of_g[0],\n\n gamma_g: powers_of_gamma_g[0],\n\n h: pp.h,\n\n beta_h: pp.beta_h,\n\n supported_degree,\n\n };\n\n\n\n let ck = CommitterKey::<E> {\n\n powers_of_g,\n\n powers_of_gamma_g,\n\n supported_degree,\n\n };\n\n Ok((ck, vk))\n\n }\n\n\n\n pub fn commit<R: RngCore>(\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 97, "score": 48546.55234174803 }, { "content": " let new_xl_value = xl_value.map(|mut e| {\n\n e.add_assign(&self.constants[i]);\n\n e.mul_assign(&tmp_value.unwrap());\n\n e.add_assign(&xr_value.unwrap());\n\n e\n\n });\n\n\n\n let new_xl = if i == (MIMC_ROUNDS - 1) {\n\n // This is the last round, xL is our image and so\n\n // we allocate a public input.\n\n cs.alloc_input(\n\n || \"image\",\n\n || new_xl_value.ok_or(SynthesisError::AssignmentMissing),\n\n index,\n\n )?\n\n } else {\n\n cs.alloc(\n\n || \"new_xl\",\n\n || new_xl_value.ok_or(SynthesisError::AssignmentMissing),\n\n index,\n", "file_path": "clinkv2/examples/mimc_kzg10.rs", "rank": 98, "score": 48546.55234174803 }, { "content": " let scalar_bits = E::Fr::size_in_bits();\n\n let g_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, g);\n\n let powers_of_g =\n\n 
FixedBaseMSM::multi_scalar_mul(scalar_bits, window_size, &g_table, &powers_of_beta);\n\n\n\n let gamma_g_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, gamma_g);\n\n let powers_of_gamma_g = FixedBaseMSM::multi_scalar_mul(\n\n scalar_bits,\n\n window_size,\n\n &gamma_g_table,\n\n &powers_of_beta,\n\n );\n\n\n\n let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g);\n\n let powers_of_gamma_g =\n\n E::G1Projective::batch_normalization_into_affine(&powers_of_gamma_g);\n\n\n\n let beta_h = h.mul(beta.into()).into_affine();\n\n let h = h.into_affine();\n\n let prepared_h = h.into();\n", "file_path": "marlin/src/pc/kzg10.rs", "rank": 99, "score": 48546.55234174803 } ]
Language: Rust
File: examples/simple/main.rs
Repository: Vollkornaffe/VRV
Commit: 8e71b9c728dbe91d41d563e32bcbf432952fe828
use std::{ collections::HashSet, sync::{ atomic::{AtomicBool, Ordering}, Arc, }, time::Instant, }; use ash::vk::{DynamicState, Extent2D}; use cgmath::{perspective, Deg, EuclideanSpace, Matrix4, Point3, SquareMatrix, Vector3}; use openxr::{EventDataBuffer, SessionState, ViewConfigurationType}; use per_frame::PerFrameWindow; use simplelog::{Config, SimpleLogger}; use vk_shader_macros::include_glsl; use vrv::{ wrap_vulkan::{create_pipeline, create_pipeline_layout, pipeline::create_shader_module}, State, }; use winit::{ event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; use crate::{ camera::{fov_to_projection, pose_to_matrix_inverse, KeyMap, SphereCoords}, per_frame::{PerFrameHMD, UniformMatricesHMD, UniformMatricesWindow}, }; mod camera; mod per_frame; fn main() { let _ = SimpleLogger::init(log::LevelFilter::Warn, Config::default()); let event_loop = EventLoop::new(); let window = WindowBuilder::new().build(&event_loop).unwrap(); let mut state = State::new(&window).unwrap(); let (hmd_per_frame_buffers, hmd_descriptor_related) = PerFrameHMD::new_vec(&state.vulkan, state.get_image_count_hmd()).unwrap(); let (window_per_frame_buffers, window_descriptor_related) = PerFrameWindow::new_vec(&state.vulkan, state.get_image_count_window()).unwrap(); const HMD_VERT: &[u32] = include_glsl!("shaders/example_hmd.vert"); const HMD_FRAG: &[u32] = include_glsl!("shaders/example_hmd.frag"); const WINDOW_VERT: &[u32] = include_glsl!("shaders/example_window.vert"); const WINDOW_FRAG: &[u32] = include_glsl!("shaders/example_window.frag"); let hmd_module_vert = create_shader_module(&state.vulkan, HMD_VERT, "HMDShaderVert".to_string()).unwrap(); let hmd_module_frag = create_shader_module(&state.vulkan, HMD_FRAG, "HMDShaderFrag".to_string()).unwrap(); let window_module_vert = create_shader_module(&state.vulkan, WINDOW_VERT, "WindowShaderVert".to_string()).unwrap(); let window_module_frag = create_shader_module(&state.vulkan, WINDOW_FRAG, "WindowShaderFrag".to_string()).unwrap(); let hmd_pipeline_layout = create_pipeline_layout( &state.vulkan, hmd_descriptor_related.layout, "HMDPipelineLayout".to_string(), ) .unwrap(); let hmd_pipeline = create_pipeline( &state.vulkan, state.hmd_render_pass, hmd_pipeline_layout, hmd_module_vert, hmd_module_frag, state.openxr.get_resolution().unwrap(), &[], "HMDPipeline".to_string(), ) .unwrap(); let window_pipeline_layout = create_pipeline_layout( &state.vulkan, window_descriptor_related.layout, "WindowPipelineLayout".to_string(), ) .unwrap(); let window_pipeline = create_pipeline( &state.vulkan, state.window_render_pass, window_pipeline_layout, window_module_vert, window_module_frag, Extent2D { width: window.inner_size().width, height: window.inner_size().height, }, &[DynamicState::VIEWPORT, DynamicState::SCISSOR], "WindowPipeline".to_string(), ) .unwrap(); unsafe { state .vulkan .device .destroy_shader_module(hmd_module_vert, None); state .vulkan .device .destroy_shader_module(hmd_module_frag, None); state .vulkan .device .destroy_shader_module(window_module_vert, None); state .vulkan .device .destroy_shader_module(window_module_frag, None); } let mut spherical_coords = SphereCoords::new(); let mut pressed_keys: HashSet<VirtualKeyCode> = HashSet::new(); let ctrlc = Arc::new(AtomicBool::new(false)); { let r = ctrlc.clone(); ctrlc::set_handler(move || { r.store(true, Ordering::Relaxed); }) .expect("setting Ctrl-C handler"); } let mut xr_event_storage = EventDataBuffer::new(); let mut 
xr_session_running = false; let mut xr_focused = false; event_loop.run(move |event, _, control_flow| match event { Event::MainEventsCleared => { if ctrlc.load(Ordering::Relaxed) { log::warn!("Exiting through Ctrl-C"); *control_flow = ControlFlow::Exit; match state.session.request_exit() { Ok(()) => {} Err(openxr::sys::Result::ERROR_SESSION_NOT_RUNNING) => {} Err(e) => panic!("{}", e), } return; } while let Some(event) = state .openxr .instance .poll_event(&mut xr_event_storage) .unwrap() { use openxr::Event::*; match event { SessionStateChanged(e) => { log::warn!("entered state {:?}", e.state()); xr_focused = false; match e.state() { SessionState::READY => { state .session .begin(ViewConfigurationType::PRIMARY_STEREO) .unwrap(); xr_session_running = true; } SessionState::STOPPING => { state.session.end().unwrap(); xr_session_running = false; } SessionState::FOCUSED => { xr_focused = true; } SessionState::EXITING | SessionState::LOSS_PENDING => { *control_flow = ControlFlow::Exit; return; } _ => {} } } InstanceLossPending(_) => { *control_flow = ControlFlow::Exit; return; } EventsLost(e) => { log::error!("lost {} events", e.lost_event_count()); } _ => {} } } let hmd_pre_render_info = state.pre_render_hmd().unwrap(); if hmd_pre_render_info.image_index.is_some() { let image_index = hmd_pre_render_info.image_index.unwrap(); let hmd_current_frame = &hmd_per_frame_buffers[image_index as usize]; state .record_hmd( hmd_pre_render_info, hmd_pipeline_layout, hmd_pipeline, &hmd_current_frame.mesh_buffers, hmd_current_frame.descriptor_set, ) .unwrap(); let views = state .get_views(hmd_pre_render_info.frame_state.predicted_display_time) .unwrap(); hmd_current_frame.matrix_buffer.write(&[UniformMatricesHMD { model: Matrix4::identity(), view_left: pose_to_matrix_inverse(views[0].pose), view_right: pose_to_matrix_inverse(views[1].pose), proj_left: fov_to_projection(views[0].fov), proj_right: fov_to_projection(views[1].fov), }]); state.submit_hmd(hmd_pre_render_info, &views).unwrap(); } let window_pre_render_info = state.pre_render_window().unwrap(); let window_current_frame = &window_per_frame_buffers[window_pre_render_info.image_index as usize]; spherical_coords.update( &pressed_keys .iter() .map(|&k| k.into()) .collect::<Vec<KeyMap>>(), ); window_current_frame .matrix_buffer .write(&[UniformMatricesWindow { model: Matrix4::identity(), view: Matrix4::look_at_rh( spherical_coords.to_coords(), Point3::origin(), Vector3::unit_y(), ), proj: { let mut tmp = perspective( Deg(45.0), window.inner_size().width as f32 / window.inner_size().height as f32, 0.1, 100.0, ); tmp[1][1] *= -1.0; tmp }, }]); state .render_window( window_pre_render_info, window_pipeline_layout, window_pipeline, &window_current_frame.mesh_buffers, window_current_frame.descriptor_set, ) .unwrap(); window.request_redraw(); } Event::WindowEvent { ref event, window_id, } if window_id == window.id() => { match event { WindowEvent::CloseRequested | WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } => *control_flow = ControlFlow::Exit, WindowEvent::Resized(new_inner_size) => { log::info!("Resizing to {:?}", new_inner_size); state.resize(&window).unwrap(); } WindowEvent::ScaleFactorChanged { scale_factor, new_inner_size, } => { log::info!("Changing scale to {}", scale_factor); log::info!("Resizing to {:?}", new_inner_size); state.resize(&window).unwrap(); } WindowEvent::KeyboardInput { input: KeyboardInput { state, virtual_keycode: Some(code), .. }, .. 
} => { _ = match state { ElementState::Pressed => pressed_keys.insert(*code), ElementState::Released => pressed_keys.remove(code), } } _ => {} } } _ => {} }) }
use std::{ collections::HashSet, sync::{ atomic::{AtomicBool, Ordering}, Arc, }, time::Instant, }; use ash::vk::{DynamicState, Extent2D}; use cgmath::{perspective, Deg, EuclideanSpace, Matrix4, Point3, SquareMatrix, Vector3}; use openxr::{EventDataBuffer, SessionState, ViewConfigurationType}; use per_frame::PerFrameWindow; use simplelog::{Config, SimpleLogger}; use vk_shader_macros::include_glsl; use vrv::{ wrap_vulkan::{create_pipeline, create_pipeline_layout, pipeline::create_shader_module}, State, }; use winit::{ event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; use crate::{ camera::{fov_to_projection, pose_to_matrix_inverse, KeyMap, SphereCoords}, per_frame::{PerFrameHMD, UniformMatricesHMD, UniformMatricesWindow}, }; mod camera; mod per_frame; fn main() { let _ = SimpleLogger::init(log::LevelFilter::Warn, Config::default()); let event_loop = EventLoop::new(); let window = WindowBuilder::new().build(&event_loop).unwrap(); let mut state = State::new(&window).unwrap(); let (hmd_per_frame_buffers, hmd_descriptor_related) = PerFrameHMD::new_vec(&state.vulkan, state.get_image_count_hmd()).unwrap(); let (window_per_frame_buffers, window_descriptor_related) = PerFrameWindow::new_vec(&state.vulkan, state.get_image_count_window()).unwrap(); const HMD_VERT: &[u32] = include_glsl!("shaders/example_hmd.vert"); const HMD_FRAG: &[u32] = include_glsl!("shaders/example_hmd.frag"); const WINDOW_VERT: &[u32] = include_glsl!("shaders/example_window.vert"); const WINDOW_FRAG: &[u32] = include_glsl!("shaders/example_window.frag"); let hmd_module_vert = create_shader_module(&state.vulkan, HMD_VERT, "HMDShaderVert".to_string()).unwrap(); let hmd_module_frag = create_shader_module(&state.vulkan, HMD_FRAG, "HMDShaderFrag".to_string()).unwrap(); let window_module_vert = create_shader_module(&state.vulkan, WINDOW_VERT, "WindowShaderVert".to_string()).unwrap(); let window_module_frag = create_shader_module(&state.vulkan, WINDOW_FRAG, "WindowShaderFrag".to_string()).unwrap(); let hmd_pipeline_layout = create_pipeline_layout( &state.vulkan, hmd_descriptor_related.layout, "HMDPipelineLayout".to_string(), ) .unwrap(); let hmd_pipeline = create_pipeline( &state.vulkan, state.hmd_render_pass, hmd_pipeline_layout, hmd_module_vert, hmd_module_frag, state.openxr.get_resolution().unwrap(), &[], "HMDPipeline".to_string(), ) .unwrap(); let window_pipeline_layout = create_pipeline_layout( &state.vulkan, window_descriptor_related.layout, "WindowPipelineLayout".to_string(), ) .unwrap(); let window_pipeline = create_pipeline( &state.vulkan, state.window_render_pass, window_pipeline_layout, window_module_vert, window_module_frag, Extent2D { width: window.inner_size().width, height: window.inner_size().height, }, &[DynamicState::VIEWPORT, DynamicState::SCISSOR], "WindowPipeline".to_string(), ) .unwrap(); unsafe { state .vulkan .device .destroy_shader_module(hmd_module_vert, None); state .vulkan .device .destroy_shader_module(hmd_module_frag, None); state .vulkan .device .destroy_shader_module(window_module_vert, None); state .vulkan .device .destroy_shader_module(window_module_frag, None); } let mut spherical_coords = SphereCoords::new(); let mut pressed_keys: HashSet<VirtualKeyCode> = HashSet::new(); let ctrlc = Arc::new(AtomicBool::new(false)); { let r = ctrlc.clone(); ctrlc::set_handler(move || { r.store(true, Ordering::Relaxed); }) .expect("setting Ctrl-C handler"); } let mut xr_event_storage = EventDataBuffer::new(); let mut 
xr_session_running = false; let mut xr_focused = false; event_loop.run(move |event, _, control_flow| match event { Event::MainEventsCleared => { if ctrlc.load(Ordering::Relaxed) { log::warn!("Exiting through Ctrl-C"); *control_flow = ControlFlow::Exit; match state.session.request_exit() { Ok(()) => {} Err(openxr::sys::Result::ERROR_SESSION_NOT_RUNNING) => {} Err(e) => panic!("{}", e), } return; } while let Some(event) = state .openxr .instance .poll_event(&mut xr_event_storage) .unwrap() {
use openxr::Event::*; match event { SessionStateChanged(e) => { log::warn!("entered state {:?}", e.state()); xr_focused = false; match e.state() { SessionState::READY => { state .session .begin(ViewConfigurationType::PRIMARY_STEREO) .unwrap(); xr_session_running = true; } SessionState::STOPPING => { state.session.end().unwrap(); xr_session_running = false; } SessionState::FOCUSED => { xr_focused = true; } SessionState::EXITING | SessionState::LOSS_PENDING => { *control_flow = ControlFlow::Exit; return; } _ => {} } } InstanceLossPending(_) => { *control_flow = ControlFlow::Exit; return; } EventsLost(e) => { log::error!("lost {} events", e.lost_event_count()); } _ => {} } } let hmd_pre_render_info = state.pre_render_hmd().unwrap(); if hmd_pre_render_info.image_index.is_some() { let image_index = hmd_pre_render_info.image_index.unwrap(); let hmd_current_frame = &hmd_per_frame_buffers[image_index as usize]; state .record_hmd( hmd_pre_render_info, hmd_pipeline_layout, hmd_pipeline, &hmd_current_frame.mesh_buffers, hmd_current_frame.descriptor_set, ) .unwrap(); let views = state .get_views(hmd_pre_render_info.frame_state.predicted_display_time) .unwrap(); hmd_current_frame.matrix_buffer.write(&[UniformMatricesHMD { model: Matrix4::identity(), view_left: pose_to_matrix_inverse(views[0].pose), view_right: pose_to_matrix_inverse(views[1].pose), proj_left: fov_to_projection(views[0].fov), proj_right: fov_to_projection(views[1].fov), }]); state.submit_hmd(hmd_pre_render_info, &views).unwrap(); } let window_pre_render_info = state.pre_render_window().unwrap(); let window_current_frame = &window_per_frame_buffers[window_pre_render_info.image_index as usize]; spherical_coords.update( &pressed_keys .iter() .map(|&k| k.into()) .collect::<Vec<KeyMap>>(), ); window_current_frame .matrix_buffer .write(&[UniformMatricesWindow { model: Matrix4::identity(), view: Matrix4::look_at_rh( spherical_coords.to_coords(), Point3::origin(), Vector3::unit_y(), ), proj: { let mut tmp = perspective( Deg(45.0), window.inner_size().width as f32 / window.inner_size().height as f32, 0.1, 100.0, ); tmp[1][1] *= -1.0; tmp }, }]); state .render_window( window_pre_render_info, window_pipeline_layout, window_pipeline, &window_current_frame.mesh_buffers, window_current_frame.descriptor_set, ) .unwrap(); window.request_redraw(); } Event::WindowEvent { ref event, window_id, } if window_id == window.id() => { match event { WindowEvent::CloseRequested | WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(VirtualKeyCode::Escape), .. }, .. } => *control_flow = ControlFlow::Exit, WindowEvent::Resized(new_inner_size) => { log::info!("Resizing to {:?}", new_inner_size); state.resize(&window).unwrap(); } WindowEvent::ScaleFactorChanged { scale_factor, new_inner_size, } => { log::info!("Changing scale to {}", scale_factor); log::info!("Resizing to {:?}", new_inner_size); state.resize(&window).unwrap(); } WindowEvent::KeyboardInput { input: KeyboardInput { state, virtual_keycode: Some(code), .. }, .. } => { _ = match state { ElementState::Pressed => pressed_keys.insert(*code), ElementState::Released => pressed_keys.remove(code), } } _ => {} } } _ => {} }) }
Strategy: function_block-function_prefixed
[ { "content": "// there are 4 angles to consider instead of one\n\npub fn fov_to_projection(fov: Fovf) -> Matrix4<f32> {\n\n let tan_left = fov.angle_left.tan();\n\n let tan_right = fov.angle_right.tan();\n\n let tan_down = fov.angle_down.tan();\n\n let tan_up = fov.angle_up.tan();\n\n let near = 0.1;\n\n let far = 100.0;\n\n\n\n let tan_width = tan_right - tan_left;\n\n let tan_height = tan_down - tan_up;\n\n\n\n Matrix4::new(\n\n 2.0 / tan_width,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 2.0 / tan_height,\n\n 0.0,\n\n 0.0,\n", "file_path": "examples/simple/camera.rs", "rank": 1, "score": 77139.37810671914 }, { "content": "pub fn pose_to_matrix_inverse(pose: Posef) -> Matrix4<f32> {\n\n Matrix4::from(Quaternion::new(\n\n pose.orientation.w,\n\n -pose.orientation.x,\n\n -pose.orientation.y,\n\n -pose.orientation.z,\n\n )) * Matrix4::from_translation(vec3(-pose.position.x, -pose.position.y, -pose.position.z))\n\n}\n\n\n", "file_path": "examples/simple/camera.rs", "rank": 2, "score": 74774.00252842158 }, { "content": "fn check(instance: &Instance, xr_result: sys::Result) -> Result<()> {\n\n if xr_result != sys::Result::SUCCESS {\n\n bail!(\"{}\", instance.result_to_string(xr_result).unwrap());\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(feature = \"validation_openxr\")]\n\nmod debug {\n\n use anyhow::Result;\n\n use openxr::{\n\n raw::DebugUtilsEXT,\n\n sys::{\n\n Bool32, DebugUtilsMessengerCallbackDataEXT, DebugUtilsMessengerCreateInfoEXT,\n\n DebugUtilsMessengerEXT,\n\n },\n\n DebugUtilsMessageSeverityFlagsEXT, DebugUtilsMessageTypeFlagsEXT, Entry, Instance,\n\n StructureType,\n\n };\n\n\n", "file_path": "src/wrap_openxr.rs", "rank": 3, "score": 70856.40510291097 }, { "content": "// later we can add push constants\n\npub fn create_pipeline_layout(\n\n base: &Base,\n\n set_layout: DescriptorSetLayout,\n\n name: String,\n\n) -> Result<PipelineLayout> {\n\n let layout = unsafe {\n\n base.device.create_pipeline_layout(\n\n &PipelineLayoutCreateInfo::builder().set_layouts(&[set_layout]),\n\n None,\n\n )\n\n }?;\n\n base.name_object(layout, name)?;\n\n Ok(layout)\n\n}\n\n\n", "file_path": "src/wrap_vulkan/pipeline.rs", "rank": 4, "score": 62223.97528811506 }, { "content": "pub fn create_shader_module(base: &Base, spirv: &[u32], name: String) -> Result<ShaderModule> {\n\n let module = unsafe {\n\n base.device\n\n .create_shader_module(&ShaderModuleCreateInfo::builder().code(spirv), None)\n\n }?;\n\n base.name_object(module, name)?;\n\n Ok(module)\n\n}\n\n\n", "file_path": "src/wrap_vulkan/pipeline.rs", "rank": 5, "score": 59544.10157983452 }, { "content": "pub fn wait_and_reset(base: &Base, fence: Fence) -> Result<()> {\n\n unsafe {\n\n base.device.wait_for_fences(\n\n &[fence],\n\n true, // wait all\n\n std::u64::MAX, // don't timeout\n\n )\n\n }?;\n\n unsafe { base.device.reset_fences(&[fence]) }?;\n\n Ok(())\n\n}\n", "file_path": "src/wrap_vulkan/sync.rs", "rank": 6, "score": 57831.58265330776 }, { "content": "pub fn create_semaphore(base: &Base, name: String) -> Result<Semaphore> {\n\n let semaphore = unsafe {\n\n base.device\n\n .create_semaphore(&SemaphoreCreateInfo::builder(), None)\n\n }?;\n\n base.name_object(semaphore, name)?;\n\n Ok(semaphore)\n\n}\n\n\n", "file_path": "src/wrap_vulkan/sync.rs", "rank": 7, "score": 55723.454030251756 }, { "content": "pub mod render_hmd;\n\npub mod render_window;\n\npub mod swapchain;\n\nuse std::mem::ManuallyDrop;\n\n\n\nuse anyhow::{Error, Result};\n\nuse ash::vk::{CommandBuffer, Extent2D, Fence, RenderPass, Semaphore};\n\n\n\nuse openxr::{\n\n FrameState, 
FrameStream, FrameWaiter, Posef, ReferenceSpaceType, Session, Space, Time, View,\n\n ViewConfigurationType, Vulkan,\n\n};\n\nuse winit::window::Window;\n\n\n\nuse crate::{\n\n wrap_openxr,\n\n wrap_vulkan::{\n\n self, create_render_pass_window,\n\n render_pass::create_render_pass_hmd,\n\n sync::{create_fence, create_semaphore},\n", "file_path": "src/state/mod.rs", "rank": 8, "score": 55497.24628471649 }, { "content": " &self.vulkan,\n\n self.window_render_pass,\n\n Extent2D {\n\n width: window.inner_size().width,\n\n height: window.inner_size().height,\n\n },\n\n )?;\n\n Ok(())\n\n }\n\n\n\n pub fn new(window: &Window) -> Result<Self> {\n\n log::info!(\"Creating new VRV state\");\n\n\n\n let openxr = wrap_openxr::Base::new()?;\n\n let vulkan = wrap_vulkan::Base::new(window, &openxr)?;\n\n\n\n // Setup HMD, from this point SteamVR needs to be available\n\n\n\n let (session, frame_wait, frame_stream) = openxr.init_with_vulkan(&vulkan)?;\n\n let stage = session.create_reference_space(ReferenceSpaceType::STAGE, Posef::IDENTITY)?;\n", "file_path": "src/state/mod.rs", "rank": 9, "score": 55495.05143015828 }, { "content": " self.window_swapchain.destroy(&self.vulkan);\n\n\n\n for &s in &self.window_semaphores_image_acquired {\n\n self.vulkan.device.destroy_semaphore(s, None);\n\n }\n\n for &s in &self.window_semaphores_rendering_finished {\n\n self.vulkan.device.destroy_semaphore(s, None);\n\n }\n\n for &f in &self.window_fences_rendering_finished {\n\n self.vulkan.device.destroy_fence(f, None);\n\n }\n\n\n\n self.vulkan\n\n .device\n\n .destroy_render_pass(self.window_render_pass, None);\n\n\n\n ManuallyDrop::drop(&mut self.vulkan);\n\n ManuallyDrop::drop(&mut self.openxr);\n\n }\n\n }\n", "file_path": "src/state/mod.rs", "rank": 10, "score": 55493.535076561886 }, { "content": "}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct PreRenderInfoWindow {\n\n pub image_index: u32,\n\n image_acquired_semaphore: Semaphore,\n\n}\n\n#[derive(Copy, Clone)]\n\npub struct PreRenderInfoHMD {\n\n pub image_index: Option<u32>,\n\n pub frame_state: FrameState,\n\n}\n\n\n\nimpl State {\n\n pub fn resize(&mut self, window: &Window) -> Result<()> {\n\n self.vulkan.wait_idle()?;\n\n\n\n unsafe { self.window_swapchain.destroy(&self.vulkan) };\n\n\n\n self.window_swapchain = SwapchainWindow::new(\n", "file_path": "src/state/mod.rs", "rank": 11, "score": 55490.39179360721 }, { "content": " // TODO: actions\n\n\n\n // the acquiring semaphores are used round-robin\n\n // because we need to supply a semaphore prior to knowing which frame to use\n\n last_used_acquire_semaphore: usize,\n\n window_semaphores_image_acquired: Vec<Semaphore>,\n\n // these are indexed by the result of acquiring\n\n window_semaphores_rendering_finished: Vec<Semaphore>,\n\n window_fences_rendering_finished: Vec<Fence>,\n\n window_command_buffers: Vec<CommandBuffer>,\n\n\n\n pub window_render_pass: RenderPass,\n\n window_swapchain: SwapchainWindow,\n\n}\n\n\n\nimpl Drop for State {\n\n fn drop(&mut self) {\n\n self.vulkan.wait_idle().unwrap();\n\n\n\n unsafe {\n", "file_path": "src/state/mod.rs", "rank": 12, "score": 55489.719829817 }, { "content": " height: window.inner_size().height,\n\n },\n\n )?;\n\n\n\n let window_command_buffers =\n\n vulkan.alloc_command_buffers(window_image_count, \"WindowCommandBuffers\".to_string())?;\n\n\n\n Ok(Self {\n\n openxr: ManuallyDrop::new(openxr),\n\n vulkan: ManuallyDrop::new(vulkan),\n\n\n\n session,\n\n frame_wait,\n\n frame_stream,\n\n stage,\n\n\n\n hmd_render_pass,\n\n hmd_swapchain,\n\n 
hmd_command_buffers,\n\n hmd_fences_rendering_finished,\n", "file_path": "src/state/mod.rs", "rank": 13, "score": 55488.62809228067 }, { "content": " )?)\n\n })\n\n .collect::<Result<_, Error>>()?;\n\n\n\n let window_fences_rendering_finished = (0..window_image_count)\n\n .into_iter()\n\n .map(|index| {\n\n Ok(create_fence(\n\n &vulkan,\n\n true, // start in signaled state\n\n format!(\"WindowFenceRenderingFinished_{}\", index),\n\n )?)\n\n })\n\n .collect::<Result<_, Error>>()?;\n\n\n\n let window_swapchain = SwapchainWindow::new(\n\n &vulkan,\n\n window_render_pass,\n\n Extent2D {\n\n width: window.inner_size().width,\n", "file_path": "src/state/mod.rs", "rank": 14, "score": 55488.02933876752 }, { "content": "\n\n let hmd_render_pass = create_render_pass_hmd(&vulkan)?;\n\n\n\n let hmd_swapchain = SwapchainHMD::new(&openxr, &vulkan, hmd_render_pass, &session)?;\n\n let hmd_image_count = hmd_swapchain.elements.len() as u32;\n\n let hmd_command_buffers =\n\n vulkan.alloc_command_buffers(hmd_image_count, \"HMDCommandBuffers\".to_string())?;\n\n let hmd_fences_rendering_finished = (0..hmd_image_count)\n\n .into_iter()\n\n .map(|index| {\n\n Ok(create_fence(\n\n &vulkan,\n\n true, // start in signaled state\n\n format!(\"HMDFenceRenderingFinished_{}\", index),\n\n )?)\n\n })\n\n .collect::<Result<_, Error>>()?;\n\n\n\n // Setup Window\n\n\n", "file_path": "src/state/mod.rs", "rank": 15, "score": 55485.762767178945 }, { "content": " },\n\n};\n\nuse swapchain::{SwapchainHMD, SwapchainWindow};\n\n\n\npub struct State {\n\n pub openxr: ManuallyDrop<wrap_openxr::Base>,\n\n pub vulkan: ManuallyDrop<wrap_vulkan::Base>,\n\n\n\n pub session: Session<Vulkan>,\n\n\n\n frame_wait: FrameWaiter,\n\n frame_stream: FrameStream<Vulkan>,\n\n\n\n stage: Space,\n\n\n\n pub hmd_render_pass: RenderPass,\n\n hmd_swapchain: SwapchainHMD,\n\n hmd_command_buffers: Vec<CommandBuffer>,\n\n hmd_fences_rendering_finished: Vec<Fence>,\n\n\n", "file_path": "src/state/mod.rs", "rank": 16, "score": 55485.69709556395 }, { "content": "\n\n last_used_acquire_semaphore: 0,\n\n window_semaphores_image_acquired,\n\n window_semaphores_rendering_finished,\n\n window_fences_rendering_finished,\n\n window_render_pass,\n\n window_command_buffers,\n\n window_swapchain,\n\n })\n\n }\n\n\n\n pub fn get_image_count_hmd(&self) -> u32 {\n\n self.hmd_swapchain.elements.len() as u32\n\n }\n\n\n\n pub fn get_image_count_window(&self) -> u32 {\n\n self.window_swapchain.elements.len() as u32\n\n }\n\n\n\n pub fn get_views(&self, display_time: Time) -> Result<[View; 2]> {\n\n let (_, view_vec) = self.session.locate_views(\n\n ViewConfigurationType::PRIMARY_STEREO,\n\n display_time,\n\n &self.stage,\n\n )?;\n\n Ok([view_vec[0], view_vec[1]])\n\n }\n\n}\n", "file_path": "src/state/mod.rs", "rank": 17, "score": 55484.55265725181 }, { "content": " let window_render_pass = create_render_pass_window(&vulkan)?;\n\n\n\n let window_image_count = vulkan.get_image_count()?;\n\n\n\n let window_semaphores_image_acquired = (0..window_image_count)\n\n .into_iter()\n\n .map(|index| {\n\n Ok(create_semaphore(\n\n &vulkan,\n\n format!(\"WindowSemaphoreImageAcquired_{}\", index),\n\n )?)\n\n })\n\n .collect::<Result<_, Error>>()?;\n\n\n\n let window_semaphores_rendering_finished = (0..window_image_count)\n\n .into_iter()\n\n .map(|index| {\n\n Ok(create_semaphore(\n\n &vulkan,\n\n format!(\"WindowSemaphoreRenderingFinished_{}\", index),\n", "file_path": "src/state/mod.rs", "rank": 18, "score": 55484.06492154809 }, { "content": "use crate::{\n\n 
wrap_vulkan::{geometry::MeshBuffers, sync::wait_and_reset},\n\n State,\n\n};\n\nuse anyhow::Result;\n\nuse ash::vk::{\n\n ClearColorValue, ClearDepthStencilValue, ClearValue, CommandBufferBeginInfo,\n\n CommandBufferResetFlags, DescriptorSet, IndexType, Offset2D, Pipeline, PipelineBindPoint,\n\n PipelineLayout, PipelineStageFlags, PresentInfoKHR, Rect2D, RenderPassBeginInfo, SubmitInfo,\n\n SubpassContents, Viewport,\n\n};\n\n\n\nuse super::PreRenderInfoWindow;\n\n\n\nimpl State {\n\n pub fn pre_render_window(&mut self) -> Result<PreRenderInfoWindow> {\n\n // prepare semaphore\n\n let image_acquired_semaphore =\n\n self.window_semaphores_image_acquired[self.last_used_acquire_semaphore];\n\n self.last_used_acquire_semaphore += 1;\n", "file_path": "src/state/render_window.rs", "rank": 19, "score": 52681.13145454962 }, { "content": " self.last_used_acquire_semaphore %= self.window_semaphores_image_acquired.len();\n\n\n\n // acuire image\n\n let (image_index, _suboptimal) = unsafe {\n\n self.window_swapchain.loader.acquire_next_image(\n\n self.window_swapchain.handle,\n\n std::u64::MAX, // don't timeout\n\n image_acquired_semaphore,\n\n ash::vk::Fence::default(),\n\n )\n\n }?;\n\n\n\n Ok(PreRenderInfoWindow {\n\n image_index,\n\n image_acquired_semaphore,\n\n })\n\n }\n\n\n\n pub fn render_window(\n\n &self,\n", "file_path": "src/state/render_window.rs", "rank": 20, "score": 52678.73172309402 }, { "content": " );\n\n d.cmd_draw_indexed(command_buffer, mesh.num_indices() as u32, 1, 0, 0, 0);\n\n d.cmd_end_render_pass(command_buffer);\n\n d.end_command_buffer(command_buffer)?;\n\n\n\n self.vulkan.device.queue_submit(\n\n self.vulkan.queue,\n\n &[SubmitInfo::builder()\n\n .command_buffers(&[command_buffer])\n\n .wait_semaphores(&[image_acquired_semaphore])\n\n .wait_dst_stage_mask(&[PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT])\n\n .signal_semaphores(&[rendering_finished_semaphore])\n\n .build()],\n\n rendering_finished_fence,\n\n )?;\n\n\n\n let _suboptimal = self.window_swapchain.loader.queue_present(\n\n self.vulkan.queue,\n\n &PresentInfoKHR::builder()\n\n .wait_semaphores(&[rendering_finished_semaphore])\n\n .swapchains(&[self.window_swapchain.handle])\n\n .image_indices(&[image_index]),\n\n )?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/state/render_window.rs", "rank": 21, "score": 52676.47906475292 }, { "content": " depth: 1.0,\n\n stencil: 0,\n\n },\n\n },\n\n ]),\n\n SubpassContents::INLINE,\n\n );\n\n d.cmd_bind_pipeline(command_buffer, PipelineBindPoint::GRAPHICS, pipeline);\n\n\n\n // set this here so we don't have to recreate pipeline on window resize\n\n d.cmd_set_viewport(\n\n command_buffer,\n\n 0,\n\n &[Viewport::builder()\n\n .x(0.0)\n\n .y(0.0)\n\n .width(extent.width as f32)\n\n .height(extent.height as f32)\n\n .min_depth(0.0)\n\n .max_depth(1.0)\n", "file_path": "src/state/render_window.rs", "rank": 22, "score": 52675.35376109366 }, { "content": "\n\n // for convenience\n\n let extent = self.window_swapchain.extent;\n\n unsafe {\n\n let d = &self.vulkan.device;\n\n\n\n d.reset_command_buffer(command_buffer, CommandBufferResetFlags::RELEASE_RESOURCES)?;\n\n d.begin_command_buffer(command_buffer, &CommandBufferBeginInfo::builder())?;\n\n d.cmd_begin_render_pass(\n\n command_buffer,\n\n &RenderPassBeginInfo::builder()\n\n .render_pass(self.window_render_pass)\n\n .framebuffer(frame_buffer)\n\n .render_area(*Rect2D::builder().extent(extent))\n\n .clear_values(&[\n\n ClearValue {\n\n color: ClearColorValue::default(),\n\n },\n\n ClearValue {\n\n depth_stencil: 
ClearDepthStencilValue {\n", "file_path": "src/state/render_window.rs", "rank": 23, "score": 52674.12094520452 }, { "content": " pre_render_info: PreRenderInfoWindow,\n\n pipeline_layout: PipelineLayout,\n\n pipeline: Pipeline,\n\n mesh: &MeshBuffers,\n\n descriptor_set: DescriptorSet,\n\n ) -> Result<()> {\n\n let PreRenderInfoWindow {\n\n image_index,\n\n image_acquired_semaphore,\n\n } = pre_render_info;\n\n\n\n // get the other stuff now that we know the index\n\n let rendering_finished_semaphore =\n\n self.window_semaphores_rendering_finished[image_index as usize];\n\n let rendering_finished_fence = self.window_fences_rendering_finished[image_index as usize];\n\n let command_buffer = self.window_command_buffers[image_index as usize];\n\n let frame_buffer = self.window_swapchain.elements[image_index as usize].frame_buffer;\n\n\n\n // waite before resetting cmd buffer\n\n wait_and_reset(&self.vulkan, rendering_finished_fence)?;\n", "file_path": "src/state/render_window.rs", "rank": 24, "score": 52671.74989437067 }, { "content": " .build()],\n\n );\n\n d.cmd_set_scissor(\n\n command_buffer,\n\n 0,\n\n &[Rect2D::builder()\n\n .offset(Offset2D { x: 0, y: 0 })\n\n .extent(extent)\n\n .build()],\n\n );\n\n\n\n d.cmd_bind_vertex_buffers(command_buffer, 0, &[mesh.vertex.handle()], &[0]);\n\n d.cmd_bind_index_buffer(command_buffer, mesh.index.handle(), 0, IndexType::UINT32);\n\n d.cmd_bind_descriptor_sets(\n\n command_buffer,\n\n PipelineBindPoint::GRAPHICS,\n\n pipeline_layout,\n\n 0,\n\n &[descriptor_set],\n\n &[],\n", "file_path": "src/state/render_window.rs", "rank": 25, "score": 52666.8017721576 }, { "content": "pub fn create_render_pass_window(base: &Base) -> Result<RenderPass> {\n\n let render_pass = unsafe {\n\n base.device.create_render_pass(\n\n &RenderPassCreateInfo::builder()\n\n .attachments(&[\n\n AttachmentDescription::builder()\n\n .format(base.get_surface_format()?)\n\n .samples(SampleCountFlags::TYPE_1)\n\n .load_op(AttachmentLoadOp::CLEAR)\n\n .store_op(AttachmentStoreOp::STORE)\n\n .stencil_load_op(AttachmentLoadOp::DONT_CARE)\n\n .stencil_store_op(AttachmentStoreOp::DONT_CARE)\n\n .initial_layout(ImageLayout::UNDEFINED)\n\n .final_layout(ImageLayout::PRESENT_SRC_KHR)\n\n .build(),\n\n AttachmentDescription::builder()\n\n .format(base.find_supported_depth_stencil_format()?)\n\n .samples(SampleCountFlags::TYPE_1)\n\n .load_op(AttachmentLoadOp::CLEAR)\n\n .store_op(AttachmentStoreOp::DONT_CARE)\n", "file_path": "src/wrap_vulkan/render_pass.rs", "rank": 26, "score": 52457.01133832248 }, { "content": "pub fn create_fence(base: &Base, signaled: bool, name: String) -> Result<Fence> {\n\n let fence = unsafe {\n\n base.device.create_fence(\n\n &FenceCreateInfo::builder().flags(if signaled {\n\n FenceCreateFlags::SIGNALED\n\n } else {\n\n FenceCreateFlags::default()\n\n }),\n\n None,\n\n )\n\n }?;\n\n base.name_object(fence, name)?;\n\n Ok(fence)\n\n}\n\n\n", "file_path": "src/wrap_vulkan/sync.rs", "rank": 27, "score": 51938.62691315978 }, { "content": "use anyhow::Result;\n\nuse ash::vk::{Fence, FenceCreateFlags, FenceCreateInfo, Semaphore, SemaphoreCreateInfo};\n\n\n\nuse super::Base;\n\n\n", "file_path": "src/wrap_vulkan/sync.rs", "rank": 28, "score": 50800.98493043205 }, { "content": "pub mod base;\n\npub mod buffers;\n\n#[cfg(feature = \"validation_vulkan\")]\n\npub mod debug;\n\npub mod descriptors;\n\npub mod device_image;\n\npub mod geometry;\n\npub mod pipeline;\n\npub mod render_pass;\n\npub mod surface;\n\npub mod sync;\n\n\n\npub use base::Base;\n\n#[cfg(feature = 
\"validation_vulkan\")]\n\npub use debug::Debug;\n\npub use device_image::DeviceImage;\n\npub use geometry::Vertex;\n\npub use pipeline::create_pipeline;\n\npub use pipeline::create_pipeline_layout;\n\npub use render_pass::create_render_pass_window;\n\npub use surface::SurfaceRelated;\n", "file_path": "src/wrap_vulkan/mod.rs", "rank": 29, "score": 50559.62911826953 }, { "content": "\n\n let view = Self::new_view(\n\n base,\n\n image,\n\n settings.format,\n\n settings.aspect_flags,\n\n settings.layer_count,\n\n format!(\"{}View\", settings.name.clone()),\n\n )?;\n\n\n\n Ok(Self {\n\n image,\n\n memory,\n\n view,\n\n })\n\n }\n\n\n\n pub unsafe fn destroy(&self, base: &Base) {\n\n base.device.destroy_image_view(self.view, None);\n\n base.device.destroy_image(self.image, None);\n\n base.device.free_memory(self.memory, None);\n\n }\n\n}\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 30, "score": 48195.434910823686 }, { "content": " }\n\n\n\n pub fn new(base: &Base, settings: DeviceImageSettings) -> Result<Self> {\n\n let image = unsafe {\n\n base.device.create_image(\n\n &ImageCreateInfo::builder()\n\n .image_type(ImageType::TYPE_2D)\n\n .extent(Extent3D {\n\n width: settings.extent.width,\n\n height: settings.extent.height,\n\n depth: 1,\n\n })\n\n .mip_levels(1)\n\n .array_layers(settings.layer_count)\n\n .format(settings.format)\n\n .tiling(settings.tiling)\n\n .initial_layout(ImageLayout::UNDEFINED)\n\n .usage(settings.usage)\n\n .sharing_mode(SharingMode::EXCLUSIVE)\n\n .samples(SampleCountFlags::TYPE_1),\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 31, "score": 48194.218085827866 }, { "content": " None,\n\n )\n\n }?;\n\n base.name_object(image, format!(\"{}Image\", settings.name.clone()))?;\n\n\n\n let memory_requirements = unsafe { base.device.get_image_memory_requirements(image) };\n\n let memory = unsafe {\n\n base.device.allocate_memory(\n\n &MemoryAllocateInfo::builder()\n\n .allocation_size(memory_requirements.size)\n\n .memory_type_index(base.find_memory_type_index(\n\n MemoryPropertyFlags::from_raw(memory_requirements.memory_type_bits),\n\n settings.properties,\n\n )?),\n\n None,\n\n )?\n\n };\n\n base.name_object(memory, format!(\"{}Memory\", settings.name.clone()))?;\n\n\n\n unsafe { base.device.bind_image_memory(image, memory, 0) }?;\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 32, "score": 48193.136008455294 }, { "content": " pub usage: ImageUsageFlags,\n\n pub properties: MemoryPropertyFlags,\n\n pub aspect_flags: ImageAspectFlags,\n\n pub layer_count: u32, // 2 for hmd\n\n pub name: String,\n\n}\n\n\n\nimpl DeviceImage {\n\n pub fn new_view(\n\n base: &Base,\n\n image: Image,\n\n format: Format,\n\n aspect_flags: ImageAspectFlags,\n\n layer_count: u32,\n\n name: String,\n\n ) -> Result<ImageView> {\n\n let view = unsafe {\n\n base.device.create_image_view(\n\n &ImageViewCreateInfo::builder()\n\n .image(image)\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 33, "score": 48191.807958718404 }, { "content": "use anyhow::Result;\n\nuse ash::vk::{\n\n DeviceMemory, Extent2D, Extent3D, Format, Image, ImageAspectFlags, ImageCreateInfo,\n\n ImageLayout, ImageSubresourceRange, ImageTiling, ImageType, ImageUsageFlags, ImageView,\n\n ImageViewCreateInfo, ImageViewType, MemoryAllocateInfo, MemoryPropertyFlags, SampleCountFlags,\n\n SharingMode,\n\n};\n\n\n\nuse super::Base;\n\n\n\npub struct DeviceImage {\n\n pub image: Image,\n\n pub memory: DeviceMemory,\n\n pub view: ImageView,\n\n}\n\n\n\npub struct DeviceImageSettings {\n\n pub 
extent: Extent2D,\n\n pub format: Format,\n\n pub tiling: ImageTiling,\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 34, "score": 48190.71542073888 }, { "content": " .view_type(if layer_count == 1 {\n\n ImageViewType::TYPE_2D\n\n } else {\n\n ImageViewType::TYPE_2D_ARRAY\n\n })\n\n .format(format)\n\n .subresource_range(\n\n ImageSubresourceRange::builder()\n\n .aspect_mask(aspect_flags)\n\n .base_mip_level(0)\n\n .level_count(1)\n\n .base_array_layer(0)\n\n .layer_count(layer_count)\n\n .build(),\n\n ),\n\n None,\n\n )\n\n }?;\n\n base.name_object(view, name)?;\n\n Ok(view)\n", "file_path": "src/wrap_vulkan/device_image.rs", "rank": 35, "score": 48188.953490346554 }, { "content": "pub fn create_pipeline(\n\n base: &Base,\n\n render_pass: RenderPass,\n\n layout: PipelineLayout,\n\n module_vert: ShaderModule,\n\n module_frag: ShaderModule,\n\n initial_extent: Extent2D,\n\n dynamic_states: &[DynamicState],\n\n name: String,\n\n) -> Result<Pipeline> {\n\n let vertex_bindings = Vertex::get_binding_description();\n\n let vertex_attributes = Vertex::get_attribute_description();\n\n\n\n let entry_point = CString::new(\"main\").unwrap();\n\n let pipeline = unsafe {\n\n base.device.create_graphics_pipelines(\n\n PipelineCache::default(),\n\n &[GraphicsPipelineCreateInfo::builder()\n\n .stages(&[\n\n PipelineShaderStageCreateInfo::builder()\n", "file_path": "src/wrap_vulkan/pipeline.rs", "rank": 36, "score": 48046.308058680304 }, { "content": "pub fn create_render_pass_hmd(base: &Base) -> Result<RenderPass> {\n\n // sets the 2 least significant bits\n\n let masks = [!(!0 << 2)];\n\n\n\n let render_pass = unsafe {\n\n base.device.create_render_pass(\n\n &RenderPassCreateInfo::builder()\n\n .attachments(&[\n\n AttachmentDescription::builder()\n\n .format(base.find_supported_color_format()?)\n\n .samples(SampleCountFlags::TYPE_1)\n\n .load_op(AttachmentLoadOp::CLEAR)\n\n .store_op(AttachmentStoreOp::STORE)\n\n .stencil_load_op(AttachmentLoadOp::DONT_CARE)\n\n .stencil_store_op(AttachmentStoreOp::DONT_CARE)\n\n .initial_layout(ImageLayout::UNDEFINED)\n\n // final layout isn't PRESENT_SRC_KHR\n\n .final_layout(ImageLayout::COLOR_ATTACHMENT_OPTIMAL)\n\n .build(),\n\n AttachmentDescription::builder()\n", "file_path": "src/wrap_vulkan/render_pass.rs", "rank": 37, "score": 35470.18905925213 }, { "content": "use std::{f32::consts::PI, time::Instant};\n\n\n\nuse cgmath::{vec3, Matrix4, Point3, Quaternion};\n\nuse openxr::{Fovf, Posef};\n\nuse winit::event::VirtualKeyCode;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct SphereCoords {\n\n phi: f32,\n\n theta: f32,\n\n radius: f32,\n\n speed: f32,\n\n check: Instant,\n\n}\n\n\n\npub enum KeyMap {\n\n Up,\n\n Down,\n\n Left,\n\n Right,\n", "file_path": "examples/simple/camera.rs", "rank": 38, "score": 28424.76491940366 }, { "content": "}\n\n\n\nimpl SphereCoords {\n\n pub fn new() -> Self {\n\n Self {\n\n phi: 0.0,\n\n theta: std::f32::consts::FRAC_PI_8,\n\n radius: 4.0,\n\n speed: 2.0,\n\n check: Instant::now(),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, pressed_keys: &[KeyMap]) {\n\n let d = self.check.elapsed().as_secs_f32() * self.speed;\n\n self.check = Instant::now();\n\n\n\n for key in pressed_keys {\n\n match key {\n\n KeyMap::Up => self.theta -= d,\n", "file_path": "examples/simple/camera.rs", "rank": 39, "score": 28414.518350623082 }, { "content": " (tan_right + tan_left) / tan_width,\n\n (tan_up + tan_down) / tan_height,\n\n -far / (far - near),\n\n -1.0,\n\n 0.0,\n\n 0.0,\n\n -(far * near) / (far - near),\n\n 0.0,\n\n )\n\n}\n", 
"file_path": "examples/simple/camera.rs", "rank": 40, "score": 28413.726241454566 }, { "content": " Closer,\n\n Farther,\n\n UNDEFINED,\n\n}\n\n\n\nimpl From<VirtualKeyCode> for KeyMap {\n\n fn from(code: VirtualKeyCode) -> Self {\n\n match code {\n\n VirtualKeyCode::W => Self::Up,\n\n VirtualKeyCode::S => Self::Down,\n\n VirtualKeyCode::A => Self::Left,\n\n VirtualKeyCode::D => Self::Right,\n\n VirtualKeyCode::Q => Self::Closer,\n\n VirtualKeyCode::E => Self::Farther,\n\n _ => {\n\n log::warn!(\"This key isn't bound\");\n\n Self::UNDEFINED\n\n }\n\n }\n\n }\n", "file_path": "examples/simple/camera.rs", "rank": 41, "score": 28411.74360739509 }, { "content": " KeyMap::Down => self.theta += d,\n\n KeyMap::Left => self.phi += d,\n\n KeyMap::Right => self.phi -= d,\n\n KeyMap::Closer => self.radius -= d,\n\n KeyMap::Farther => self.radius += d,\n\n KeyMap::UNDEFINED => {}\n\n }\n\n }\n\n\n\n log::error!(\"theta: {:.3}, phi: {:.3}\", self.theta, self.phi);\n\n\n\n self.theta = self.theta.clamp(0.1, PI - 0.1);\n\n self.phi %= 2.0 * PI;\n\n self.radius = self.radius.clamp(0.0, 100.0);\n\n }\n\n\n\n pub fn to_coords(&self) -> Point3<f32> {\n\n [\n\n self.radius * self.phi.cos() * self.theta.sin(),\n\n self.radius * self.theta.cos(),\n\n self.radius * self.phi.sin() * self.theta.sin(),\n\n ]\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "examples/simple/camera.rs", "rank": 42, "score": 28409.784182984688 }, { "content": " vk_entry: &ash::Entry,\n\n info: &InstanceCreateInfoBuilder,\n\n ) -> Result<ash::Instance> {\n\n Ok(ash::Instance::load(\n\n vk_entry.static_fn(),\n\n ash::vk::Instance::from_raw(\n\n self.instance\n\n .create_vulkan_instance(\n\n self.system_id,\n\n std::mem::transmute(vk_entry.static_fn().get_instance_proc_addr),\n\n info as *const _ as *const _,\n\n )?\n\n .map_err(vk::Result::from_raw)? 
as u64,\n\n ),\n\n ))\n\n }\n\n\n\n pub fn get_vulkan_physical_device(\n\n &self,\n\n vk_instance: &ash::Instance,\n", "file_path": "src/wrap_openxr.rs", "rank": 50, "score": 28083.625823985127 }, { "content": " ) -> Result<PhysicalDevice> {\n\n Ok(PhysicalDevice::from_raw(\n\n self.instance\n\n .vulkan_graphics_device(self.system_id, vk_instance.handle().as_raw() as _)?\n\n as u64,\n\n ))\n\n }\n\n\n\n pub unsafe fn get_vulkan_device(\n\n &self,\n\n vk_entry: &ash::Entry,\n\n vk_instance: &ash::Instance,\n\n vk_physical_device: PhysicalDevice,\n\n info: &DeviceCreateInfoBuilder,\n\n ) -> Result<ash::Device> {\n\n Ok(ash::Device::load(\n\n vk_instance.fp_v1_0(),\n\n ash::vk::Device::from_raw(\n\n self.instance\n\n .create_vulkan_device(\n", "file_path": "src/wrap_openxr.rs", "rank": 53, "score": 28081.666719362805 }, { "content": " }\n\n\n\n Ok(Self {\n\n #[cfg(feature = \"validation_openxr\")]\n\n debug,\n\n\n\n entry,\n\n instance,\n\n system_id,\n\n })\n\n }\n\n\n\n pub fn get_graphics_requirements(&self) -> Result<Requirements> {\n\n Ok(self\n\n .instance\n\n .graphics_requirements::<Vulkan>(self.system_id)?)\n\n }\n\n\n\n pub unsafe fn get_vulkan_instance(\n\n &self,\n", "file_path": "src/wrap_openxr.rs", "rank": 54, "score": 28081.433543032865 }, { "content": "\n\n user_callback: Some(openxr_debug_utils_callback),\n\n user_data: std::ptr::null_mut(),\n\n };\n\n let mut debug_messenger = DebugUtilsMessengerEXT::NULL;\n\n check(instance, unsafe {\n\n (debug_utils_loader.create_debug_utils_messenger)(\n\n instance.as_raw(),\n\n &info,\n\n &mut debug_messenger,\n\n )\n\n })?;\n\n Ok(Self {\n\n debug_utils_loader,\n\n debug_messenger,\n\n })\n\n }\n\n }\n\n\n\n impl Drop for Debug {\n", "file_path": "src/wrap_openxr.rs", "rank": 55, "score": 28081.07491888616 }, { "content": "\n\n let mut enabled_extensions = ExtensionSet::default();\n\n enabled_extensions.khr_vulkan_enable2 = true;\n\n if cfg!(feature = \"validation_openxr\") {\n\n enabled_extensions.ext_debug_utils = true;\n\n }\n\n let instance = entry.create_instance(\n\n &ApplicationInfo {\n\n application_name: \"VRV App\",\n\n application_version: env!(\"CARGO_PKG_VERSION_MAJOR\").parse().unwrap(),\n\n engine_name: \"\",\n\n engine_version: 0,\n\n },\n\n &enabled_extensions,\n\n if cfg!(feature = \"validation_openxr\") {\n\n &[VALIDATION_LAYER_NAME]\n\n } else {\n\n &[]\n\n },\n\n )?;\n", "file_path": "src/wrap_openxr.rs", "rank": 58, "score": 28079.883723284907 }, { "content": " self.system_id,\n\n std::mem::transmute(vk_entry.static_fn().get_instance_proc_addr),\n\n vk_physical_device.as_raw() as _,\n\n info as *const _ as *const _,\n\n )?\n\n .map_err(vk::Result::from_raw)? 
as u64,\n\n ),\n\n ))\n\n }\n\n\n\n pub fn get_resolution(&self) -> Result<Extent2D> {\n\n let views = self.instance.enumerate_view_configuration_views(\n\n self.system_id,\n\n ViewConfigurationType::PRIMARY_STEREO,\n\n )?;\n\n\n\n if views.len() != 2 {\n\n bail!(\"Views are not 2\");\n\n }\n\n if views[0].recommended_image_rect_width != views[1].recommended_image_rect_width\n", "file_path": "src/wrap_openxr.rs", "rank": 59, "score": 28079.666361951524 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"validation_openxr\")]\n\nuse debug::Debug;\n\n\n\nuse crate::wrap_vulkan;\n\n\n\npub struct Base {\n\n #[cfg(feature = \"validation_openxr\")]\n\n pub debug: Debug,\n\n\n\n pub entry: Entry,\n\n pub instance: Instance,\n\n pub system_id: SystemId,\n\n}\n\n\n\nimpl Base {\n\n pub fn new() -> Result<Self> {\n\n const VALIDATION_LAYER_NAME: &'static str = \"XR_APILAYER_LUNARG_core_validation\";\n", "file_path": "src/wrap_openxr.rs", "rank": 60, "score": 28079.3362706431 }, { "content": "use anyhow::{bail, Error, Result};\n\nuse ash::vk::{\n\n self, DeviceCreateInfoBuilder, Extent2D, Format, Handle, InstanceCreateInfoBuilder,\n\n PhysicalDevice,\n\n};\n\nuse openxr::{\n\n sys,\n\n vulkan::{Requirements, SessionCreateInfo},\n\n ApplicationInfo, Entry, EnvironmentBlendMode, ExtensionSet, FormFactor, FrameStream,\n\n FrameWaiter, Instance, Session, Swapchain, SwapchainCreateFlags, SwapchainCreateInfo,\n\n SwapchainUsageFlags, SystemId, ViewConfigurationType, Vulkan,\n\n};\n\n\n", "file_path": "src/wrap_openxr.rs", "rank": 62, "score": 28079.081593824943 }, { "content": " supported_formats\n\n .iter()\n\n .find(|&supported| *supported == wanted.as_raw() as u32)\n\n .is_some()\n\n })\n\n .ok_or(Error::msg(\"Couldn't find supported format\"))\n\n .cloned()\n\n }\n\n\n\n pub fn init_with_vulkan(\n\n &self,\n\n vk_base: &wrap_vulkan::Base,\n\n ) -> Result<(Session<Vulkan>, FrameWaiter, FrameStream<Vulkan>)> {\n\n // A session represents this application's desire to display things! This is where we hook\n\n // up our graphics API. 
This does not start the session; for that, you'll need a call to Session::begin\n\n Ok(unsafe {\n\n self.instance.create_session::<Vulkan>(\n\n self.system_id,\n\n &SessionCreateInfo {\n\n instance: vk_base.instance.handle().as_raw() as _,\n", "file_path": "src/wrap_openxr.rs", "rank": 63, "score": 28078.864649782365 }, { "content": " physical_device: vk_base.physical_device.as_raw() as _,\n\n device: vk_base.device.handle().as_raw() as _,\n\n queue_family_index: vk_base.queue_family_index,\n\n queue_index: 0,\n\n },\n\n )\n\n }?)\n\n }\n\n\n\n pub fn get_swapchain(\n\n session: &Session<Vulkan>,\n\n extent: Extent2D,\n\n format: Format,\n\n ) -> Result<Swapchain<Vulkan>> {\n\n Ok(session.create_swapchain(&SwapchainCreateInfo {\n\n create_flags: SwapchainCreateFlags::EMPTY,\n\n usage_flags: SwapchainUsageFlags::COLOR_ATTACHMENT | SwapchainUsageFlags::SAMPLED,\n\n format: format.as_raw() as _,\n\n sample_count: 1,\n\n width: extent.width,\n\n height: extent.height,\n\n face_count: 1,\n\n array_size: 2, // Multiview for two eyes\n\n mip_count: 1,\n\n })?)\n\n }\n\n}\n", "file_path": "src/wrap_openxr.rs", "rank": 64, "score": 28078.11727498217 }, { "content": " || views[0].recommended_image_rect_height != views[1].recommended_image_rect_height\n\n {\n\n bail!(\"Views don't have equal resolution?\");\n\n }\n\n\n\n Ok(Extent2D {\n\n width: views[0].recommended_image_rect_width,\n\n height: views[0].recommended_image_rect_height,\n\n })\n\n }\n\n\n\n pub fn find_supported_format(\n\n session: &Session<Vulkan>,\n\n candidates: &[Format],\n\n ) -> Result<Format> {\n\n let supported_formats = session.enumerate_swapchain_formats()?;\n\n\n\n candidates\n\n .iter()\n\n .find(|&wanted| {\n", "file_path": "src/wrap_openxr.rs", "rank": 66, "score": 28076.918558929778 }, { "content": " fn drop(&mut self) {\n\n // not going to check that result\n\n let _ = unsafe {\n\n (self.debug_utils_loader.destroy_debug_utils_messenger)(self.debug_messenger)\n\n };\n\n }\n\n }\n\n\n\n unsafe extern \"system\" fn openxr_debug_utils_callback(\n\n message_severity: DebugUtilsMessageSeverityFlagsEXT,\n\n message_type: DebugUtilsMessageTypeFlagsEXT,\n\n p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,\n\n _p_user_data: *mut std::ffi::c_void,\n\n ) -> Bool32 {\n\n let type_string = match message_type {\n\n DebugUtilsMessageTypeFlagsEXT::GENERAL => \"[General]\",\n\n DebugUtilsMessageTypeFlagsEXT::PERFORMANCE => \"[Performance]\",\n\n DebugUtilsMessageTypeFlagsEXT::VALIDATION => \"[Validation]\",\n\n _ => \"[Unknown]\",\n\n };\n", "file_path": "src/wrap_openxr.rs", "rank": 67, "score": 28075.844277006825 }, { "content": "\n\n #[cfg(feature = \"validation_openxr\")]\n\n let debug = Debug::new(&entry, &instance)?;\n\n\n\n let instance_props = instance.properties()?;\n\n log::info!(\n\n \"loaded OpenXR runtime: {} {}\",\n\n instance_props.runtime_name,\n\n instance_props.runtime_version\n\n );\n\n\n\n // Request a form factor from the device (HMD, Handheld, etc.)\n\n let system_id = instance.system(FormFactor::HEAD_MOUNTED_DISPLAY)?;\n\n if instance\n\n .enumerate_environment_blend_modes(system_id, ViewConfigurationType::PRIMARY_STEREO)?\n\n .into_iter()\n\n .find(|&mode| mode == EnvironmentBlendMode::OPAQUE)\n\n == None\n\n {\n\n bail!(\"Only OPAQUE mode allowed\");\n", "file_path": "src/wrap_openxr.rs", "rank": 69, "score": 28075.451168180043 }, { "content": " let message = std::ffi::CStr::from_ptr((*p_callback_data).message)\n\n .to_str()\n\n .unwrap();\n\n\n\n match message_severity {\n\n 
DebugUtilsMessageSeverityFlagsEXT::VERBOSE => {\n\n log::debug!(\"OPENXR: {} {}\", type_string, message)\n\n }\n\n DebugUtilsMessageSeverityFlagsEXT::INFO => {\n\n log::info!(\"OPENXR: {} {}\", type_string, message)\n\n }\n\n DebugUtilsMessageSeverityFlagsEXT::WARNING => {\n\n log::warn!(\"OPENXR: {} {}\", type_string, message)\n\n }\n\n DebugUtilsMessageSeverityFlagsEXT::ERROR => {\n\n log::error!(\"OPENXR: {} {}\", type_string, message)\n\n }\n\n _ => {}\n\n };\n\n false.into()\n", "file_path": "src/wrap_openxr.rs", "rank": 70, "score": 28075.37033469905 }, { "content": " use super::check;\n\n\n\n pub struct Debug {\n\n pub debug_utils_loader: DebugUtilsEXT,\n\n pub debug_messenger: DebugUtilsMessengerEXT,\n\n }\n\n impl Debug {\n\n pub fn new(entry: &Entry, instance: &Instance) -> Result<Self> {\n\n let debug_utils_loader = unsafe { DebugUtilsEXT::load(&entry, instance.as_raw()) }?;\n\n let info = DebugUtilsMessengerCreateInfoEXT {\n\n ty: StructureType::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,\n\n next: std::ptr::null(),\n\n message_severities: DebugUtilsMessageSeverityFlagsEXT::VERBOSE\n\n | DebugUtilsMessageSeverityFlagsEXT::INFO\n\n | DebugUtilsMessageSeverityFlagsEXT::WARNING\n\n | DebugUtilsMessageSeverityFlagsEXT::ERROR,\n\n message_types: DebugUtilsMessageTypeFlagsEXT::GENERAL\n\n | DebugUtilsMessageTypeFlagsEXT::VALIDATION\n\n | DebugUtilsMessageTypeFlagsEXT::PERFORMANCE\n\n | DebugUtilsMessageTypeFlagsEXT::CONFORMANCE,\n", "file_path": "src/wrap_openxr.rs", "rank": 73, "score": 28072.522381643812 }, { "content": "\n\n log::info!(\"Creating new OpenXR Base\");\n\n\n\n let entry = Entry::linked();\n\n let available_extensions = entry.enumerate_extensions()?;\n\n let available_layers = entry.enumerate_layers()?;\n\n\n\n log::trace!(\"OpenXR available extensions: {:?}\", available_extensions);\n\n log::trace!(\"OpenXR available layers: {:?}\", available_layers);\n\n\n\n assert!(available_extensions.khr_vulkan_enable2);\n\n\n\n #[cfg(feature = \"validation_openxr\")]\n\n assert!(\n\n available_layers\n\n .iter()\n\n .find(|l| l.layer_name == VALIDATION_LAYER_NAME)\n\n .is_some(),\n\n \"Validation layer not found, did you set XR_API_LAYER_PATH?\"\n\n );\n", "file_path": "src/wrap_openxr.rs", "rank": 74, "score": 28069.878886010905 }, { "content": " .attachments(&[view, depth_image.view])\n\n .width(extent.width)\n\n .height(extent.height)\n\n .layers(1),\n\n None,\n\n )?\n\n };\n\n base.name_object(frame_buffer, format!(\"WindowSwapchainFrameBuffer_{}\", i))?;\n\n\n\n Ok(SwapElement {\n\n image,\n\n view,\n\n frame_buffer,\n\n })\n\n })\n\n .collect::<Result<Vec<_>, _>>()?;\n\n\n\n Ok(Self {\n\n extent,\n\n depth_image,\n", "file_path": "src/state/swapchain.rs", "rank": 75, "score": 27248.552182975098 }, { "content": " let frame_buffer = unsafe {\n\n vk_base.device.create_framebuffer(\n\n &FramebufferCreateInfo::builder()\n\n .render_pass(render_pass)\n\n .attachments(&[view, depth_image.view])\n\n .width(extent.width)\n\n .height(extent.height)\n\n .layers(1), // multiview dictates this\n\n None,\n\n )\n\n }?;\n\n vk_base.name_object(frame_buffer, format!(\"HMDSwapchainFrameBuffer_{}\", i))?;\n\n\n\n Ok(SwapElement {\n\n image,\n\n view,\n\n frame_buffer,\n\n })\n\n })\n\n .collect::<Result<_, _>>()?;\n", "file_path": "src/state/swapchain.rs", "rank": 76, "score": 27248.329197391915 }, { "content": " loader,\n\n handle,\n\n elements,\n\n })\n\n }\n\n\n\n pub unsafe fn destroy(&self, base: &wrap_vulkan::Base) {\n\n for e in &self.elements {\n\n base.device.destroy_image_view(e.view, 
None);\n\n base.device.destroy_framebuffer(e.frame_buffer, None);\n\n }\n\n self.loader.destroy_swapchain(self.handle, None);\n\n self.depth_image.destroy(base);\n\n }\n\n}\n\n\n\nimpl SwapchainHMD {\n\n pub fn new(\n\n xr_base: &wrap_openxr::Base,\n\n vk_base: &wrap_vulkan::Base,\n", "file_path": "src/state/swapchain.rs", "rank": 77, "score": 27247.562255436555 }, { "content": "use anyhow::{bail, Error, Result};\n\nuse ash::{\n\n extensions::khr::Swapchain,\n\n vk::{\n\n CompositeAlphaFlagsKHR, Extent2D, Framebuffer, FramebufferCreateInfo, Handle, Image,\n\n ImageAspectFlags, ImageTiling, ImageUsageFlags, ImageView, MemoryPropertyFlags,\n\n PresentModeKHR, RenderPass, SharingMode, SwapchainCreateInfoKHR, SwapchainKHR,\n\n },\n\n};\n\n\n\nuse openxr::{Session, Vulkan};\n\n\n\nuse crate::{\n\n wrap_openxr,\n\n wrap_vulkan::{self, device_image::DeviceImageSettings, surface::Detail, DeviceImage},\n\n};\n\n\n\npub struct SwapElement {\n\n pub image: Image,\n\n pub view: ImageView,\n", "file_path": "src/state/swapchain.rs", "rank": 78, "score": 27247.104787867513 }, { "content": " pub frame_buffer: Framebuffer,\n\n}\n\npub struct SwapchainWindow {\n\n pub extent: Extent2D,\n\n pub depth_image: DeviceImage,\n\n pub loader: Swapchain,\n\n pub handle: SwapchainKHR,\n\n pub elements: Vec<SwapElement>,\n\n}\n\n\n\npub struct SwapchainHMD {\n\n pub extent: Extent2D,\n\n pub swapchain: openxr::Swapchain<Vulkan>,\n\n pub depth_image: DeviceImage,\n\n pub elements: Vec<SwapElement>,\n\n}\n\n\n\nimpl SwapchainWindow {\n\n pub fn new(\n\n base: &wrap_vulkan::Base,\n", "file_path": "src/state/swapchain.rs", "rank": 79, "score": 27244.329832845648 }, { "content": " let Detail {\n\n capabilities,\n\n present_modes,\n\n image_count,\n\n format,\n\n } = base.window_surface_related.get_detail(base)?;\n\n\n\n // we don't want the window to block our rendering\n\n let present_mode = *present_modes\n\n .iter()\n\n .find(|&&m| m == PresentModeKHR::IMMEDIATE)\n\n .ok_or(Error::msg(\"No suitable present mode\"))?;\n\n let loader = Swapchain::new(&base.instance, &base.device);\n\n let handle = unsafe {\n\n loader.create_swapchain(\n\n &SwapchainCreateInfoKHR::builder()\n\n .surface(base.window_surface_related.surface)\n\n .min_image_count(image_count)\n\n .image_color_space(format.color_space)\n\n .image_format(format.format)\n", "file_path": "src/state/swapchain.rs", "rank": 80, "score": 27244.121906431876 }, { "content": " bail!(\"Somehow the number of images in the swapchain doesn't add up\");\n\n }\n\n\n\n let elements = (0..images.len())\n\n .into_iter()\n\n .map(|i| -> Result<SwapElement> {\n\n let image = images[i as usize];\n\n let view = DeviceImage::new_view(\n\n base,\n\n image,\n\n format.format,\n\n ImageAspectFlags::COLOR,\n\n 1,\n\n format!(\"WindowSwapchainView_{}\", i),\n\n )?;\n\n\n\n let frame_buffer = unsafe {\n\n base.device.create_framebuffer(\n\n &FramebufferCreateInfo::builder()\n\n .render_pass(render_pass)\n", "file_path": "src/state/swapchain.rs", "rank": 81, "score": 27241.28947315215 }, { "content": " render_pass: RenderPass,\n\n session: &Session<Vulkan>,\n\n ) -> Result<Self> {\n\n let extent = xr_base.get_resolution()?;\n\n\n\n let format = vk_base.find_supported_color_format()?;\n\n\n\n let swapchain = wrap_openxr::Base::get_swapchain(session, extent, format)?;\n\n\n\n let depth_image = DeviceImage::new(\n\n vk_base,\n\n DeviceImageSettings {\n\n extent: extent,\n\n format: vk_base.find_supported_depth_stencil_format()?,\n\n tiling: ImageTiling::OPTIMAL,\n\n usage: 
ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT,\n\n properties: MemoryPropertyFlags::DEVICE_LOCAL,\n\n aspect_flags: ImageAspectFlags::DEPTH,\n\n layer_count: 2,\n\n name: \"HMDDepth\".to_string(),\n", "file_path": "src/state/swapchain.rs", "rank": 82, "score": 27240.53768924188 }, { "content": " .image_extent(extent)\n\n .image_usage(ImageUsageFlags::COLOR_ATTACHMENT)\n\n .image_sharing_mode(SharingMode::EXCLUSIVE) // change this if present queue fam. differs\n\n .pre_transform(capabilities.current_transform)\n\n .composite_alpha(CompositeAlphaFlagsKHR::OPAQUE)\n\n .present_mode(present_mode)\n\n .clipped(true)\n\n .image_array_layers(1),\n\n None,\n\n )\n\n }?;\n\n // there is also the HMD swapchain\n\n base.name_object(handle, \"WindowSwapchain\".to_string())?;\n\n\n\n let images = unsafe { loader.get_swapchain_images(handle) }?;\n\n for (i, &image) in images.iter().enumerate() {\n\n base.name_object(image, format!(\"WindowSwapchainImage_{}\", i))?;\n\n }\n\n\n\n if images.len() != image_count as usize {\n", "file_path": "src/state/swapchain.rs", "rank": 83, "score": 27240.230384170532 }, { "content": " render_pass: RenderPass,\n\n wanted: Extent2D,\n\n ) -> Result<Self> {\n\n let depth_format = base.find_supported_depth_stencil_format()?;\n\n let extent = base.get_allowed_extend(wanted)?;\n\n\n\n let depth_image = DeviceImage::new(\n\n base,\n\n DeviceImageSettings {\n\n extent: extent,\n\n format: depth_format,\n\n tiling: ImageTiling::OPTIMAL,\n\n usage: ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT,\n\n properties: MemoryPropertyFlags::DEVICE_LOCAL,\n\n aspect_flags: ImageAspectFlags::DEPTH,\n\n layer_count: 1,\n\n name: \"WindowDepth\".to_string(),\n\n },\n\n )?;\n\n\n", "file_path": "src/state/swapchain.rs", "rank": 84, "score": 27239.26437548926 }, { "content": "\n\n Ok(Self {\n\n extent,\n\n swapchain,\n\n depth_image,\n\n elements,\n\n })\n\n }\n\n}\n", "file_path": "src/state/swapchain.rs", "rank": 85, "score": 27237.86064635525 }, { "content": " },\n\n )?;\n\n\n\n let elements = swapchain\n\n .enumerate_images()?\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, xr_image_handle)| -> Result<SwapElement> {\n\n let image = Image::from_raw(xr_image_handle);\n\n vk_base.name_object(image, format!(\"HMDSwapchainImage_{}\", i))?;\n\n\n\n let view = DeviceImage::new_view(\n\n vk_base,\n\n image,\n\n format,\n\n ImageAspectFlags::COLOR,\n\n 2,\n\n format!(\"HMDSwapchainView_{}\", i),\n\n )?;\n\n\n", "file_path": "src/state/swapchain.rs", "rank": 86, "score": 27236.64179388046 }, { "content": "use crate::{\n\n wrap_vulkan::{geometry::MeshBuffers, sync::wait_and_reset},\n\n State,\n\n};\n\nuse anyhow::{Error, Result};\n\nuse ash::vk::{\n\n ClearColorValue, ClearDepthStencilValue, ClearValue, CommandBufferBeginInfo,\n\n CommandBufferResetFlags, DescriptorSet, IndexType, Pipeline, PipelineBindPoint, PipelineLayout,\n\n Rect2D, RenderPassBeginInfo, SubmitInfo, SubpassContents,\n\n};\n\n\n\nuse openxr::{\n\n CompositionLayerProjection, CompositionLayerProjectionView, Duration, EnvironmentBlendMode,\n\n Extent2Di, Offset2Di, Rect2Di, SwapchainSubImage, View,\n\n};\n\n\n\nuse super::PreRenderInfoHMD;\n\n\n\nimpl State {\n\n pub fn pre_render_hmd(&mut self) -> Result<PreRenderInfoHMD> {\n", "file_path": "src/state/render_hmd.rs", "rank": 87, "score": 25806.422624907078 }, { "content": " } = pre_render_info;\n\n\n\n let image_index = image_index.ok_or(Error::msg(\"Shouldn't render, says OpenXR\"))?;\n\n let command_buffer = self.hmd_command_buffers[image_index as usize];\n\n let rendering_finished_fence = 
self.hmd_fences_rendering_finished[image_index as usize];\n\n\n\n unsafe {\n\n self.vulkan.device.queue_submit(\n\n self.vulkan.queue,\n\n &[SubmitInfo::builder()\n\n .command_buffers(&[command_buffer])\n\n .build()],\n\n rendering_finished_fence,\n\n )?;\n\n }\n\n\n\n self.hmd_swapchain.swapchain.release_image()?;\n\n\n\n self.frame_stream.end(\n\n frame_state.predicted_display_time,\n", "file_path": "src/state/render_hmd.rs", "rank": 88, "score": 25803.27160349383 }, { "content": " 0,\n\n &[descriptor_set],\n\n &[],\n\n );\n\n d.cmd_draw_indexed(command_buffer, mesh.num_indices() as u32, 1, 0, 0, 0);\n\n\n\n d.cmd_end_render_pass(command_buffer);\n\n d.end_command_buffer(command_buffer)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn submit_hmd(\n\n &mut self,\n\n pre_render_info: PreRenderInfoHMD,\n\n views: &[View; 2],\n\n ) -> Result<()> {\n\n let PreRenderInfoHMD {\n\n image_index,\n\n frame_state,\n", "file_path": "src/state/render_hmd.rs", "rank": 89, "score": 25802.24422707052 }, { "content": " let frame_state = self.frame_wait.wait()?;\n\n self.frame_stream.begin()?;\n\n\n\n if !frame_state.should_render {\n\n self.frame_stream.end(\n\n frame_state.predicted_display_time,\n\n EnvironmentBlendMode::OPAQUE,\n\n &[],\n\n )?;\n\n }\n\n\n\n let image_index = if frame_state.should_render {\n\n Some(self.hmd_swapchain.swapchain.acquire_image()?)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(PreRenderInfoHMD {\n\n image_index,\n\n frame_state,\n", "file_path": "src/state/render_hmd.rs", "rank": 90, "score": 25801.909683069975 }, { "content": " EnvironmentBlendMode::OPAQUE,\n\n &[&CompositionLayerProjection::new().space(&self.stage).views(\n\n &views\n\n .iter()\n\n .enumerate()\n\n .map(|(i, view)| {\n\n CompositionLayerProjectionView::new()\n\n .pose(view.pose)\n\n .fov(view.fov)\n\n .sub_image(\n\n SwapchainSubImage::new()\n\n .swapchain(&self.hmd_swapchain.swapchain)\n\n .image_array_index(i as u32)\n\n .image_rect(Rect2Di {\n\n offset: Offset2Di::default(),\n\n extent: Extent2Di {\n\n width: self.hmd_swapchain.extent.width as i32,\n\n height: self.hmd_swapchain.extent.height as i32,\n\n },\n\n }),\n", "file_path": "src/state/render_hmd.rs", "rank": 91, "score": 25800.9078685705 }, { "content": "\n\n let rendering_finished_fence = self.hmd_fences_rendering_finished[image_index as usize];\n\n let command_buffer = self.hmd_command_buffers[image_index as usize];\n\n let frame_buffer = self.hmd_swapchain.elements[image_index as usize].frame_buffer;\n\n let extent = self.hmd_swapchain.extent;\n\n\n\n // wait for rendering operations\n\n wait_and_reset(&self.vulkan, rendering_finished_fence)?;\n\n\n\n unsafe {\n\n let d = &self.vulkan.device;\n\n\n\n d.reset_command_buffer(command_buffer, CommandBufferResetFlags::RELEASE_RESOURCES)?;\n\n d.begin_command_buffer(command_buffer, &CommandBufferBeginInfo::builder())?;\n\n d.cmd_begin_render_pass(\n\n command_buffer,\n\n &RenderPassBeginInfo::builder()\n\n .render_pass(self.hmd_render_pass)\n\n .framebuffer(frame_buffer)\n\n .render_area(*Rect2D::builder().extent(extent))\n", "file_path": "src/state/render_hmd.rs", "rank": 92, "score": 25798.689694476358 }, { "content": " })\n\n }\n\n\n\n pub fn record_hmd(\n\n &mut self,\n\n pre_render_info: PreRenderInfoHMD,\n\n pipeline_layout: PipelineLayout,\n\n pipeline: Pipeline,\n\n mesh: &MeshBuffers,\n\n descriptor_set: DescriptorSet,\n\n ) -> Result<()> {\n\n let PreRenderInfoHMD { image_index, .. 
} = pre_render_info;\n\n\n\n let image_index = image_index.ok_or(Error::msg(\"Shouldn't render, says OpenXR\"))?;\n\n\n\n // Wait until the image is available to render to. The compositor could still be\n\n // reading from it.\n\n self.hmd_swapchain\n\n .swapchain\n\n .wait_image(Duration::INFINITE)?;\n", "file_path": "src/state/render_hmd.rs", "rank": 93, "score": 25797.349218332893 }, { "content": " )\n\n })\n\n .collect::<Vec<_>>(),\n\n )],\n\n )?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/state/render_hmd.rs", "rank": 94, "score": 25797.111636528978 }, { "content": " .clear_values(&[\n\n ClearValue {\n\n color: ClearColorValue::default(),\n\n },\n\n ClearValue {\n\n depth_stencil: ClearDepthStencilValue {\n\n depth: 1.0,\n\n stencil: 0,\n\n },\n\n },\n\n ]),\n\n SubpassContents::INLINE,\n\n );\n\n d.cmd_bind_pipeline(command_buffer, PipelineBindPoint::GRAPHICS, pipeline);\n\n d.cmd_bind_vertex_buffers(command_buffer, 0, &[mesh.vertex.handle()], &[0]);\n\n d.cmd_bind_index_buffer(command_buffer, mesh.index.handle(), 0, IndexType::UINT32);\n\n d.cmd_bind_descriptor_sets(\n\n command_buffer,\n\n PipelineBindPoint::GRAPHICS,\n\n pipeline_layout,\n", "file_path": "src/state/render_hmd.rs", "rank": 95, "score": 25793.669587713033 }, { "content": " .iter()\n\n .enumerate()\n\n .find_map(|(queue_family_index, suitable)| {\n\n if *suitable {\n\n Some(queue_family_index as u32)\n\n } else {\n\n None\n\n }\n\n })\n\n .ok_or(Error::msg(\"Vulkan device has no suitable queue\"))?;\n\n\n\n log::trace!(\"Using queue nr. {}\", queue_family_index);\n\n\n\n let device = unsafe {\n\n wrap_openxr.get_vulkan_device(\n\n &entry,\n\n &instance,\n\n physical_device,\n\n &DeviceCreateInfo::builder()\n\n .queue_create_infos(&[DeviceQueueCreateInfo::builder()\n", "file_path": "src/wrap_vulkan/base.rs", "rank": 96, "score": 23818.46191360577 }, { "content": "use crate::wrap_openxr;\n\n\n\n#[cfg(feature = \"validation_vulkan\")]\n\nuse super::Debug;\n\nuse super::{surface::Detail, SurfaceRelated};\n\n\n\npub struct Base {\n\n pub entry: ManuallyDrop<Entry>,\n\n pub instance: ManuallyDrop<Instance>,\n\n pub physical_device: ManuallyDrop<PhysicalDevice>,\n\n pub device: ManuallyDrop<Device>,\n\n\n\n #[cfg(feature = \"validation_vulkan\")]\n\n pub debug: ManuallyDrop<Debug>,\n\n\n\n pub queue_family_index: u32,\n\n pub window_surface_related: ManuallyDrop<SurfaceRelated>,\n\n\n\n pub pool: CommandPool,\n\n pub queue: Queue,\n", "file_path": "src/wrap_vulkan/base.rs", "rank": 97, "score": 23816.222727047687 }, { "content": "}\n\n\n\nimpl Drop for Base {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ManuallyDrop::drop(&mut self.window_surface_related);\n\n ManuallyDrop::drop(&mut self.device);\n\n ManuallyDrop::drop(&mut self.physical_device);\n\n #[cfg(feature = \"validation_vulkan\")]\n\n ManuallyDrop::drop(&mut self.debug);\n\n ManuallyDrop::drop(&mut self.instance);\n\n ManuallyDrop::drop(&mut self.entry);\n\n }\n\n }\n\n}\n\n\n\nimpl Base {\n\n pub fn new(window: &Window, wrap_openxr: &wrap_openxr::Base) -> Result<Base> {\n\n #[cfg(feature = \"validation_vulkan\")]\n\n const VALIDATION_LAYER_NAME: &'static str = \"VK_LAYER_KHRONOS_validation\";\n", "file_path": "src/wrap_vulkan/base.rs", "rank": 98, "score": 23815.82673113618 }, { "content": "\n\n log::trace!(\"Vulkan device extensions: {:?}\", device_extensions);\n\n\n\n for req_ext in &device_extensions {\n\n if physical_device_extension_properties\n\n .iter()\n\n .find(|prop| unsafe { CStr::from_ptr(prop.extension_name.as_ptr()) } == req_ext.as_c_str())\n\n 
.is_none()\n\n {\n\n bail!(\"Physical device doesn't support extension: {:?}\", req_ext);\n\n }\n\n }\n\n let physical_device_properties =\n\n unsafe { instance.get_physical_device_properties(physical_device) };\n\n if physical_device_properties.api_version < vk_target_version {\n\n unsafe { instance.destroy_instance(None) };\n\n bail!(\"Vulkan phyiscal device doesn't support target version\");\n\n }\n\n\n\n let surface_related = SurfaceRelated::new(&entry, &instance, window)?;\n", "file_path": "src/wrap_vulkan/base.rs", "rank": 99, "score": 23814.392419123422 } ]
Rust
crates/nu-parser/src/hir.rs
jkatzmewing/nushell
7061af712e3eec7e2bb68bf7ca31e67d6b8b3ae8
pub(crate) mod baseline_parse; pub(crate) mod binary; pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; pub(crate) mod range; pub mod syntax_shape; pub(crate) mod tokens_iterator; use crate::hir::syntax_shape::Member; use crate::parse::operator::CompareOperator; use crate::parse::parser::Number; use crate::parse::unit::Unit; use derive_new::new; use getset::Getters; use nu_protocol::{PathMember, ShellTypeName}; use nu_source::{ b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind, PrettyDebugWithSource, Span, Spanned, }; use serde::{Deserialize, Serialize}; use std::path::PathBuf; use crate::parse::number::RawNumber; pub(crate) use self::binary::Binary; pub(crate) use self::path::Path; pub(crate) use self::range::Range; pub(crate) use self::tokens_iterator::TokensIterator; pub use self::external_command::ExternalCommand; pub use self::named::{NamedArguments, NamedValue}; #[derive(Debug, Clone)] pub struct Signature { unspanned: nu_protocol::Signature, span: Span, } impl Signature { pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature { Signature { unspanned, span: span.into(), } } } impl HasSpan for Signature { fn span(&self) -> Span { self.span } } impl PrettyDebugWithSource for Signature { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { self.unspanned.pretty_debug(source) } } #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { #[get = "pub(crate)"] pub head: Box<SpannedExpression>, #[get = "pub(crate)"] pub positional: Option<Vec<SpannedExpression>>, #[get = "pub(crate)"] pub named: Option<NamedArguments>, pub span: Span, } impl Call { pub fn switch_preset(&self, switch: &str) -> bool { self.named .as_ref() .and_then(|n| n.get(switch)) .map(|t| match t { NamedValue::PresentSwitch(_) => true, _ => false, }) .unwrap_or(false) } } impl PrettyDebugWithSource for Call { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), PrettyDebugRefineKind::WithContext => { self.head .refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) + b::preceded_option( Some(b::space()), self.positional.as_ref().map(|pos| { b::intersperse( pos.iter().map(|expr| { expr.refined_pretty_debug( PrettyDebugRefineKind::WithContext, source, ) }), b::space(), ) }), ) + b::preceded_option( Some(b::space()), self.named.as_ref().map(|named| { named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) }), ) } } } fn pretty_debug(&self, source: &str) -> DebugDocBuilder { b::typed( "call", self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source), ) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Expression { Literal(Literal), ExternalWord, Synthetic(Synthetic), Variable(Variable), Binary(Box<Binary>), Range(Box<Range>), Block(Vec<SpannedExpression>), List(Vec<SpannedExpression>), Path(Box<Path>), FilePath(PathBuf), ExternalCommand(ExternalCommand), Command(Span), Boolean(bool), } impl ShellTypeName for Expression { fn type_name(&self) -> &'static str { match self { Expression::Literal(literal) => literal.type_name(), Expression::Synthetic(synthetic) => synthetic.type_name(), Expression::Command(..) => "command", Expression::ExternalWord => "external word", Expression::FilePath(..) => "file path", Expression::Variable(..) => "variable", Expression::List(..) 
=> "list", Expression::Binary(..) => "binary", Expression::Range(..) => "range", Expression::Block(..) => "block", Expression::Path(..) => "variable path", Expression::Boolean(..) => "boolean", Expression::ExternalCommand(..) => "external", } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Synthetic { String(String), } impl ShellTypeName for Synthetic { fn type_name(&self) -> &'static str { match self { Synthetic::String(_) => "string", } } } impl IntoSpanned for Expression { type Output = SpannedExpression; fn into_spanned(self, span: impl Into<Span>) -> Self::Output { SpannedExpression { expr: self, span: span.into(), } } } impl Expression { pub fn into_expr(self, span: impl Into<Span>) -> SpannedExpression { self.into_spanned(span) } pub fn into_unspanned_expr(self) -> SpannedExpression { SpannedExpression { expr: self, span: Span::unknown(), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub struct SpannedExpression { pub expr: Expression, pub span: Span, } impl std::ops::Deref for SpannedExpression { type Target = Expression; fn deref(&self) -> &Expression { &self.expr } } impl HasSpan for SpannedExpression { fn span(&self) -> Span { self.span } } impl ShellTypeName for SpannedExpression { fn type_name(&self) -> &'static str { self.expr.type_name() } } impl PrettyDebugWithSource for SpannedExpression { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source), PrettyDebugRefineKind::WithContext => match &self.expr { Expression::Literal(literal) => literal .clone() .into_spanned(self.span) .refined_pretty_debug(refine, source), Expression::ExternalWord => { b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group() } Expression::Synthetic(s) => match s { Synthetic::String(_) => { b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group() } }, Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), Expression::Variable(Variable::It(_)) => b::keyword("$it"), Expression::Binary(binary) => binary.pretty_debug(source), Expression::Range(range) => range.pretty_debug(source), Expression::Block(_) => b::opaque("block"), Expression::List(list) => b::delimit( "[", b::intersperse( list.iter() .map(|item| item.refined_pretty_debug(refine, source)), b::space(), ), "]", ), Expression::Path(path) => path.pretty_debug(source), Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), Expression::ExternalCommand(external) => { b::keyword("^") + b::keyword(external.name.slice(source)) } Expression::Command(command) => b::keyword(command.slice(source)), Expression::Boolean(boolean) => match boolean { true => b::primitive("$yes"), false => b::primitive("$no"), }, }, } } fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.expr { Expression::Literal(literal) => { literal.clone().into_spanned(self.span).pretty_debug(source) } Expression::ExternalWord => { b::typed("external word", b::primitive(self.span.slice(source))) } Expression::Synthetic(s) => match s { Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))), }, Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), Expression::Variable(Variable::It(_)) => b::keyword("$it"), Expression::Binary(binary) => binary.pretty_debug(source), Expression::Range(range) => range.pretty_debug(source), 
Expression::Block(_) => b::opaque("block"), Expression::List(list) => b::delimit( "[", b::intersperse( list.iter().map(|item| item.pretty_debug(source)), b::space(), ), "]", ), Expression::Path(path) => path.pretty_debug(source), Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), Expression::ExternalCommand(external) => b::typed( "command", b::keyword("^") + b::primitive(external.name.slice(source)), ), Expression::Command(command) => { b::typed("command", b::primitive(command.slice(source))) } Expression::Boolean(boolean) => match boolean { true => b::primitive("$yes"), false => b::primitive("$no"), }, } } } impl Expression { pub fn number(i: impl Into<Number>) -> Expression { Expression::Literal(Literal::Number(i.into())) } pub fn size(i: impl Into<Number>, unit: impl Into<Unit>) -> Expression { Expression::Literal(Literal::Size(i.into(), unit.into())) } pub fn string(inner: impl Into<Span>) -> Expression { Expression::Literal(Literal::String(inner.into())) } pub fn synthetic_string(string: impl Into<String>) -> Expression { Expression::Synthetic(Synthetic::String(string.into())) } pub fn column_path(members: Vec<Member>) -> Expression { Expression::Literal(Literal::ColumnPath(members)) } pub fn path(head: SpannedExpression, tail: Vec<impl Into<PathMember>>) -> Expression { let tail = tail.into_iter().map(|t| t.into()).collect(); Expression::Path(Box::new(Path::new(head, tail))) } pub fn dot_member(head: SpannedExpression, next: impl Into<PathMember>) -> Expression { let SpannedExpression { expr: item, span } = head; let next = next.into(); match item { Expression::Path(path) => { let (head, mut tail) = path.parts(); tail.push(next); Expression::path(head, tail) } other => Expression::path(other.into_expr(span), vec![next]), } } pub fn infix( left: SpannedExpression, op: Spanned<impl Into<CompareOperator>>, right: SpannedExpression, ) -> Expression { Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) } pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression { Expression::Range(Box::new(Range::new(left, op, right))) } pub fn file_path(path: impl Into<PathBuf>) -> Expression { Expression::FilePath(path.into()) } pub fn list(list: Vec<SpannedExpression>) -> Expression { Expression::List(list) } pub fn bare() -> Expression { Expression::Literal(Literal::Bare) } pub fn pattern(inner: impl Into<String>) -> Expression { Expression::Literal(Literal::GlobPattern(inner.into())) } pub fn variable(inner: impl Into<Span>) -> Expression { Expression::Variable(Variable::Other(inner.into())) } pub fn external_command(inner: impl Into<Span>) -> Expression { Expression::ExternalCommand(ExternalCommand::new(inner.into())) } pub fn it_variable(inner: impl Into<Span>) -> Expression { Expression::Variable(Variable::It(inner.into())) } } impl From<Spanned<Path>> for SpannedExpression { fn from(path: Spanned<Path>) -> SpannedExpression { Expression::Path(Box::new(path.item)).into_expr(path.span) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Literal { Number(Number), Size(Number, Unit), String(Span), GlobPattern(String), ColumnPath(Vec<Member>), Bare, } impl Literal { pub fn into_spanned(self, span: impl Into<Span>) -> SpannedLiteral { SpannedLiteral { literal: self, span: span.into(), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub struct SpannedLiteral { pub literal: Literal, pub span: Span, } impl ShellTypeName for Literal { 
fn type_name(&self) -> &'static str { match &self { Literal::Number(..) => "number", Literal::Size(..) => "size", Literal::String(..) => "string", Literal::ColumnPath(..) => "column path", Literal::Bare => "string", Literal::GlobPattern(_) => "pattern", } } } impl PrettyDebugWithSource for SpannedLiteral { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), PrettyDebugRefineKind::WithContext => match &self.literal { Literal::Number(number) => number.pretty(), Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(), Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))), Literal::GlobPattern(pattern) => b::primitive(pattern), Literal::ColumnPath(path) => { b::intersperse_with_source(path.iter(), b::space(), source) } Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""), }, } } fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.literal { Literal::Number(number) => number.pretty(), Literal::Size(number, unit) => { b::typed("size", (number.pretty() + unit.pretty()).group()) } Literal::String(string) => b::typed( "string", b::primitive(format!("{:?}", string.slice(source))), ), Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)), Literal::ColumnPath(path) => b::typed( "column path", b::intersperse_with_source(path.iter(), b::space(), source), ), Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Variable { It(Span), Other(Span), }
pub(crate) mod baseline_parse; pub(crate) mod binary; pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; pub(crate) mod range; pub mod syntax_shape; pub(crate) mod tokens_iterator; use crate::hir::syntax_shape::Member; use crate::parse::operator::CompareOperator; use crate::parse::parser::Number; use crate::parse::unit::Unit; use derive_new::new; use getset::Getters; use nu_protocol::{PathMember, ShellTypeName}; use nu_source::{ b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind, PrettyDebugWithSource, Span, Spanned, }; use serde::{Deserialize, Serialize}; use std::path::PathBuf; use crate::parse::number::RawNumber; pub(crate) use self::binary::Binary; pub(crate) use self::path::Path; pub(crate) use self::range::Range; pub(crate) use self::tokens_iterator::TokensIterator; pub use self::external_command::ExternalCommand; pub use self::named::{NamedArguments, NamedValue}; #[derive(Debug, Clone)] pub struct Signature { unspanned: nu_protocol::Signature, span: Span, } impl Signature { pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature { Signature { unspanned, span: span.into(), } } } impl HasSpan for Signature { fn span(&self) -> Span { self.span } } impl PrettyDebugWithSource for Signature { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { self.unspanned.pretty_debug(source) } } #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { #[get = "pub(crate)"] pub head: Box<SpannedExpression>, #[get = "pub(crate)"] pub positional: Option<Vec<SpannedExpression>>, #[get = "pub(crate)"] pub named: Option<NamedArguments>, pub span: Span, } impl Call { pub fn switch_preset(&self, switch: &str) -> bool { self.named .as_ref() .and_then(|n| n.get(switch)) .map(|t| match t { NamedValue::PresentSwitch(_) => true, _ => false, }) .unwrap_or(false) } } impl PrettyDebugWithSource for Call { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), PrettyDebugRefineKind::WithContext => { self.head .refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) + b::preceded_option( Some(b::space()), self.positional.as_ref().map(|pos| { b::intersperse( pos.iter().map(|expr| { expr.refined_pretty_debug( PrettyDebugRefineKind::WithContext, source, ) }), b::spac
fn pretty_debug(&self, source: &str) -> DebugDocBuilder { b::typed( "call", self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source), ) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Expression { Literal(Literal), ExternalWord, Synthetic(Synthetic), Variable(Variable), Binary(Box<Binary>), Range(Box<Range>), Block(Vec<SpannedExpression>), List(Vec<SpannedExpression>), Path(Box<Path>), FilePath(PathBuf), ExternalCommand(ExternalCommand), Command(Span), Boolean(bool), } impl ShellTypeName for Expression { fn type_name(&self) -> &'static str { match self { Expression::Literal(literal) => literal.type_name(), Expression::Synthetic(synthetic) => synthetic.type_name(), Expression::Command(..) => "command", Expression::ExternalWord => "external word", Expression::FilePath(..) => "file path", Expression::Variable(..) => "variable", Expression::List(..) => "list", Expression::Binary(..) => "binary", Expression::Range(..) => "range", Expression::Block(..) => "block", Expression::Path(..) => "variable path", Expression::Boolean(..) => "boolean", Expression::ExternalCommand(..) => "external", } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Synthetic { String(String), } impl ShellTypeName for Synthetic { fn type_name(&self) -> &'static str { match self { Synthetic::String(_) => "string", } } } impl IntoSpanned for Expression { type Output = SpannedExpression; fn into_spanned(self, span: impl Into<Span>) -> Self::Output { SpannedExpression { expr: self, span: span.into(), } } } impl Expression { pub fn into_expr(self, span: impl Into<Span>) -> SpannedExpression { self.into_spanned(span) } pub fn into_unspanned_expr(self) -> SpannedExpression { SpannedExpression { expr: self, span: Span::unknown(), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub struct SpannedExpression { pub expr: Expression, pub span: Span, } impl std::ops::Deref for SpannedExpression { type Target = Expression; fn deref(&self) -> &Expression { &self.expr } } impl HasSpan for SpannedExpression { fn span(&self) -> Span { self.span } } impl ShellTypeName for SpannedExpression { fn type_name(&self) -> &'static str { self.expr.type_name() } } impl PrettyDebugWithSource for SpannedExpression { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source), PrettyDebugRefineKind::WithContext => match &self.expr { Expression::Literal(literal) => literal .clone() .into_spanned(self.span) .refined_pretty_debug(refine, source), Expression::ExternalWord => { b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group() } Expression::Synthetic(s) => match s { Synthetic::String(_) => { b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group() } }, Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), Expression::Variable(Variable::It(_)) => b::keyword("$it"), Expression::Binary(binary) => binary.pretty_debug(source), Expression::Range(range) => range.pretty_debug(source), Expression::Block(_) => b::opaque("block"), Expression::List(list) => b::delimit( "[", b::intersperse( list.iter() .map(|item| item.refined_pretty_debug(refine, source)), b::space(), ), "]", ), Expression::Path(path) => path.pretty_debug(source), Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), 
Expression::ExternalCommand(external) => { b::keyword("^") + b::keyword(external.name.slice(source)) } Expression::Command(command) => b::keyword(command.slice(source)), Expression::Boolean(boolean) => match boolean { true => b::primitive("$yes"), false => b::primitive("$no"), }, }, } } fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.expr { Expression::Literal(literal) => { literal.clone().into_spanned(self.span).pretty_debug(source) } Expression::ExternalWord => { b::typed("external word", b::primitive(self.span.slice(source))) } Expression::Synthetic(s) => match s { Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))), }, Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)), Expression::Variable(Variable::It(_)) => b::keyword("$it"), Expression::Binary(binary) => binary.pretty_debug(source), Expression::Range(range) => range.pretty_debug(source), Expression::Block(_) => b::opaque("block"), Expression::List(list) => b::delimit( "[", b::intersperse( list.iter().map(|item| item.pretty_debug(source)), b::space(), ), "]", ), Expression::Path(path) => path.pretty_debug(source), Expression::FilePath(path) => b::typed("path", b::primitive(path.display())), Expression::ExternalCommand(external) => b::typed( "command", b::keyword("^") + b::primitive(external.name.slice(source)), ), Expression::Command(command) => { b::typed("command", b::primitive(command.slice(source))) } Expression::Boolean(boolean) => match boolean { true => b::primitive("$yes"), false => b::primitive("$no"), }, } } } impl Expression { pub fn number(i: impl Into<Number>) -> Expression { Expression::Literal(Literal::Number(i.into())) } pub fn size(i: impl Into<Number>, unit: impl Into<Unit>) -> Expression { Expression::Literal(Literal::Size(i.into(), unit.into())) } pub fn string(inner: impl Into<Span>) -> Expression { Expression::Literal(Literal::String(inner.into())) } pub fn synthetic_string(string: impl Into<String>) -> Expression { Expression::Synthetic(Synthetic::String(string.into())) } pub fn column_path(members: Vec<Member>) -> Expression { Expression::Literal(Literal::ColumnPath(members)) } pub fn path(head: SpannedExpression, tail: Vec<impl Into<PathMember>>) -> Expression { let tail = tail.into_iter().map(|t| t.into()).collect(); Expression::Path(Box::new(Path::new(head, tail))) } pub fn dot_member(head: SpannedExpression, next: impl Into<PathMember>) -> Expression { let SpannedExpression { expr: item, span } = head; let next = next.into(); match item { Expression::Path(path) => { let (head, mut tail) = path.parts(); tail.push(next); Expression::path(head, tail) } other => Expression::path(other.into_expr(span), vec![next]), } } pub fn infix( left: SpannedExpression, op: Spanned<impl Into<CompareOperator>>, right: SpannedExpression, ) -> Expression { Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) } pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression { Expression::Range(Box::new(Range::new(left, op, right))) } pub fn file_path(path: impl Into<PathBuf>) -> Expression { Expression::FilePath(path.into()) } pub fn list(list: Vec<SpannedExpression>) -> Expression { Expression::List(list) } pub fn bare() -> Expression { Expression::Literal(Literal::Bare) } pub fn pattern(inner: impl Into<String>) -> Expression { Expression::Literal(Literal::GlobPattern(inner.into())) } pub fn variable(inner: impl Into<Span>) -> Expression { Expression::Variable(Variable::Other(inner.into())) } pub fn 
external_command(inner: impl Into<Span>) -> Expression { Expression::ExternalCommand(ExternalCommand::new(inner.into())) } pub fn it_variable(inner: impl Into<Span>) -> Expression { Expression::Variable(Variable::It(inner.into())) } } impl From<Spanned<Path>> for SpannedExpression { fn from(path: Spanned<Path>) -> SpannedExpression { Expression::Path(Box::new(path.item)).into_expr(path.span) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Literal { Number(Number), Size(Number, Unit), String(Span), GlobPattern(String), ColumnPath(Vec<Member>), Bare, } impl Literal { pub fn into_spanned(self, span: impl Into<Span>) -> SpannedLiteral { SpannedLiteral { literal: self, span: span.into(), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub struct SpannedLiteral { pub literal: Literal, pub span: Span, } impl ShellTypeName for Literal { fn type_name(&self) -> &'static str { match &self { Literal::Number(..) => "number", Literal::Size(..) => "size", Literal::String(..) => "string", Literal::ColumnPath(..) => "column path", Literal::Bare => "string", Literal::GlobPattern(_) => "pattern", } } } impl PrettyDebugWithSource for SpannedLiteral { fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder { match refine { PrettyDebugRefineKind::ContextFree => self.pretty_debug(source), PrettyDebugRefineKind::WithContext => match &self.literal { Literal::Number(number) => number.pretty(), Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(), Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))), Literal::GlobPattern(pattern) => b::primitive(pattern), Literal::ColumnPath(path) => { b::intersperse_with_source(path.iter(), b::space(), source) } Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""), }, } } fn pretty_debug(&self, source: &str) -> DebugDocBuilder { match &self.literal { Literal::Number(number) => number.pretty(), Literal::Size(number, unit) => { b::typed("size", (number.pretty() + unit.pretty()).group()) } Literal::String(string) => b::typed( "string", b::primitive(format!("{:?}", string.slice(source))), ), Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)), Literal::ColumnPath(path) => b::typed( "column path", b::intersperse_with_source(path.iter(), b::space(), source), ), Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))), } } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Variable { It(Span), Other(Span), }
e(), ) }), ) + b::preceded_option( Some(b::space()), self.named.as_ref().map(|named| { named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source) }), ) } } }
function_block-function_prefixed
[ { "content": "pub fn files_exist_at(files: Vec<impl AsRef<Path>>, path: impl AsRef<Path>) -> bool {\n\n files.iter().all(|f| {\n\n let mut loc = PathBuf::from(path.as_ref());\n\n loc.push(f);\n\n loc.exists()\n\n })\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 0, "score": 387970.17854501505 }, { "content": "pub fn in_directory(str: impl AsRef<Path>) -> String {\n\n str.as_ref().display().to_string()\n\n}\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 1, "score": 374996.13010780385 }, { "content": "pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult<NomSpan, NomSpan> + Copy {\n\n move |input: NomSpan| match input.iter_elements().next() {\n\n Option::Some(c) if cond(c) => {\n\n let len_utf8 = c.len_utf8();\n\n Ok((input.slice(len_utf8..), input.slice(0..len_utf8)))\n\n }\n\n _ => Err(nom::Err::Error(nom::error::ParseError::from_error_kind(\n\n input,\n\n nom::error::ErrorKind::Many0,\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 2, "score": 373197.7091704062 }, { "content": "fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n let mut spec = TaggedDictBuilder::new(tag);\n\n\n\n spec.insert_untagged(\"name\", UntaggedValue::string(name));\n\n spec.insert_untagged(\"type\", UntaggedValue::string(ty));\n\n spec.insert_untagged(\n\n \"required\",\n\n UntaggedValue::string(if required { \"yes\" } else { \"no\" }),\n\n );\n\n\n\n spec.into_value()\n\n}\n\n\n", "file_path": "src/data/command.rs", "rank": 3, "score": 370645.0066069026 }, { "content": "pub fn get_data_by_key(value: &Value, name: Spanned<&str>) -> Option<Value> {\n\n match &value.value {\n\n UntaggedValue::Row(o) => o.get_data_by_key(name),\n\n UntaggedValue::Table(l) => {\n\n let mut out = vec![];\n\n for item in l {\n\n match item {\n\n Value {\n\n value: UntaggedValue::Row(o),\n\n ..\n\n } => match o.get_data_by_key(name) {\n\n Some(v) => out.push(v),\n\n None => out.push(UntaggedValue::nothing().into_untagged_value()),\n\n },\n\n _ => out.push(UntaggedValue::nothing().into_untagged_value()),\n\n }\n\n }\n\n\n\n if !out.is_empty() {\n\n Some(UntaggedValue::Table(out).into_value(name.span))\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 4, "score": 354828.5459302347 }, { "content": "pub fn nom_input(s: &str) -> NomSpan<'_> {\n\n LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new()))\n\n}\n", "file_path": "crates/nu-source/src/tracable.rs", "rank": 5, "score": 354004.7692700481 }, { "content": "pub fn span_for_spanned_list(mut iter: impl Iterator<Item = Span>) -> Span {\n\n let first = iter.next();\n\n\n\n let first = match first {\n\n None => return Span::unknown(),\n\n Some(first) => first,\n\n };\n\n\n\n let last = iter.last();\n\n\n\n match last {\n\n None => first,\n\n Some(last) => first.until(last),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]\n\npub struct Span {\n\n start: usize,\n\n end: usize,\n", "file_path": "crates/nu-source/src/meta.rs", "rank": 6, "score": 353546.56314054265 }, { "content": "pub fn file_contents_binary(full_path: impl AsRef<Path>) -> Vec<u8> {\n\n let mut file = std::fs::File::open(full_path.as_ref()).expect(\"can not open file\");\n\n let mut contents = Vec::new();\n\n file.read_to_end(&mut contents).expect(\"can not read file\");\n\n contents\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 7, "score": 333019.5866394579 }, { 
"content": "fn did_find_command(name: &str) -> bool {\n\n #[cfg(not(windows))]\n\n {\n\n which::which(name).is_ok()\n\n }\n\n\n\n #[cfg(windows)]\n\n {\n\n if which::which(name).is_ok() {\n\n true\n\n } else {\n\n let cmd_builtins = [\n\n \"call\", \"cls\", \"color\", \"date\", \"dir\", \"echo\", \"find\", \"hostname\", \"pause\",\n\n \"start\", \"time\", \"title\", \"ver\", \"copy\", \"mkdir\", \"rename\", \"rd\", \"rmdir\", \"type\",\n\n ];\n\n\n\n cmd_builtins.contains(&name)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/commands/classified/external.rs", "rank": 8, "score": 331251.99466848304 }, { "content": "pub fn insert_data_at_path(value: &Value, path: &str, new_value: Value) -> Option<Value> {\n\n let mut new_obj = value.clone();\n\n\n\n let split_path: Vec<_> = path.split('.').collect();\n\n\n\n if let UntaggedValue::Row(ref mut o) = new_obj.value {\n\n let mut current = o;\n\n\n\n if split_path.len() == 1 {\n\n // Special case for inserting at the top level\n\n current\n\n .entries\n\n .insert(path.to_string(), new_value.value.into_value(&value.tag));\n\n return Some(new_obj);\n\n }\n\n\n\n for idx in 0..split_path.len() {\n\n match current.entries.get_mut(split_path[idx]) {\n\n Some(next) => {\n\n if idx == (split_path.len() - 2) {\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 9, "score": 317647.300213492 }, { "content": "fn parse_tokens<T: Eq + HasSpan + PrettyDebugWithSource + Clone + Debug + 'static>(\n\n shape: impl ExpandSyntax<Output = Result<T, ParseError>>,\n\n syntax: &str,\n\n tokens: Vec<CurriedToken>,\n\n expected: impl FnOnce(&[SpannedToken]) -> T,\n\n) {\n\n // let parsed_tokens = parse(syntax);\n\n let tokens = b::token_list(tokens);\n\n let (tokens, source) = b::build(tokens);\n\n let text = Text::from(&source);\n\n\n\n assert_eq!(syntax, source);\n\n\n\n with_empty_context(&text, |context| {\n\n let tokens = tokens.expect_list();\n\n let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);\n\n\n\n let expr = iterator.expand_syntax(shape);\n\n\n\n let expr = match expr {\n", "file_path": "crates/nu-parser/src/hir/baseline_parse/tests.rs", "rank": 10, "score": 315416.5480144091 }, { "content": "pub fn argument_contains_whitespace(argument: &str) -> bool {\n\n argument.chars().any(|c| c.is_whitespace())\n\n}\n\n\n", "file_path": "src/commands/classified/external.rs", "rank": 11, "score": 315224.6989219996 }, { "content": "pub fn delete_file_at(full_path: impl AsRef<Path>) {\n\n let full_path = full_path.as_ref();\n\n\n\n if full_path.exists() {\n\n std::fs::remove_file(full_path).expect(\"can not delete file\");\n\n }\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 12, "score": 312307.9095765329 }, { "content": "pub fn file_contents(full_path: impl AsRef<Path>) -> String {\n\n let mut file = std::fs::File::open(full_path.as_ref()).expect(\"can not open file\");\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)\n\n .expect(\"can not read file\");\n\n contents\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 13, "score": 304162.4420731047 }, { "content": "pub fn copy_file_to(source: &str, destination: &str) {\n\n std::fs::copy(source, destination).expect(\"can not copy file\");\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 14, "score": 289210.1599645475 }, { "content": "pub fn style_leaf<'a>(value: impl Into<&'a UntaggedValue>) -> &'static str {\n\n match value.into() {\n\n UntaggedValue::Primitive(p) => style_primitive(p),\n\n _ => \"\",\n\n 
}\n\n}\n\n\n", "file_path": "src/data/value.rs", "rank": 15, "score": 287797.7617535219 }, { "content": "fn is_id(input: &str) -> bool {\n\n let source = nu_source::nom_input(input);\n\n match crate::parse::parser::ident(source) {\n\n Err(_) => false,\n\n Ok((input, _)) => input.fragment.is_empty(),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-parser/src/hir/signature.rs", "rank": 16, "score": 287631.30214011145 }, { "content": "pub fn create_file_at(full_path: impl AsRef<Path>) -> Result<(), std::io::Error> {\n\n let full_path = full_path.as_ref();\n\n\n\n if let Some(parent) = full_path.parent() {\n\n panic!(format!(\"{:?} exists\", parent.display()));\n\n }\n\n\n\n std::fs::write(full_path, b\"fake data\")\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 17, "score": 282413.01742038096 }, { "content": "pub fn get_column_path(path: &ColumnPath, obj: &Value) -> Result<Value, ShellError> {\n\n let fields = path.clone();\n\n\n\n get_data_by_column_path(\n\n obj,\n\n path,\n\n Box::new(move |(obj_source, column_path_tried, error)| {\n\n let path_members_span = span_for_spanned_list(fields.members().iter().map(|p| p.span));\n\n\n\n match &obj_source.value {\n\n UntaggedValue::Table(rows) => match column_path_tried {\n\n PathMember {\n\n unspanned: UnspannedPathMember::String(column),\n\n ..\n\n } => {\n\n let primary_label = format!(\"There isn't a column named '{}'\", &column);\n\n\n\n let suggestions: IndexSet<_> = rows\n\n .iter()\n\n .filter_map(|r| did_you_mean(&r, &column_path_tried))\n", "file_path": "src/commands/get.rs", "rank": 18, "score": 279195.12066799565 }, { "content": "/// Shortcuts for creating an entry to the output table\n\nfn entry(arg: impl Into<String>, path: Value, builtin: bool, tag: Tag) -> Value {\n\n let mut map = IndexMap::new();\n\n map.insert(\n\n \"arg\".to_string(),\n\n UntaggedValue::Primitive(Primitive::String(arg.into())).into_value(tag.clone()),\n\n );\n\n map.insert(\"path\".to_string(), path);\n\n map.insert(\n\n \"builtin\".to_string(),\n\n UntaggedValue::Primitive(Primitive::Boolean(builtin)).into_value(tag.clone()),\n\n );\n\n\n\n UntaggedValue::row(map).into_value(tag)\n\n}\n\n\n\nmacro_rules! entry_builtin {\n\n ($arg:expr, $tag:expr) => {\n\n entry(\n\n $arg.clone(),\n\n UntaggedValue::Primitive(Primitive::String(\"nushell built-in command\".to_string()))\n", "file_path": "src/commands/which_.rs", "rank": 19, "score": 278882.2993259089 }, { "content": "pub fn delete_directory_at(full_path: &str) {\n\n std::fs::remove_dir_all(PathBuf::from(full_path)).expect(\"can not remove directory\");\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 20, "score": 277950.0967151895 }, { "content": "pub fn get(\n\n GetArgs { rest: mut fields }: GetArgs,\n\n RunnableContext { input, .. }: RunnableContext,\n\n) -> Result<OutputStream, ShellError> {\n\n if fields.is_empty() {\n\n let stream = async_stream! 
{\n\n let values = input.values;\n\n pin_mut!(values);\n\n\n\n let mut shapes = Shapes::new();\n\n let mut index = 0;\n\n\n\n while let Some(row) = values.next().await {\n\n shapes.add(&row, index);\n\n index += 1;\n\n }\n\n\n\n for row in shapes.to_values() {\n\n yield ReturnSuccess::value(row);\n\n }\n", "file_path": "src/commands/get.rs", "rank": 21, "score": 277616.79434835986 }, { "content": "pub fn get_data_by_member(value: &Value, name: &PathMember) -> Result<Value, ShellError> {\n\n match &value.value {\n\n // If the value is a row, the member is a column name\n\n UntaggedValue::Row(o) => match &name.unspanned {\n\n // If the member is a string, get the data\n\n UnspannedPathMember::String(string) => o\n\n .get_data_by_key(string[..].spanned(name.span))\n\n .ok_or_else(|| {\n\n ShellError::missing_property(\n\n \"row\".spanned(value.tag.span),\n\n string.spanned(name.span),\n\n )\n\n }),\n\n\n\n // If the member is a number, it's an error\n\n UnspannedPathMember::Int(_) => Err(ShellError::invalid_integer_index(\n\n \"row\".spanned(value.tag.span),\n\n name.span,\n\n )),\n\n },\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 22, "score": 276068.3921612602 }, { "content": "pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {\n\n let expanded = shellexpand::tilde_with_context(string, || context.homedir());\n\n\n\n PathBuf::from(expanded.as_ref())\n\n}\n", "file_path": "crates/nu-parser/src/hir/syntax_shape/expression.rs", "rank": 23, "score": 257699.27923771332 }, { "content": "pub fn parse_script(input: &str) -> Result<SpannedToken, ShellError> {\n\n let _ = pretty_env_logger::try_init();\n\n\n\n match module(nom_input(input)) {\n\n Ok((_rest, val)) => Ok(val),\n\n Err(err) => Err(ShellError::parse_error(err)),\n\n }\n\n}\n", "file_path": "crates/nu-parser/src/lib.rs", "rank": 24, "score": 256060.99495843292 }, { "content": "pub fn parse_pipeline(input: &str) -> Result<SpannedToken, ShellError> {\n\n let _ = pretty_env_logger::try_init();\n\n\n\n match pipeline(nom_input(input)) {\n\n Ok((_rest, val)) => Ok(val),\n\n Err(err) => Err(ShellError::parse_error(err)),\n\n }\n\n}\n\n\n\npub use parse_pipeline as parse;\n\n\n", "file_path": "crates/nu-parser/src/lib.rs", "rank": 25, "score": 256060.99495843292 }, { "content": "pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {\n\n let path = app_root(app_data_type, &APP_INFO).map_err(|err| {\n\n ShellError::untagged_runtime_error(&format!(\"Couldn't open {} path:\\n{}\", display, err))\n\n })?;\n\n\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/data/config.rs", "rank": 26, "score": 251077.02703446662 }, { "content": "pub fn binaries() -> PathBuf {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .join(\"target/debug\")\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 27, "score": 249024.12610387278 }, { "content": "pub fn get_data_by_column_path(\n\n value: &Value,\n\n path: &ColumnPath,\n\n callback: Box<dyn FnOnce((&Value, &PathMember, ShellError)) -> ShellError>,\n\n) -> Result<Value, ShellError> {\n\n let mut current = value.clone();\n\n\n\n for p in path.iter() {\n\n let value = get_data_by_member(&current, p);\n\n\n\n match value {\n\n Ok(v) => current = v.clone(),\n\n Err(e) => return Err(callback((&current, &p.clone(), e))),\n\n }\n\n }\n\n\n\n Ok(current)\n\n}\n\n\n", "file_path": 
"crates/nu-value-ext/src/lib.rs", "rank": 28, "score": 248465.42687739193 }, { "content": "pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {\n\n let first = iter.next();\n\n\n\n let first = match first {\n\n None => return Tag::unknown(),\n\n Some(first) => first,\n\n };\n\n\n\n let last = iter.last();\n\n\n\n match last {\n\n None => first,\n\n Some(last) => first.until(last),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-source/src/meta.rs", "rank": 29, "score": 246417.14813731654 }, { "content": "fn argument_is_quoted(argument: &str) -> bool {\n\n if argument.len() < 2 {\n\n return false;\n\n }\n\n\n\n (argument.starts_with('\"') && argument.ends_with('\"')\n\n || (argument.starts_with('\\'') && argument.ends_with('\\'')))\n\n}\n\n\n", "file_path": "src/commands/classified/external.rs", "rank": 30, "score": 245358.90000552498 }, { "content": "#[tracable_parser]\n\npub fn raw_call(input: NomSpan) -> IResult<NomSpan, Spanned<CallNode>> {\n\n let left = input.offset;\n\n let (input, items) = token_list(input)?;\n\n let right = input.offset;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_call(items.item, Span::new(left, right)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 31, "score": 243464.7720800448 }, { "content": "pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) {\n\n let offset = tail.clone().span_at_cursor();\n\n let source = tail.source();\n\n\n\n trace!(\n\n target: \"nu::parse::trace_remaining\",\n\n \"{} = {}\",\n\n desc,\n\n itertools::join(\n\n tail.debug_remaining()\n\n .iter()\n\n .map(|val| {\n\n if val.span().start() == offset.start() {\n\n format!(\"<|> %{}%\", val.debug(&source))\n\n } else {\n\n format!(\"%{}%\", val.debug(&source))\n\n }\n\n }),\n\n \" \"\n\n )\n\n );\n\n}\n", "file_path": "crates/nu-parser/src/parse_command.rs", "rank": 32, "score": 239096.41814272263 }, { "content": "pub fn shell_os_paths() -> Vec<std::path::PathBuf> {\n\n let mut original_paths = vec![];\n\n\n\n if let Some(paths) = std::env::var_os(\"PATH\") {\n\n original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();\n\n }\n\n\n\n original_paths\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::pipeline;\n\n\n\n #[test]\n\n fn constructs_a_pipeline() {\n\n let actual = pipeline(\n\n r#\"\n\n open los_tres_amigos.txt\n\n | from-csv\n", "file_path": "crates/nu-test-support/src/lib.rs", "rank": 33, "score": 238473.07178985522 }, { "content": "fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (RawNumber, Spanned<Unit>)> {\n\n let (input, digits) = digit1(input)?;\n\n\n\n let (input, dot) = opt(tag(\".\"))(input)?;\n\n\n\n let (input, number) = match dot {\n\n Some(dot) => {\n\n let (input, rest) = digit1(input)?;\n\n (\n\n input,\n\n RawNumber::decimal(Span::new(\n\n bare_span.start(),\n\n bare_span.start() + digits.len() + dot.len() + rest.len(),\n\n )),\n\n )\n\n }\n\n\n\n None => (\n\n input,\n\n RawNumber::int(Span::new(\n", "file_path": "crates/nu-parser/src/hir/syntax_shape/expression/unit.rs", "rank": 34, "score": 238020.24666290692 }, { "content": "fn get_cargo_workspace(manifest_dir: &str) -> Result<Option<&Path>, Box<dyn std::error::Error>> {\n\n let mut workspaces = WORKSPACES.lock()?;\n\n if let Some(rv) = workspaces.get(manifest_dir) {\n\n Ok(Some(rv))\n\n } else {\n\n #[derive(Deserialize)]\n\n struct Manifest {\n\n workspace_root: String,\n\n }\n\n let output = std::process::Command::new(env!(\"CARGO\"))\n\n .arg(\"metadata\")\n\n .arg(\"--format-version=1\")\n\n 
.current_dir(manifest_dir)\n\n .output()?;\n\n let manifest: Manifest = serde_json::from_slice(&output.stdout)?;\n\n let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));\n\n workspaces.insert(manifest_dir.to_string(), path.as_path());\n\n Ok(workspaces.get(manifest_dir).cloned())\n\n }\n\n}\n\n\n", "file_path": "crates/nu-build/src/lib.rs", "rank": 35, "score": 233857.9854823161 }, { "content": "pub fn pipeline(commands: &str) -> String {\n\n commands\n\n .lines()\n\n .skip(1)\n\n .map(|line| line.trim())\n\n .collect::<Vec<&str>>()\n\n .join(\" \")\n\n .trim_end()\n\n .to_string()\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/lib.rs", "rank": 36, "score": 233059.52647028668 }, { "content": "pub fn get_data<'value>(value: &'value Value, desc: &str) -> MaybeOwned<'value, Value> {\n\n match &value.value {\n\n UntaggedValue::Primitive(_) => MaybeOwned::Borrowed(value),\n\n UntaggedValue::Row(o) => o.get_data(desc),\n\n UntaggedValue::Block(_) | UntaggedValue::Table(_) | UntaggedValue::Error(_) => {\n\n MaybeOwned::Owned(UntaggedValue::nothing().into_untagged_value())\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn get_data_by_index(value: &Value, idx: Spanned<usize>) -> Option<Value> {\n\n match &value.value {\n\n UntaggedValue::Table(value_set) => {\n\n let value = value_set.get(idx.item)?;\n\n Some(\n\n value\n\n .value\n\n .clone()\n\n .into_value(Tag::new(value.anchor(), idx.span)),\n\n )\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 37, "score": 232145.0282527348 }, { "content": "pub fn style_primitive(primitive: &Primitive) -> &'static str {\n\n match primitive {\n\n Primitive::Bytes(0) => \"c\", // centre 'missing' indicator\n\n Primitive::Int(_) | Primitive::Bytes(_) | Primitive::Decimal(_) => \"r\",\n\n _ => \"\",\n\n }\n\n}\n", "file_path": "src/data/primitive.rs", "rank": 38, "score": 231849.12909309525 }, { "content": "pub fn number(number: impl Into<Number>) -> Primitive {\n\n let number = number.into();\n\n\n\n match number {\n\n Number::Int(int) => Primitive::Int(int),\n\n Number::Decimal(decimal) => Primitive::Decimal(decimal),\n\n }\n\n}\n\n\n", "file_path": "src/data/primitive.rs", "rank": 39, "score": 231165.5094186429 }, { "content": "#[tracable_parser]\n\npub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {\n\n let original = input;\n\n\n\n let mut result = vec![];\n\n\n\n let (input, dotdot_result) = dotdot(input)?;\n\n result.push(dotdot_result);\n\n let (input, node_result) = tight_node(input)?;\n\n result.extend(node_result);\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 40, "score": 230194.5111242449 }, { "content": "pub fn date_from_str(s: Tagged<&str>) -> Result<UntaggedValue, ShellError> {\n\n let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {\n\n ShellError::labeled_error(\n\n &format!(\"Date parse error: {}\", err),\n\n \"original value\",\n\n s.tag,\n\n )\n\n })?;\n\n\n\n let date = date.with_timezone(&chrono::offset::Utc);\n\n\n\n Ok(UntaggedValue::Primitive(Primitive::Date(date)))\n\n}\n\n\n", "file_path": "src/data/value.rs", "rank": 41, "score": 229398.96622256294 }, { "content": "pub trait HasSpan {\n\n fn span(&self) -> Span;\n\n}\n\n\n\nimpl<T, E> HasSpan for Result<T, E>\n\nwhere\n\n T: HasSpan,\n\n{\n\n fn span(&self) -> Span {\n\n match self {\n\n Result::Ok(val) => val.span(),\n\n Result::Err(_) => Span::unknown(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> HasSpan for Spanned<T> {\n\n fn 
span(&self) -> Span {\n\n self.span\n\n }\n\n}\n\n\n", "file_path": "crates/nu-source/src/meta.rs", "rank": 42, "score": 228874.2957639374 }, { "content": "fn count(contents: &str, tag: impl Into<Tag>) -> Value {\n\n let mut lines: i64 = 0;\n\n let mut words: i64 = 0;\n\n let mut chars: i64 = 0;\n\n let bytes = contents.len() as i64;\n\n let mut end_of_word = true;\n\n\n\n for c in contents.chars() {\n\n chars += 1;\n\n\n\n match c {\n\n '\\n' => {\n\n lines += 1;\n\n end_of_word = true;\n\n }\n\n ' ' => end_of_word = true,\n\n _ => {\n\n if end_of_word {\n\n words += 1;\n\n }\n", "file_path": "src/commands/size.rs", "rank": 43, "score": 226560.81165061283 }, { "content": "#[derive(Debug, Clone, new)]\n\nstruct TupleSyntax<A, B> {\n\n first: A,\n\n second: B,\n\n}\n\n\n\nimpl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>\n\nwhere\n\n A: PrettyDebugWithSource,\n\n B: PrettyDebugWithSource,\n\n{\n\n fn pretty_debug(&self, source: &str) -> DebugDocBuilder {\n\n b::typed(\n\n \"pair\",\n\n self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),\n\n )\n\n }\n\n}\n\n\n\nimpl<A, B> HasFallibleSpan for TupleSyntax<A, B>\n\nwhere\n", "file_path": "crates/nu-parser/src/hir/signature.rs", "rank": 44, "score": 221101.64495740912 }, { "content": "#[derive(Debug, Copy, Clone, new)]\n\nstruct TupleShape<A, B> {\n\n first: A,\n\n second: B,\n\n}\n\n\n", "file_path": "crates/nu-parser/src/hir/signature.rs", "rank": 45, "score": 221101.55393575638 }, { "content": "fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {\n\n b::primitive(format!(\"{:?}\", name)) + b::delimit(\"(\", b::kind(ty.into()), \")\")\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 46, "score": 220565.7556707923 }, { "content": "pub fn int(i: impl Into<BigInt>) -> UntaggedValue {\n\n UntaggedValue::Primitive(Primitive::Int(i.into()))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 47, "score": 220199.63406194182 }, { "content": "pub fn decimal(i: impl Into<BigDecimal>) -> UntaggedValue {\n\n UntaggedValue::Primitive(Primitive::Decimal(i.into()))\n\n}\n\n\n\nimpl Into<UntaggedValue> for &Number {\n\n fn into(self) -> UntaggedValue {\n\n match self {\n\n Number::Int(i) => int(i.clone()),\n\n Number::Decimal(d) => decimal(d.clone()),\n\n }\n\n }\n\n}\n\n\n\nimpl PrettyDebug for Number {\n\n fn pretty(&self) -> DebugDocBuilder {\n\n match self {\n\n Number::Int(int) => b::primitive(int),\n\n Number::Decimal(decimal) => b::primitive(decimal),\n\n }\n\n }\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 48, "score": 220199.63406194182 }, { "content": "fn prim(name: impl std::fmt::Debug) -> DebugDocBuilder {\n\n b::primitive(format!(\"{:?}\", name))\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 49, "score": 217508.87410395656 }, { "content": "fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {\n\n b::kind(format!(\"{:?}\", name))\n\n}\n", "file_path": "crates/nu-protocol/src/value/debug.rs", "rank": 50, "score": 217508.87410395656 }, { "content": "/// Helper to create a pretty-print for the type\n\nfn ty(name: impl std::fmt::Display) -> DebugDocBuilder {\n\n b::kind(format!(\"{}\", name))\n\n}\n\n\n", "file_path": "crates/nu-protocol/src/type_shape.rs", "rank": 51, "score": 217508.87410395656 }, { "content": "/// Prepares a list of \"sounds like\" matches for the string you're trying to find\n\npub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, 
String)>> {\n\n let field_tried = match &field_tried.unspanned {\n\n UnspannedPathMember::String(string) => string.clone(),\n\n UnspannedPathMember::Int(int) => format!(\"{}\", int),\n\n };\n\n\n\n let possibilities = obj_source.data_descriptors();\n\n\n\n let mut possible_matches: Vec<_> = possibilities\n\n .into_iter()\n\n .map(|x| {\n\n let word = x;\n\n let distance = natural::distance::levenshtein_distance(&word, &field_tried);\n\n\n\n (distance, word)\n\n })\n\n .collect();\n\n\n\n if !possible_matches.is_empty() {\n\n possible_matches.sort();\n\n Some(possible_matches)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "crates/nu-protocol/src/value/column_path.rs", "rank": 52, "score": 214148.44249314093 }, { "content": "pub fn format_leaf<'a>(value: impl Into<&'a UntaggedValue>) -> DebugDocBuilder {\n\n InlineShape::from_value(value.into()).format().pretty()\n\n}\n\n\n", "file_path": "src/data/value.rs", "rank": 53, "score": 213578.50895292693 }, { "content": "pub fn executable_path() -> PathBuf {\n\n let mut path = binaries();\n\n path.push(\"nu\");\n\n path\n\n}\n\n\n", "file_path": "crates/nu-test-support/src/fs.rs", "rank": 54, "score": 213397.68227070334 }, { "content": "pub fn per_item_command(command: impl PerItemCommand + 'static) -> Arc<Command> {\n\n Arc::new(Command::PerItem(Arc::new(command)))\n\n}\n", "file_path": "src/commands/command.rs", "rank": 55, "score": 211919.39769398537 }, { "content": "pub fn whole_stream_command(command: impl WholeStreamCommand + 'static) -> Arc<Command> {\n\n Arc::new(Command::WholeStream(Arc::new(command)))\n\n}\n\n\n", "file_path": "src/commands/command.rs", "rank": 56, "score": 211919.39769398537 }, { "content": "fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n let mut sig = TaggedListBuilder::new(&tag);\n\n\n\n for arg in signature.positional.iter() {\n\n let is_required = match arg.0 {\n\n PositionalType::Mandatory(_, _) => true,\n\n PositionalType::Optional(_, _) => false,\n\n };\n\n\n\n sig.push_value(for_spec(arg.0.name(), \"argument\", is_required, &tag));\n\n }\n\n\n\n if signature.rest_positional.is_some() {\n\n let is_required = false;\n\n sig.push_value(for_spec(\"rest\", \"argument\", is_required, &tag));\n\n }\n\n\n\n for (name, ty) in signature.named.iter() {\n\n match ty.0 {\n\n NamedType::Mandatory(_) => sig.push_value(for_spec(name, \"flag\", true, &tag)),\n\n NamedType::Optional(_) => sig.push_value(for_spec(name, \"flag\", false, &tag)),\n\n NamedType::Switch => sig.push_value(for_spec(name, \"switch\", false, &tag)),\n\n }\n\n }\n\n\n\n sig.into_value()\n\n}\n", "file_path": "src/data/command.rs", "rank": 57, "score": 208544.82113247743 }, { "content": "fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into<Tag> + Clone) -> Value {\n\n match value {\n\n ValueRef::Null => {\n\n UntaggedValue::Primitive(Primitive::String(String::from(\"\"))).into_value(tag)\n\n }\n\n ValueRef::Integer(i) => UntaggedValue::int(i).into_value(tag),\n\n ValueRef::Real(f) => UntaggedValue::decimal(f).into_value(tag),\n\n ValueRef::Text(s) => {\n\n // this unwrap is safe because we know the ValueRef is Text.\n\n UntaggedValue::Primitive(Primitive::String(String::from_utf8_lossy(s).to_string()))\n\n .into_value(tag)\n\n }\n\n ValueRef::Blob(u) => UntaggedValue::binary(u.to_owned()).into_value(tag),\n\n }\n\n}\n\n\n", "file_path": "src/commands/from_sqlite.rs", "rank": 58, "score": 208521.309562498 }, { "content": "pub fn format_type<'a>(value: impl Into<&'a UntaggedValue>, width: 
usize) -> String {\n\n Type::from_value(value.into()).colored_string(width)\n\n}\n\n\n", "file_path": "src/data/value.rs", "rank": 59, "score": 207825.95157962752 }, { "content": "pub fn config_path() -> Result<PathBuf, ShellError> {\n\n app_path(AppDataType::UserConfig, \"config\")\n\n}\n\n\n", "file_path": "src/data/config.rs", "rank": 60, "score": 207684.45975878712 }, { "content": "pub fn default_path() -> Result<PathBuf, ShellError> {\n\n default_path_for(&None)\n\n}\n\n\n", "file_path": "src/data/config.rs", "rank": 61, "score": 207684.45975878712 }, { "content": "pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec<ShapeResult> {\n\n let tokens = parse_pipeline(line);\n\n\n\n match tokens {\n\n Err(_) => vec![],\n\n Ok(v) => {\n\n let pipeline = match v.as_pipeline() {\n\n Err(_) => return vec![],\n\n Ok(v) => v,\n\n };\n\n\n\n let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];\n\n let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());\n\n\n\n let shapes = {\n\n // We just constructed a token list that only contains a pipeline, so it can't fail\n\n let result = tokens.expand_infallible(PipelineShape);\n\n\n\n if let Some(failure) = result.failed {\n\n errln!(\n", "file_path": "crates/nu-parser/src/lib.rs", "rank": 62, "score": 207344.05091534852 }, { "content": "pub fn continue_parsing_positionals(\n\n config: &Signature,\n\n tail: &mut TokensIterator,\n\n rest_signature: &mut Signature,\n\n command_span: Span,\n\n) -> Result<Vec<SpannedExpression>, ParseError> {\n\n let mut positional = vec![];\n\n\n\n for arg in &config.positional {\n\n trace!(target: \"nu::parse::trace_remaining\", \"Processing positional {:?}\", arg);\n\n\n\n tail.move_to(0);\n\n\n\n let result = expand_spaced_expr(arg.0.syntax_type(), tail);\n\n\n\n match result {\n\n Err(_) => match &arg.0 {\n\n PositionalType::Mandatory(..) 
=> {\n\n return Err(ParseError::argument_error(\n\n config.name.clone().spanned(command_span),\n", "file_path": "crates/nu-parser/src/parse_command.rs", "rank": 63, "score": 207229.02023331902 }, { "content": "fn evaluate_literal(literal: &hir::Literal, span: Span, source: &Text) -> Value {\n\n match &literal {\n\n hir::Literal::ColumnPath(path) => {\n\n let members = path\n\n .iter()\n\n .map(|member| member.to_path_member(source))\n\n .collect();\n\n\n\n UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(members)))\n\n .into_value(span)\n\n }\n\n hir::Literal::Number(int) => match int {\n\n nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(span),\n\n nu_parser::Number::Decimal(d) => UntaggedValue::decimal(d.clone()).into_value(span),\n\n },\n\n hir::Literal::Size(int, unit) => unit.compute(&int).into_value(span),\n\n hir::Literal::String(tag) => UntaggedValue::string(tag.slice(source)).into_value(span),\n\n hir::Literal::GlobPattern(pattern) => UntaggedValue::pattern(pattern).into_value(span),\n\n hir::Literal::Bare => UntaggedValue::string(span.slice(source)).into_value(span),\n\n }\n\n}\n\n\n", "file_path": "src/evaluate/evaluator.rs", "rank": 64, "score": 206665.43246293886 }, { "content": "fn is_dir_empty(d: &PathBuf) -> bool {\n\n match d.read_dir() {\n\n Err(_e) => true,\n\n Ok(mut s) => s.next().is_none(),\n\n }\n\n}\n", "file_path": "src/shell/filesystem_shell.rs", "rank": 65, "score": 205947.65911318347 }, { "content": "pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n match v {\n\n toml::Value::Boolean(b) => UntaggedValue::boolean(*b).into_value(tag),\n\n toml::Value::Integer(n) => UntaggedValue::int(*n).into_value(tag),\n\n toml::Value::Float(n) => UntaggedValue::decimal(*n).into_value(tag),\n\n toml::Value::String(s) => {\n\n UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(tag)\n\n }\n\n toml::Value::Array(a) => UntaggedValue::Table(\n\n a.iter()\n\n .map(|x| convert_toml_value_to_nu_value(x, &tag))\n\n .collect(),\n\n )\n\n .into_value(tag),\n\n toml::Value::Datetime(dt) => {\n\n UntaggedValue::Primitive(Primitive::String(dt.to_string())).into_value(tag)\n\n }\n\n toml::Value::Table(t) => {\n", "file_path": "src/commands/from_toml.rs", "rank": 66, "score": 205755.44799180716 }, { "content": "pub fn replace_data_at_column_path(\n\n value: &Value,\n\n split_path: &ColumnPath,\n\n replaced_value: Value,\n\n) -> Option<Value> {\n\n let mut new_obj: Value = value.clone();\n\n let mut current = &mut new_obj;\n\n let split_path = split_path.members();\n\n\n\n for idx in 0..split_path.len() {\n\n match get_mut_data_by_member(current, &split_path[idx]) {\n\n Some(next) => {\n\n if idx == (split_path.len() - 1) {\n\n *next = replaced_value.value.into_value(&value.tag);\n\n return Some(new_obj);\n\n } else {\n\n current = next;\n\n }\n\n }\n\n None => {\n\n return None;\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 67, "score": 204226.0073625684 }, { "content": "pub fn insert_data_at_column_path(\n\n value: &Value,\n\n split_path: &ColumnPath,\n\n new_value: Value,\n\n) -> Result<Value, ShellError> {\n\n if let Some((last, front)) = split_path.split_last() {\n\n let mut original = value.clone();\n\n\n\n let mut current: &mut Value = &mut original;\n\n\n\n for member in front {\n\n let type_name = current.spanned_type_name();\n\n\n\n current = get_mut_data_by_member(current, &member).ok_or_else(|| {\n\n 
ShellError::missing_property(\n\n member.plain_string(std::usize::MAX).spanned(member.span),\n\n type_name,\n\n )\n\n })?\n\n }\n", "file_path": "crates/nu-value-ext/src/lib.rs", "rank": 68, "score": 204226.0073625684 }, { "content": "fn is_path_separator(c: char) -> bool {\n\n match c {\n\n '\\\\' | '/' | ':' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 69, "score": 203025.04234455203 }, { "content": "pub fn from_json_string_to_value(s: String, tag: impl Into<Tag>) -> serde_hjson::Result<Value> {\n\n let v: serde_hjson::Value = serde_hjson::from_str(&s)?;\n\n Ok(convert_json_value_to_nu_value(&v, tag))\n\n}\n\n\n", "file_path": "src/commands/from_json.rs", "rank": 70, "score": 202976.69621918298 }, { "content": "pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n let v: serde_yaml::Value = serde_yaml::from_str(&s).map_err(|x| {\n\n ShellError::labeled_error(\n\n format!(\"Could not load yaml: {}\", x),\n\n \"could not load yaml from text\",\n\n &tag,\n\n )\n\n })?;\n\n Ok(convert_yaml_value_to_nu_value(&v, tag)?)\n\n}\n\n\n", "file_path": "src/commands/from_yaml.rs", "rank": 71, "score": 202976.69621918298 }, { "content": "pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {\n\n let parsed = roxmltree::Document::parse(&s)?;\n\n Ok(from_document_to_value(&parsed, tag))\n\n}\n\n\n", "file_path": "src/commands/from_xml.rs", "rank": 72, "score": 200374.41709087836 }, { "content": "struct Match {\n\n column: String,\n\n regex: Regex,\n\n}\n\n\n\nimpl Match {\n\n #[allow(clippy::trivial_regex)]\n\n fn new() -> Result<Self, Box<dyn std::error::Error>> {\n\n Ok(Match {\n\n column: String::new(),\n\n regex: Regex::new(\"\")?,\n\n })\n\n }\n\n}\n\n\n\nimpl Plugin for Match {\n\n fn config(&mut self) -> Result<Signature, ShellError> {\n\n Ok(Signature::build(\"match\")\n\n .desc(\"filter rows by regex\")\n\n .required(\"member\", SyntaxShape::Member, \"the column name to match\")\n", "file_path": "crates/nu_plugin_match/src/main.rs", "rank": 73, "score": 198840.70920218347 }, { "content": "// NOTE: could this be useful more widely and implemented on Value ?\n\npub fn clone_tagged_value(v: &Value) -> Value {\n\n match &v.value {\n\n UntaggedValue::Primitive(Primitive::String(s)) => {\n\n UntaggedValue::Primitive(Primitive::String(s.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Nothing) => {\n\n UntaggedValue::Primitive(Primitive::Nothing)\n\n }\n\n UntaggedValue::Primitive(Primitive::Boolean(b)) => {\n\n UntaggedValue::Primitive(Primitive::Boolean(*b))\n\n }\n\n UntaggedValue::Primitive(Primitive::Decimal(f)) => {\n\n UntaggedValue::Primitive(Primitive::Decimal(f.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Int(i)) => {\n\n UntaggedValue::Primitive(Primitive::Int(i.clone()))\n\n }\n\n UntaggedValue::Primitive(Primitive::Path(x)) => {\n\n UntaggedValue::Primitive(Primitive::Path(x.clone()))\n\n }\n", "file_path": "src/commands/to_delimited_data.rs", "rank": 74, "score": 198254.92605509717 }, { "content": "#[tracable_parser]\n\npub fn whitespace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let left = input.offset;\n\n let (input, ws1) = space1(input)?;\n\n let right = input.offset;\n\n\n\n Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right))))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 75, "score": 197662.67689971917 }, { "content": 
"#[tracable_parser]\n\npub fn var(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = tag(\"$\")(input)?;\n\n let (input, name) = one_of(tag(\"it\"), ident)(input)?;\n\n let end = input.offset;\n\n\n\n match name {\n\n OneOf::First(it) => Ok((input, TokenTreeBuilder::spanned_it_var(it, (start, end)))),\n\n OneOf::Second(name) => Ok((input, TokenTreeBuilder::spanned_var(name, (start, end)))),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 76, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn external(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = tag(\"^\")(input)?;\n\n let (input, bare) = take_while(is_file_char)(input)?;\n\n let end = input.offset;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 77, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n alt((sq_string, dq_string))(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 78, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn any_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n alt((int_member, string, bare_member, garbage_member))(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 79, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn pipeline(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, head) = spaced_token_list(input)?;\n\n let (input, items) = many0(tuple((tag(\"|\"), spaced_token_list)))(input)?;\n\n\n\n if input.input_len() != 0 {\n\n return Err(Err::Error(error_position!(\n\n input,\n\n nom::error::ErrorKind::Eof\n\n )));\n\n }\n\n\n\n let end = input.offset;\n\n\n\n let head_span = head.span;\n\n let mut all_items: Vec<PipelineElement> = vec![PipelineElement::new(None, head)];\n\n\n\n all_items.extend(items.into_iter().map(|(pipe, items)| {\n\n let items_span = items.span;\n\n PipelineElement::new(Some(Span::from(pipe)), items)\n\n }));\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 80, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn module(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let (input, tokens) = spaced_token_list(input)?;\n\n\n\n if input.input_len() != 0 {\n\n return Err(Err::Error(error_position!(\n\n input,\n\n nom::error::ErrorKind::Eof\n\n )));\n\n }\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 81, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn leaf(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?;\n\n\n\n Ok((input, node))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 82, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn separator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let left = input.offset;\n\n let (input, ws1) = alt((tag(\";\"), tag(\"\\n\")))(input)?;\n\n let right = 
input.offset;\n\n\n\n Ok((input, TokenTreeBuilder::spanned_sep(Span::new(left, right))))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 83, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn nodes(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let (input, tokens) = token_list(input)?;\n\n let span = tokens.span;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 84, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn operator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?;\n\n\n\n Ok((input, operator))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 85, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn pattern(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n word(\n\n start_pattern,\n\n matches(is_glob_char),\n\n TokenTreeBuilder::spanned_pattern,\n\n )(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 86, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn filename(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start_pos = input.offset;\n\n\n\n let (mut input, mut saw_special) = match start_file_char(input) {\n\n Err(err) => return Err(err),\n\n Ok((input, special)) => (input, special),\n\n };\n\n\n\n loop {\n\n if saw_special.is_empty() {\n\n match continue_file_char(input) {\n\n Err(_) => {\n\n return Ok((\n\n input,\n\n TokenTreeBuilder::spanned_bare((start_pos, input.offset)),\n\n ))\n\n }\n\n Ok((next_input, special)) => {\n\n saw_special |= special;\n\n input = next_input;\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 87, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn flag(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = tag(\"--\")(input)?;\n\n let (input, bare) = filename(input)?;\n\n let end = input.offset;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 88, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn shorthand(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = tag(\"-\")(input)?;\n\n let (input, bare) = filename(input)?;\n\n let end = input.offset;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 89, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn number(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let (input, number) = raw_number(input)?;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_number(number, number.span()),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 90, "score": 197662.67689971917 }, { "content": "#[tracable_parser]\n\npub fn comment(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let left = input.offset;\n\n let (input, start) = tag(\"#\")(input)?;\n\n let (input, rest) = not_line_ending(input)?;\n\n let right = input.offset;\n\n\n\n let span = (start.offset + 1, right);\n\n\n\n Ok((\n\n input,\n\n 
TokenTreeBuilder::spanned_comment(span, Span::new(left, right)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 91, "score": 197662.67689971917 }, { "content": "pub fn default_path_for(file: &Option<PathBuf>) -> Result<PathBuf, ShellError> {\n\n let filename = &mut config_path()?;\n\n let filename = match file {\n\n None => {\n\n filename.push(\"config.toml\");\n\n filename\n\n }\n\n Some(file) => {\n\n filename.push(file);\n\n filename\n\n }\n\n };\n\n\n\n Ok(filename.clone())\n\n}\n\n\n", "file_path": "src/data/config.rs", "rank": 92, "score": 197474.73056538013 }, { "content": "pub fn from_bson_bytes_to_value(bytes: Vec<u8>, tag: impl Into<Tag>) -> Result<Value, ShellError> {\n\n let mut docs = Vec::new();\n\n let mut b_reader = BytesReader::new(bytes);\n\n while let Ok(v) = decode_document(&mut b_reader) {\n\n docs.push(Bson::Document(v));\n\n }\n\n\n\n convert_bson_value_to_nu_value(&Bson::Array(docs), tag)\n\n}\n\n\n", "file_path": "src/commands/from_bson.rs", "rank": 93, "score": 195884.93204658886 }, { "content": "#[tracable_parser]\n\npub fn int_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, head) = digit1(input)?;\n\n\n\n match input.fragment.chars().next() {\n\n None | Some('.') => Ok((\n\n input,\n\n Token::Number(RawNumber::int((start, input.offset)))\n\n .into_spanned((start, input.offset)),\n\n )),\n\n other if is_boundary(other) => Ok((\n\n input,\n\n Token::Number(RawNumber::int((start, input.offset)))\n\n .into_spanned((start, input.offset)),\n\n )),\n\n _ => Err(nom::Err::Error(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::Tag,\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 94, "score": 195649.86139223908 }, { "content": "#[tracable_parser]\n\npub fn dq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = char('\"')(input)?;\n\n let start1 = input.offset;\n\n let (input, _) = many0(none_of(\"\\\"\"))(input)?;\n\n let end1 = input.offset;\n\n let (input, _) = char('\"')(input)?;\n\n let end = input.offset;\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 95, "score": 195649.86139223908 }, { "content": "#[tracable_parser]\n\npub fn start_pattern(input: NomSpan) -> IResult<NomSpan, NomSpan> {\n\n alt((take_while1(is_dot), matches(is_start_glob_char)))(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 96, "score": 195649.86139223908 }, { "content": "#[tracable_parser]\n\npub fn bare_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n word(\n\n matches(is_start_member_char),\n\n matches(is_member_char),\n\n TokenTreeBuilder::spanned_bare,\n\n )(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 97, "score": 195649.86139223908 }, { "content": "#[tracable_parser]\n\npub fn sq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {\n\n let start = input.offset;\n\n let (input, _) = char('\\'')(input)?;\n\n let start1 = input.offset;\n\n let (input, _) = many0(none_of(\"\\'\"))(input)?;\n\n let end1 = input.offset;\n\n let (input, _) = char('\\'')(input)?;\n\n let end = input.offset;\n\n\n\n Ok((\n\n input,\n\n TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),\n\n ))\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", 
"rank": 98, "score": 195649.86139223908 }, { "content": "pub fn start_filename(input: NomSpan) -> IResult<NomSpan, NomSpan> {\n\n alt((take_while1(is_dot), matches(is_start_file_char)))(input)\n\n}\n\n\n", "file_path": "crates/nu-parser/src/parse/parser.rs", "rank": 99, "score": 195649.86139223908 } ]
lang: Rust
file_path: src/token.rs
repo_name: illumination-k/rs9cc
commit: 6c2570e5d250435267a331b0a4d66ca052cb477d
use std::iter::Peekable; use std::collections::HashSet; #[derive(Debug, Clone)] struct OpWords { op_words: HashSet<String>, max_length: usize, } impl OpWords { fn new(op_words: Vec<&str>) -> Self { let max_length = op_words.iter().map(|x| x.len()).max().unwrap(); Self { op_words: op_words.into_iter().map(|x| x.to_string()).collect(), max_length: max_length, } } fn contains(&self, x: &str) -> bool { self.op_words.contains(x) } fn contains_u8(&self, x: &[u8]) -> bool { unsafe { self.contains(String::from_utf8_unchecked(x.to_vec()).as_ref()) } } fn ops(&self, len: usize) -> Vec<String> { self.op_words.iter().filter(|x| x.len() == len).map(|x| x.clone()).collect() } } impl Default for OpWords { fn default() -> Self { let op_words = vec!["+", "-", "/", "*", "==", "=!", ">=", "<=", "<", ">", "(", ")"]; OpWords::new(op_words) } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum TokenKind { TkReserved, TkNum, } #[derive(Debug, Clone)] pub struct Token { pub token_kind: TokenKind, pub val: String, } impl Token { pub fn new(token_kind: TokenKind, val: String) -> Self { Self { token_kind, val } } } pub struct TokenIter { s: String, op_words: OpWords, } impl Iterator for TokenIter { type Item = Token; fn next(&mut self) -> Option<Self::Item> { self.s = self.s.trim().to_string(); if self.s.is_empty() { return None } let mut bytes = std::collections::VecDeque::from(self.s.to_owned().as_bytes().to_vec()); let mut val = vec![]; let token_kind = if bytes[0].is_ascii_digit() { TokenKind::TkNum } else { TokenKind::TkReserved }; match token_kind { TokenKind::TkNum => { while let Some(byte) = bytes.pop_front() { if byte.is_ascii_digit() { val.push(byte) } else { bytes.push_front(byte); break; } } }, TokenKind::TkReserved => { while let Some(byte) = bytes.pop_front() { if !byte.is_ascii_digit() && byte != b' ' { val.push(byte); } else { bytes.push_front(byte); break; } } let mut now_length = self.op_words.max_length; while now_length > 0 { if now_length <= val.len() { if self.op_words.contains_u8(&val[..now_length]) { for &v in val[now_length..].iter().rev() { bytes.push_front(v) } val = val[..now_length].to_vec(); break; } } now_length -= 1; } } } unsafe { self.s = String::from_utf8_unchecked(bytes.into_iter().collect()); Some(Token::new(token_kind, String::from_utf8_unchecked(val))) } } } pub trait TokenExt { fn tokenize(&self) -> TokenIter; } impl TokenExt for String { fn tokenize(&self) -> TokenIter { TokenIter { s: self.to_owned(), op_words: Default::default(), } } } pub fn consume(op: &str, iter: &mut Peekable<TokenIter>) -> bool { match iter.peek() { Some(t) => { if t.token_kind == TokenKind::TkReserved && &t.val == op { iter.next(); true } else { false } }, None => false } } #[cfg(test)] mod test { use super::*; #[test] fn test_op_words() { let mut op_words: OpWords = Default::default(); assert!(op_words.contains("*")); assert!(op_words.contains("(")); assert!(op_words.contains_u8(b"(")); assert!(!op_words.contains_u8(b"!=)")); } #[test] fn test_tokenizer() { let s = "13 + 2 - 3".to_string(); let vals = vec!["13", "+", "2", "-", "3"].iter().map(|x| x.to_string()).collect::<Vec<String>>(); let kinds = vec![TokenKind::TkNum, TokenKind::TkReserved, TokenKind::TkNum, TokenKind::TkReserved, TokenKind::TkNum]; let mut dvals = vec![]; let mut dkinds = vec![]; for t in s.tokenize() { dvals.push(t.val); dkinds.push(t.token_kind) } assert_eq!(vals, dvals); assert_eq!(kinds, dkinds); } }
use std::iter::Peekable; use std::collections::HashSet; #[derive(Debug, Clone)] struct OpWords { op_words: HashSet<String>, max_length: usize, } impl OpWords { fn new(op_words: Vec<&str>) -> Self { let max_length = op_words.iter().map(|x| x.len()).max().unwrap(); Self { op_words: op_words.into_iter().map(|x| x.to_string()).collect(), max_length: max_length, } } fn contains(&self, x: &str) -> bool { self.op_words.contains(x) } fn contains_u8(&self, x: &[u8]) -> bool { unsafe { self.contains(String::from_utf8_unchecked(x.to_vec()).as_ref()) } } fn ops(&self, len: usize) -> Vec<String> { self.op_words.iter().filter(|x| x.len() == len).map(|x| x.clone()).collect() } } impl Default for OpWords { fn default() -> Self { let op_words = vec!["+", "-", "/", "*", "==", "=!", ">=", "<=", "<", ">", "(", ")"]; OpWords::new(op_words) } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum TokenKind { TkReserved, TkNum, } #[derive(Debug, Clone)] pub struct Token { pub token_kind: TokenKind, pub val: String, } impl Token { pub fn new(token_kind: TokenKind, val: String) -> Self { Self { token_kind, val } } } pub struct TokenIter { s: String, op_words: OpWords, } impl Iterator for TokenIter { type Item = Token; fn next(&mut self) -> Option<Self::Item> { self.s = self.s.trim().to_string(); if self.s.is_empty() { return None } let mut bytes = std::collections::VecDeque::from(self.s.to_owned().as_bytes().to_vec()); let mut val = vec![]; let token_kind =
&v in val[now_length..].iter().rev() { bytes.push_front(v) } val = val[..now_length].to_vec(); break; } } now_length -= 1; } } } unsafe { self.s = String::from_utf8_unchecked(bytes.into_iter().collect()); Some(Token::new(token_kind, String::from_utf8_unchecked(val))) } } } pub trait TokenExt { fn tokenize(&self) -> TokenIter; } impl TokenExt for String { fn tokenize(&self) -> TokenIter { TokenIter { s: self.to_owned(), op_words: Default::default(), } } } pub fn consume(op: &str, iter: &mut Peekable<TokenIter>) -> bool { match iter.peek() { Some(t) => { if t.token_kind == TokenKind::TkReserved && &t.val == op { iter.next(); true } else { false } }, None => false } } #[cfg(test)] mod test { use super::*; #[test] fn test_op_words() { let mut op_words: OpWords = Default::default(); assert!(op_words.contains("*")); assert!(op_words.contains("(")); assert!(op_words.contains_u8(b"(")); assert!(!op_words.contains_u8(b"!=)")); } #[test] fn test_tokenizer() { let s = "13 + 2 - 3".to_string(); let vals = vec!["13", "+", "2", "-", "3"].iter().map(|x| x.to_string()).collect::<Vec<String>>(); let kinds = vec![TokenKind::TkNum, TokenKind::TkReserved, TokenKind::TkNum, TokenKind::TkReserved, TokenKind::TkNum]; let mut dvals = vec![]; let mut dkinds = vec![]; for t in s.tokenize() { dvals.push(t.val); dkinds.push(t.token_kind) } assert_eq!(vals, dvals); assert_eq!(kinds, dkinds); } }
if bytes[0].is_ascii_digit() { TokenKind::TkNum } else { TokenKind::TkReserved }; match token_kind { TokenKind::TkNum => { while let Some(byte) = bytes.pop_front() { if byte.is_ascii_digit() { val.push(byte) } else { bytes.push_front(byte); break; } } }, TokenKind::TkReserved => { while let Some(byte) = bytes.pop_front() { if !byte.is_ascii_digit() && byte != b' ' { val.push(byte); } else { bytes.push_front(byte); break; } } let mut now_length = self.op_words.max_length; while now_length > 0 { if now_length <= val.len() { if self.op_words.contains_u8(&val[..now_length]) { for
strategy: function_block-random_span
[ { "content": "pub fn primary(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n if consume(\"(\", tokenizer) {\n\n let node = expr(tokenizer);\n\n let _expect = consume(\")\", tokenizer);\n\n return node \n\n }\n\n\n\n match tokenizer.peek() {\n\n Some(t) => {\n\n if t.token_kind != TokenKind::TkNum {\n\n return expr(tokenizer)\n\n }\n\n }\n\n None => { unreachable!() }\n\n }\n\n\n\n Box::new(Node::new_num_node(tokenizer.next().expect(\"NdNum is expected\").val))\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 1, "score": 95131.5213937948 }, { "content": "pub fn mul(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n let mut node = unary(tokenizer);\n\n loop {\n\n if consume(\"*\", tokenizer) {\n\n node = Box::new(Node::new(\n\n NodeKind::NdMul, Some(node), Some(unary(tokenizer)), None\n\n ))\n\n } else if consume(\"/\", tokenizer) {\n\n node = Box::new(Node::new(\n\n NodeKind::NdDiv, Some(node), Some(unary(tokenizer)), None\n\n ))\n\n } else {\n\n return node\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 2, "score": 95131.5213937948 }, { "content": "pub fn add(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n let mut node = mul(tokenizer);\n\n loop {\n\n if consume(\"+\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdADD, node, mul(tokenizer))\n\n } else if consume(\"-\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdSub, node, mul(tokenizer))\n\n } else {\n\n return node\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 3, "score": 95131.5213937948 }, { "content": "pub fn relational(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n let mut node = add(tokenizer);\n\n\n\n loop {\n\n if consume(\"<\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdLt, node, add(tokenizer))\n\n } else if consume(\"<=\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdLe, node, add(tokenizer))\n\n } else if consume(\">\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdLt, add(tokenizer), node)\n\n } else if consume(\"<=\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdLe, add(tokenizer), node)\n\n } else {\n\n return node\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 4, "score": 95131.5213937948 }, { "content": "pub fn equaility(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n let mut node = relational(tokenizer);\n\n\n\n loop {\n\n if consume(\"==\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdEq, node, relational(tokenizer))\n\n } else if consume(\"!=\", tokenizer) {\n\n node = Node::new_binary_with_box(NodeKind::NdNe, node, relational(tokenizer))\n\n } else {\n\n return node\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 5, "score": 95131.52139379477 }, { "content": "pub fn unary(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n if consume(\"+\", tokenizer) {\n\n return primary(tokenizer)\n\n }\n\n\n\n if consume(\"-\", tokenizer) {\n\n return Node::new_binary_with_box(NodeKind::NdSub, Node::new_num_node_with_box(0), primary(tokenizer))\n\n }\n\n\n\n primary(tokenizer)\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 6, "score": 95131.52139379477 }, { "content": "pub fn expr(tokenizer: &mut Peekable<TokenIter>) -> Box<Node> {\n\n equaility(tokenizer)\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 7, "score": 95131.5213937948 }, { "content": "#[allow(unreachable_patterns)]\n\npub fn get_val(node: &Node) -> String {\n\n match node.kind() {\n\n NodeKind::NdADD => { \"plus\".to_string() },\n\n 
NodeKind::NdSub => { \"sub\".to_string() },\n\n NodeKind::NdDiv => { \"div\".to_string()},\n\n NodeKind::NdMul => { \"mul\".to_string() },\n\n NodeKind::NdEq => { \"eq\".to_string() },\n\n NodeKind::NdNe => { \"ne\".to_string() },\n\n NodeKind::NdLe => { \"le\".to_string() },\n\n NodeKind::NdLt => { \"lt\".to_string() },\n\n NodeKind::NdNum => { node.val().unwrap() },\n\n _ => { unimplemented!() }\n\n }\n\n} \n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n \n\n #[test]\n", "file_path": "src/node.rs", "rank": 8, "score": 92894.94294537319 }, { "content": "fn make_dot_node(node_cnt: usize, node_val: String) -> String {\n\n format!(\" {} [ label = {} ];\", node_cnt, node_val)\n\n}\n\n\n", "file_path": "src/dot.rs", "rank": 9, "score": 85165.8528214278 }, { "content": "fn make_dot_edge(source_cnt: usize, target_cnt: usize) -> String {\n\n format!(\" {} -> {};\", source_cnt, target_cnt)\n\n}\n\n\n\nimpl Dot {\n\n pub fn new() -> Self{\n\n Self {\n\n counter: 0,\n\n node_vec: vec![],\n\n edge_vec: vec![]\n\n }\n\n }\n\n \n\n fn update_count(&mut self) -> usize {\n\n self.counter += 1;\n\n self.counter\n\n }\n\n\n\n fn _rec_write(&mut self, node: &Box<Node>, pre_node_cnt: usize) {\n\n if node.lhs().is_none() && node.rhs().is_none() { return }\n", "file_path": "src/dot.rs", "rank": 10, "score": 67131.61197117658 }, { "content": "pub fn gen(node: &Box<Node>) {\n\n // dbg!(node);\n\n if node.deref().node_kind == NodeKind::NdNum {\n\n // println!(\"NdNum\");\n\n // dbg!(node);\n\n println!(\" push {}\", node.val.clone().expect(\"Not val in NdNum node\"));\n\n return;\n\n }\n\n\n\n gen(&node.deref().lhs.clone().expect(\"msg\"));\n\n gen(&node.deref().rhs.clone().expect(\"msg\"));\n\n \n\n println!(\" pop rdi\");\n\n println!(\" pop rax\");\n\n\n\n match node.deref().node_kind {\n\n NodeKind::NdADD => { println!(\" add rax, rdi\")},\n\n NodeKind::NdSub => { println!(\" sub rax, rdi\")},\n\n NodeKind::NdMul => { println!(\" imul rax, rdi\")},\n\n NodeKind::NdDiv => {\n\n println!(\" cqo\");\n\n println!(\" idiv rdi\");\n\n },\n\n _ => {}\n\n }\n\n\n\n println!(\" push rax\");\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 12, "score": 48369.90846591429 }, { "content": "use std::ops::Deref;\n\n\n\nuse crate::node::*;\n\n\n\npub struct Dot {\n\n counter: usize,\n\n node_vec: Vec<String>,\n\n edge_vec: Vec<String>,\n\n}\n\n\n", "file_path": "src/dot.rs", "rank": 22, "score": 8.36629744186588 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::token::*;\n\n #[test]\n\n fn test_dot_1() {\n\n let s = \"2*3+4*5\".to_string();\n\n let mut tokenizer = s.tokenize().peekable();\n\n\n\n let node = expr(&mut tokenizer);\n\n let mut dot = Dot::new();\n\n let s = dot.write(&node);\n\n assert_eq!(\n\n s,\n\n vec![\n\n \"digraph ast_tree {\",\n\n \" 0 [ label = plus ];\",\n\n \" 2 [ label = mul ];\",\n\n \" 1 [ label = mul ];\",\n", "file_path": "src/dot.rs", "rank": 23, "score": 7.74289808227947 }, { "content": "use std::{iter::Peekable, ops::Deref};\n\n\n\nuse crate::token::*;\n\n\n\n\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum NodeKind {\n\n NdADD, // +\n\n NdSub, // -\n\n NdMul, // *\n\n NdDiv, // /\n\n NdEq, // ==\n\n NdNe, // !=\n\n NdLt, // <\n\n NdLe, // <=\n\n NdNum, // Interger\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/node.rs", "rank": 24, "score": 7.368018237696758 }, { "content": " }\n\n\n\n pub fn val(&self) -> Option<String> {\n\n self.val.clone()\n\n }\n\n\n\n pub fn rhs(&self) -> Option<Box<Node>> {\n\n self.rhs.clone()\n\n }\n\n\n\n 
pub fn lhs(&self) -> Option<Box<Node>> {\n\n self.lhs.clone()\n\n }\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 25, "score": 6.882184804281346 }, { "content": " fn test_unary_node() {\n\n let s = \"- - 10\".to_string();\n\n let mut tokenizer= s.tokenize().peekable();\n\n let node = expr(&mut tokenizer);\n\n dbg!(&node);\n\n gen(&node);\n\n }\n\n\n\n #[test]\n\n fn test_eq() {\n\n let s = \"9+(-1+2)==10\".to_string();\n\n let mut tokenizer = s.tokenize().peekable();\n\n for t in s.tokenize() {\n\n println!(\"{:?}\", t);\n\n }\n\n let node = expr(&mut tokenizer);\n\n dbg!(&node);\n\n }\n\n}", "file_path": "src/node.rs", "rank": 26, "score": 6.519873176592611 }, { "content": "\n\n fn new_binary_with_box(node_kind: NodeKind, lhs: Box<Node>, rhs: Box<Node>) -> Box<Self> {\n\n Box::new(Node::new_binary(node_kind, lhs, rhs))\n\n }\n\n\n\n fn new_num_node<S: ToString>(val: S) -> Self {\n\n Self {\n\n node_kind: NodeKind::NdNum,\n\n lhs: None,\n\n rhs: None,\n\n val: Some(val.to_string())\n\n }\n\n }\n\n\n\n fn new_num_node_with_box<S: ToString>(val: S) -> Box<Self> {\n\n Box::new(Node::new_num_node(val))\n\n }\n\n\n\n pub fn kind(&self) -> &NodeKind {\n\n &self.node_kind\n", "file_path": "src/node.rs", "rank": 27, "score": 6.146619599289329 }, { "content": "pub struct Node {\n\n node_kind: NodeKind,\n\n lhs: Option<Box<Node>>,\n\n rhs: Option<Box<Node>>,\n\n val: Option<String>,\n\n}\n\n\n\nimpl Node {\n\n fn new(node_kind: NodeKind, lhs: Option<Box<Node>>, rhs: Option<Box<Node>>, val: Option<String>) -> Self {\n\n Self {\n\n node_kind,\n\n lhs,\n\n rhs,\n\n val: val\n\n }\n\n }\n\n\n\n fn new_binary(node_kind: NodeKind, lhs: Box<Node>, rhs: Box<Node>) -> Self {\n\n Node::new(node_kind, Some(lhs), Some(rhs), None)\n\n }\n", "file_path": "src/node.rs", "rank": 28, "score": 6.093837128188463 }, { "content": " fn test_node() {\n\n let s = \"1 + 2 * (3 - 1)\".to_string();\n\n let mut tokenizer = s.tokenize().peekable();\n\n\n\n let node = expr(&mut tokenizer);\n\n dbg!(node);\n\n }\n\n\n\n #[test]\n\n fn test_gen() {\n\n let s = \"-2*3+4*5\".to_string();\n\n let mut tokenizer = s.tokenize().peekable();\n\n\n\n let node = expr(&mut tokenizer);\n\n \n\n gen(&node);\n\n\n\n }\n\n\n\n #[test]\n", "file_path": "src/node.rs", "rank": 29, "score": 5.533299910423369 }, { "content": " let edge_lhs = make_dot_edge(pre_node_cnt, lhs_cnt);\n\n self.edge_vec.push(edge_rhs);\n\n self.edge_vec.push(edge_lhs);\n\n\n\n self._rec_write(&node.rhs().unwrap(), rhs_cnt);\n\n self._rec_write(&node.lhs().unwrap(), lhs_cnt);\n\n }\n\n\n\n pub fn write(&mut self, node: &Box<Node>) -> String {\n\n let mut res = vec![\n\n \"digraph ast_tree {\".to_string(),\n\n \n\n ];\n\n self._rec_write(node, 0);\n\n res.push(self.node_vec.join(\"\\n\").to_string());\n\n res.push(self.edge_vec.join(\"\\n\").to_string());\n\n res.push(\"}\".to_string());\n\n res.join(\"\\n\")\n\n }\n\n}\n", "file_path": "src/dot.rs", "rank": 30, "score": 4.854444936073927 }, { "content": "/*!\n\nThe purpose of this crate is to make **C** compiler by rust with reference to [9cc](www.sigbus.info/compilerbook)\n\n*/\n\n\n\npub mod node;\n\npub mod token;\n\npub mod dot;", "file_path": "src/lib.rs", "rank": 31, "score": 3.234034259218986 }, { "content": " let node_cnt = self.counter;\n\n if self.node_vec.is_empty() {\n\n let node_val = get_val(node.deref());\n\n let node_dot = make_dot_node(node_cnt, node_val);\n\n self.node_vec.push(node_dot);\n\n // self.counter += 1;\n\n }\n\n\n\n let rhs_val = get_val(&node.deref().rhs().unwrap());\n\n let rhs_cnt = 
self.update_count();\n\n let rhs_dot = make_dot_node(rhs_cnt, rhs_val);\n\n \n\n let lhs_val = get_val(&node.deref().lhs().unwrap());\n\n let lhs_cnt = self.update_count();\n\n let lhs_dot = make_dot_node(lhs_cnt, lhs_val);\n\n\n\n self.node_vec.push(lhs_dot);\n\n self.node_vec.push(rhs_dot);\n\n\n\n let edge_rhs = make_dot_edge(pre_node_cnt, rhs_cnt);\n", "file_path": "src/dot.rs", "rank": 32, "score": 3.23063123120105 } ]
Rust
beacon_node/store/src/block_at_slot.rs
JustinDrake/lighthouse
0694d1d0ec488d2d9f448a2bf5c6f742e1c5a5ed
use super::*;
use ssz::{Decode, DecodeError};

fn get_block_bytes<T: Store<E>, E: EthSpec>(
    store: &T,
    root: Hash256,
) -> Result<Option<Vec<u8>>, Error> {
    store.get_bytes(BeaconBlock::<E>::db_column().into(), &root[..])
}

fn read_slot_from_block_bytes(bytes: &[u8]) -> Result<Slot, DecodeError> {
    let end = std::cmp::min(Slot::ssz_fixed_len(), bytes.len());
    Slot::from_ssz_bytes(&bytes[0..end])
}

fn read_parent_root_from_block_bytes(bytes: &[u8]) -> Result<Hash256, DecodeError> {
    let previous_bytes = Slot::ssz_fixed_len();
    let slice = bytes
        .get(previous_bytes..previous_bytes + Hash256::ssz_fixed_len())
        .ok_or_else(|| DecodeError::BytesInvalid("Not enough bytes.".to_string()))?;
    Hash256::from_ssz_bytes(slice)
}

pub fn get_block_at_preceeding_slot<T: Store<E>, E: EthSpec>(
    store: &T,
    slot: Slot,
    start_root: Hash256,
) -> Result<Option<(Hash256, BeaconBlock<E>)>, Error> {
    Ok(
        match get_at_preceeding_slot::<_, E>(store, slot, start_root)? {
            Some((hash, bytes)) => Some((hash, BeaconBlock::<E>::from_ssz_bytes(&bytes)?)),
            None => None,
        },
    )
}

fn get_at_preceeding_slot<T: Store<E>, E: EthSpec>(
    store: &T,
    slot: Slot,
    mut root: Hash256,
) -> Result<Option<(Hash256, Vec<u8>)>, Error> {
    loop {
        if let Some(bytes) = get_block_bytes::<_, E>(store, root)? {
            let this_slot = read_slot_from_block_bytes(&bytes)?;
            if this_slot == slot {
                break Ok(Some((root, bytes)));
            } else if this_slot < slot {
                break Ok(None);
            } else {
                root = read_parent_root_from_block_bytes(&bytes)?;
            }
        } else {
            break Ok(None);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use ssz::Encode;
    use tree_hash::TreeHash;

    type BeaconBlock = types::BeaconBlock<MinimalEthSpec>;

    #[test]
    fn read_slot() {
        let spec = MinimalEthSpec::default_spec();

        let test_slot = |slot: Slot| {
            let mut block = BeaconBlock::empty(&spec);
            block.slot = slot;
            let bytes = block.as_ssz_bytes();
            assert_eq!(read_slot_from_block_bytes(&bytes).unwrap(), slot);
        };

        test_slot(Slot::new(0));
        test_slot(Slot::new(1));
        test_slot(Slot::new(42));
        test_slot(Slot::new(u64::max_value()));
    }

    #[test]
    fn bad_slot() {
        for i in 0..8 {
            assert!(read_slot_from_block_bytes(&vec![0; i]).is_err());
        }
    }

    #[test]
    fn read_parent_root() {
        let spec = MinimalEthSpec::default_spec();

        let test_root = |root: Hash256| {
            let mut block = BeaconBlock::empty(&spec);
            block.parent_root = root;
            let bytes = block.as_ssz_bytes();
            assert_eq!(read_parent_root_from_block_bytes(&bytes).unwrap(), root);
        };

        test_root(Hash256::random());
        test_root(Hash256::random());
        test_root(Hash256::random());
    }

    fn build_chain(
        store: &impl Store<MinimalEthSpec>,
        slots: &[usize],
        spec: &ChainSpec,
    ) -> Vec<(Hash256, BeaconBlock)> {
        let mut blocks_and_roots: Vec<(Hash256, BeaconBlock)> = vec![];

        for (i, slot) in slots.iter().enumerate() {
            let mut block = BeaconBlock::empty(spec);
            block.slot = Slot::from(*slot);

            if i > 0 {
                block.parent_root = blocks_and_roots[i - 1].0;
            }

            let root = Hash256::from_slice(&block.tree_hash_root());
            store.put(&root, &block).unwrap();
            blocks_and_roots.push((root, block));
        }

        blocks_and_roots
    }

    #[test]
    fn chain_without_skips() {
        let n: usize = 10;
        let store = MemoryStore::open();
        let spec = MinimalEthSpec::default_spec();

        let slots: Vec<usize> = (0..n).collect();
        let blocks_and_roots = build_chain(&store, &slots, &spec);

        for source in 1..n {
            for target in 0..=source {
                let (source_root, _source_block) = &blocks_and_roots[source];
                let (target_root, target_block) = &blocks_and_roots[target];

                let (found_root, found_block) = store
                    .get_block_at_preceeding_slot(*source_root, target_block.slot)
                    .unwrap()
                    .unwrap();

                assert_eq!(found_root, *target_root);
                assert_eq!(found_block, *target_block);
            }
        }
    }

    #[test]
    fn chain_with_skips() {
        let store = MemoryStore::<MinimalEthSpec>::open();
        let spec = MinimalEthSpec::default_spec();

        let slots = vec![0, 1, 2, 5];
        let blocks_and_roots = build_chain(&store, &slots, &spec);

        for target in 0..3 {
            let (source_root, _source_block) = &blocks_and_roots[3];
            let (target_root, target_block) = &blocks_and_roots[target];

            let (found_root, found_block) = store
                .get_block_at_preceeding_slot(*source_root, target_block.slot)
                .unwrap()
                .unwrap();

            assert_eq!(found_root, *target_root);
            assert_eq!(found_block, *target_block);
        }

        let (source_root, _source_block) = &blocks_and_roots[3];
        assert!(store
            .get_block_at_preceeding_slot(*source_root, Slot::new(3))
            .unwrap()
            .is_none());

        let (source_root, _source_block) = &blocks_and_roots[3];
        assert!(store
            .get_block_at_preceeding_slot(*source_root, Slot::new(3))
            .unwrap()
            .is_none());
    }
}
use super::*;
use ssz::{Decode, DecodeError};

fn get_block_bytes<T: Store<E>, E: EthSpec>(
    store: &T,
    root: Hash256,
) -> Result<Option<Vec<u8>>, Error> {
    store.get_bytes(BeaconBlock::<E>::db_column().into(), &root[..])
}

fn read_slot_from_block_bytes(bytes: &[u8]) -> Result<Slot, DecodeError> {
    let end = std::cmp::min(Slot::ssz_fixed_len(), bytes.len());
    Slot::from_ssz_bytes(&bytes[0..end])
}

fn read_parent_root_from_block_byte
pub fn get_block_at_preceeding_slot<T: Store<E>, E: EthSpec>(
    store: &T,
    slot: Slot,
    start_root: Hash256,
) -> Result<Option<(Hash256, BeaconBlock<E>)>, Error> {
    Ok(
        match get_at_preceeding_slot::<_, E>(store, slot, start_root)? {
            Some((hash, bytes)) => Some((hash, BeaconBlock::<E>::from_ssz_bytes(&bytes)?)),
            None => None,
        },
    )
}

fn get_at_preceeding_slot<T: Store<E>, E: EthSpec>(
    store: &T,
    slot: Slot,
    mut root: Hash256,
) -> Result<Option<(Hash256, Vec<u8>)>, Error> {
    loop {
        if let Some(bytes) = get_block_bytes::<_, E>(store, root)? {
            let this_slot = read_slot_from_block_bytes(&bytes)?;
            if this_slot == slot {
                break Ok(Some((root, bytes)));
            } else if this_slot < slot {
                break Ok(None);
            } else {
                root = read_parent_root_from_block_bytes(&bytes)?;
            }
        } else {
            break Ok(None);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use ssz::Encode;
    use tree_hash::TreeHash;

    type BeaconBlock = types::BeaconBlock<MinimalEthSpec>;

    #[test]
    fn read_slot() {
        let spec = MinimalEthSpec::default_spec();

        let test_slot = |slot: Slot| {
            let mut block = BeaconBlock::empty(&spec);
            block.slot = slot;
            let bytes = block.as_ssz_bytes();
            assert_eq!(read_slot_from_block_bytes(&bytes).unwrap(), slot);
        };

        test_slot(Slot::new(0));
        test_slot(Slot::new(1));
        test_slot(Slot::new(42));
        test_slot(Slot::new(u64::max_value()));
    }

    #[test]
    fn bad_slot() {
        for i in 0..8 {
            assert!(read_slot_from_block_bytes(&vec![0; i]).is_err());
        }
    }

    #[test]
    fn read_parent_root() {
        let spec = MinimalEthSpec::default_spec();

        let test_root = |root: Hash256| {
            let mut block = BeaconBlock::empty(&spec);
            block.parent_root = root;
            let bytes = block.as_ssz_bytes();
            assert_eq!(read_parent_root_from_block_bytes(&bytes).unwrap(), root);
        };

        test_root(Hash256::random());
        test_root(Hash256::random());
        test_root(Hash256::random());
    }

    fn build_chain(
        store: &impl Store<MinimalEthSpec>,
        slots: &[usize],
        spec: &ChainSpec,
    ) -> Vec<(Hash256, BeaconBlock)> {
        let mut blocks_and_roots: Vec<(Hash256, BeaconBlock)> = vec![];

        for (i, slot) in slots.iter().enumerate() {
            let mut block = BeaconBlock::empty(spec);
            block.slot = Slot::from(*slot);

            if i > 0 {
                block.parent_root = blocks_and_roots[i - 1].0;
            }

            let root = Hash256::from_slice(&block.tree_hash_root());
            store.put(&root, &block).unwrap();
            blocks_and_roots.push((root, block));
        }

        blocks_and_roots
    }

    #[test]
    fn chain_without_skips() {
        let n: usize = 10;
        let store = MemoryStore::open();
        let spec = MinimalEthSpec::default_spec();

        let slots: Vec<usize> = (0..n).collect();
        let blocks_and_roots = build_chain(&store, &slots, &spec);

        for source in 1..n {
            for target in 0..=source {
                let (source_root, _source_block) = &blocks_and_roots[source];
                let (target_root, target_block) = &blocks_and_roots[target];

                let (found_root, found_block) = store
                    .get_block_at_preceeding_slot(*source_root, target_block.slot)
                    .unwrap()
                    .unwrap();

                assert_eq!(found_root, *target_root);
                assert_eq!(found_block, *target_block);
            }
        }
    }

    #[test]
    fn chain_with_skips() {
        let store = MemoryStore::<MinimalEthSpec>::open();
        let spec = MinimalEthSpec::default_spec();

        let slots = vec![0, 1, 2, 5];
        let blocks_and_roots = build_chain(&store, &slots, &spec);

        for target in 0..3 {
            let (source_root, _source_block) = &blocks_and_roots[3];
            let (target_root, target_block) = &blocks_and_roots[target];

            let (found_root, found_block) = store
                .get_block_at_preceeding_slot(*source_root, target_block.slot)
                .unwrap()
                .unwrap();

            assert_eq!(found_root, *target_root);
            assert_eq!(found_block, *target_block);
        }

        let (source_root, _source_block) = &blocks_and_roots[3];
        assert!(store
            .get_block_at_preceeding_slot(*source_root, Slot::new(3))
            .unwrap()
            .is_none());

        let (source_root, _source_block) = &blocks_and_roots[3];
        assert!(store
            .get_block_at_preceeding_slot(*source_root, Slot::new(3))
            .unwrap()
            .is_none());
    }
}
s(bytes: &[u8]) -> Result<Hash256, DecodeError> {
    let previous_bytes = Slot::ssz_fixed_len();
    let slice = bytes
        .get(previous_bytes..previous_bytes + Hash256::ssz_fixed_len())
        .ok_or_else(|| DecodeError::BytesInvalid("Not enough bytes.".to_string()))?;
    Hash256::from_ssz_bytes(slice)
}
function_block-function_prefixed
[ { "content": "/// Fetch the next state to use whilst backtracking in `*RootsIterator`.\n\nfn next_historical_root_backtrack_state<E: EthSpec, S: Store<E>>(\n\n store: &S,\n\n current_state: &BeaconState<E>,\n\n) -> Option<BeaconState<E>> {\n\n // For compatibility with the freezer database's restore points, we load a state at\n\n // a restore point slot (thus avoiding replaying blocks). In the case where we're\n\n // not frozen, this just means we might not jump back by the maximum amount on\n\n // our first jump (i.e. at most 1 extra state load).\n\n let new_state_slot = slot_of_prev_restore_point::<E>(current_state.slot);\n\n let new_state_root = current_state.get_state_root(new_state_slot).ok()?;\n\n store.get_state(new_state_root, Some(new_state_slot)).ok()?\n\n}\n\n\n", "file_path": "beacon_node/store/src/iter.rs", "rank": 1, "score": 314707.7291563644 }, { "content": "pub fn store_full_state<S: Store<E>, E: EthSpec>(\n\n store: &S,\n\n state_root: &Hash256,\n\n state: &BeaconState<E>,\n\n) -> Result<(), Error> {\n\n let total_timer = metrics::start_timer(&metrics::BEACON_STATE_WRITE_TIMES);\n\n let overhead_timer = metrics::start_timer(&metrics::BEACON_STATE_WRITE_OVERHEAD_TIMES);\n\n\n\n let bytes = StorageContainer::new(state).as_ssz_bytes();\n\n metrics::stop_timer(overhead_timer);\n\n\n\n let result = store.put_bytes(DBColumn::BeaconState.into(), state_root.as_bytes(), &bytes);\n\n\n\n metrics::stop_timer(total_timer);\n\n metrics::inc_counter(&metrics::BEACON_STATE_WRITE_COUNT);\n\n metrics::inc_counter_by(&metrics::BEACON_STATE_WRITE_BYTES, bytes.len() as i64);\n\n\n\n result\n\n}\n\n\n", "file_path": "beacon_node/store/src/impls/beacon_state.rs", "rank": 4, "score": 274173.12013586 }, { "content": "pub fn store_updated_vector<F: Field<E>, E: EthSpec, S: Store<E>>(\n\n field: F,\n\n store: &S,\n\n state: &BeaconState<E>,\n\n spec: &ChainSpec,\n\n) -> Result<(), Error> {\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(state.slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n // Store the genesis value if we have access to it, and it hasn't been stored already.\n\n if F::slot_needs_genesis_value(state.slot, spec) {\n\n let genesis_value = F::extract_genesis_value(state, spec)?;\n\n F::check_and_store_genesis_value(store, genesis_value)?;\n\n }\n\n\n\n // Start by iterating backwards from the last chunk, storing new chunks in the database.\n\n // Stop once a chunk in the database matches what we were about to store, this indicates\n\n // that a previously stored state has already filled-in a portion of the indices covered.\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 5, "score": 269201.7012977853 }, { "content": "pub fn get_full_state<S: Store<E>, E: EthSpec>(\n\n store: &S,\n\n state_root: &Hash256,\n\n) -> Result<Option<BeaconState<E>>, Error> {\n\n let total_timer = metrics::start_timer(&metrics::BEACON_STATE_READ_TIMES);\n\n\n\n match store.get_bytes(DBColumn::BeaconState.into(), state_root.as_bytes())? 
{\n\n Some(bytes) => {\n\n let overhead_timer = metrics::start_timer(&metrics::BEACON_STATE_READ_OVERHEAD_TIMES);\n\n let container = StorageContainer::from_ssz_bytes(&bytes)?;\n\n\n\n metrics::stop_timer(overhead_timer);\n\n metrics::stop_timer(total_timer);\n\n metrics::inc_counter(&metrics::BEACON_STATE_READ_COUNT);\n\n metrics::inc_counter_by(&metrics::BEACON_STATE_READ_BYTES, bytes.len() as i64);\n\n\n\n Ok(Some(container.try_into()?))\n\n }\n\n None => Ok(None),\n\n }\n", "file_path": "beacon_node/store/src/impls/beacon_state.rs", "rank": 7, "score": 267271.263670629 }, { "content": "pub fn load_vector_from_db<F: FixedLengthField<E>, E: EthSpec, S: Store<E>>(\n\n store: &S,\n\n slot: Slot,\n\n spec: &ChainSpec,\n\n) -> Result<FixedVector<F::Value, F::Length>, Error> {\n\n // Do a range query\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n let chunks = range_query(store, F::column(), start_cindex, end_cindex)?;\n\n\n\n let default = if F::slot_needs_genesis_value(slot, spec) {\n\n F::load_genesis_value(store)?\n\n } else {\n\n F::Value::default()\n\n };\n\n\n\n let result = stitch(\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 8, "score": 258358.22587780264 }, { "content": "// Chunks at the end index are included.\n\n// TODO: could be more efficient with a real range query (perhaps RocksDB)\n\nfn range_query<S: Store<E>, E: EthSpec, T: Decode + Encode>(\n\n store: &S,\n\n column: DBColumn,\n\n start_index: usize,\n\n end_index: usize,\n\n) -> Result<Vec<Chunk<T>>, Error> {\n\n let mut result = vec![];\n\n\n\n for chunk_index in start_index..=end_index {\n\n let key = &chunk_key(chunk_index as u64)[..];\n\n let chunk = Chunk::load(store, column, key)?.ok_or(ChunkError::Missing { chunk_index })?;\n\n result.push(chunk);\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 9, "score": 257410.09528602887 }, { "content": "/// The historical roots are stored in vector chunks, despite not actually being a vector.\n\npub fn load_variable_list_from_db<F: VariableLengthField<E>, E: EthSpec, S: Store<E>>(\n\n store: &S,\n\n slot: Slot,\n\n spec: &ChainSpec,\n\n) -> Result<VariableList<F::Value, F::Length>, Error> {\n\n let chunk_size = F::chunk_size();\n\n let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec);\n\n let start_cindex = start_vindex / chunk_size;\n\n let end_cindex = end_vindex / chunk_size;\n\n\n\n let chunks: Vec<Chunk<F::Value>> = range_query(store, F::column(), start_cindex, end_cindex)?;\n\n\n\n let mut result = Vec::with_capacity(chunk_size * chunks.len());\n\n\n\n for (chunk_index, chunk) in chunks.into_iter().enumerate() {\n\n for (i, value) in chunk.values.into_iter().enumerate() {\n\n let vindex = chunk_index * chunk_size + i;\n\n\n\n if vindex >= start_vindex && vindex < end_vindex {\n\n result.push(value);\n\n }\n\n }\n\n }\n\n\n\n Ok(result.into())\n\n}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 10, "score": 256569.41184657917 }, { "content": "fn root_as_string(root: Hash256) -> String {\n\n format!(\"0x{:?}\", root)\n\n}\n\n\n", "file_path": "eth2/utils/remote_beacon_node/src/lib.rs", "rank": 12, "score": 240001.79683963372 }, { "content": "pub fn check_tree_hash(expected_str: &str, actual_root: &[u8]) -> Result<(), Error> {\n\n let expected_root = hex::decode(&expected_str[2..])\n\n 
.map_err(|e| Error::FailedToParseTest(format!(\"{:?}\", e)))?;\n\n let expected_root = Hash256::from_slice(&expected_root);\n\n let tree_hash_root = Hash256::from_slice(actual_root);\n\n compare_result::<Hash256, Error>(&Ok(tree_hash_root), &Some(expected_root))\n\n}\n\n\n\nimpl<T: SszStaticType> Case for SszStatic<T> {\n\n fn result(&self, _case_index: usize) -> Result<(), Error> {\n\n check_serialization(&self.value, &self.serialized)?;\n\n check_tree_hash(&self.roots.root, &self.value.tree_hash_root())?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: SszStaticType + SignedRoot> Case for SszStaticSR<T> {\n\n fn result(&self, _case_index: usize) -> Result<(), Error> {\n\n check_serialization(&self.value, &self.serialized)?;\n\n check_tree_hash(&self.roots.root, &self.value.tree_hash_root())?;\n", "file_path": "tests/ef_tests/src/cases/ssz_static.rs", "rank": 13, "score": 228293.78360275697 }, { "content": "fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n }\n\n\n\n state.validators = (0..validator_count)\n\n .into_iter()\n\n .collect::<Vec<_>>()\n\n .par_iter()\n\n .map(|&i| Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n", "file_path": "beacon_node/store/benches/benches.rs", "rank": 14, "score": 225090.747798947 }, { "content": "/// Returns the node created by hashing `root` and `length`.\n\n///\n\n/// Used in `TreeHash` for inserting the length of a list above it's root.\n\npub fn mix_in_length(root: &[u8], length: usize) -> Vec<u8> {\n\n let mut length_bytes = length.to_le_bytes().to_vec();\n\n length_bytes.resize(BYTES_PER_CHUNK, 0);\n\n\n\n eth2_hashing::hash_concat(root, &length_bytes)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum TreeHashType {\n\n Basic,\n\n Vector,\n\n List,\n\n Container,\n\n}\n\n\n", "file_path": "eth2/utils/tree_hash/src/lib.rs", "rank": 15, "score": 224925.45692534908 }, { "content": "fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n }\n\n\n\n state.validators = (0..validator_count)\n\n .into_iter()\n\n .collect::<Vec<_>>()\n\n .par_iter()\n\n .map(|&i| Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n", "file_path": "beacon_node/store/examples/ssz_encode_state.rs", "rank": 16, "score": 220663.61867110714 }, { "content": "/// Check that the DiskStore's split_slot is equal to the start slot of the last finalized epoch.\n\nfn check_split_slot(harness: &TestHarness, store: Arc<DiskStore<E>>) {\n\n let split_slot = store.get_split_slot();\n\n assert_eq!(\n\n harness\n\n .chain\n\n .head()\n\n .beacon_state\n\n .finalized_checkpoint\n\n .epoch\n\n .start_slot(E::slots_per_epoch()),\n\n split_slot\n\n );\n\n assert_ne!(split_slot, 0);\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/store_tests.rs", "rank": 17, "score": 217521.09936216057 }, { "content": "/// Parse a root from a `0x` preixed string.\n\n///\n\n/// E.g., 
`\"0x0000000000000000000000000000000000000000000000000000000000000000\"`\n\npub fn parse_root(string: &str) -> Result<Hash256, ApiError> {\n\n const PREFIX: &str = \"0x\";\n\n\n\n if string.starts_with(PREFIX) {\n\n let trimmed = string.trim_start_matches(PREFIX);\n\n trimmed\n\n .parse()\n\n .map_err(|e| ApiError::BadRequest(format!(\"Unable to parse root: {:?}\", e)))\n\n } else {\n\n Err(ApiError::BadRequest(\n\n \"Root must have a 0x prefix\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 18, "score": 214061.35098745956 }, { "content": "/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte length from `bytes`, where `bytes.len() >=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\nfn read_offset(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n decode_offset(bytes.get(0..BYTES_PER_LENGTH_OFFSET).ok_or_else(|| {\n\n DecodeError::InvalidLengthPrefix {\n\n len: bytes.len(),\n\n expected: BYTES_PER_LENGTH_OFFSET,\n\n }\n\n })?)\n\n}\n\n\n", "file_path": "eth2/utils/ssz/src/decode.rs", "rank": 19, "score": 213568.86330662732 }, { "content": "/// Decode bytes as a little-endian usize, returning an `Err` if `bytes.len() !=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\nfn decode_offset(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n let len = bytes.len();\n\n let expected = BYTES_PER_LENGTH_OFFSET;\n\n\n\n if len != expected {\n\n Err(DecodeError::InvalidLengthPrefix { len, expected })\n\n } else {\n\n let mut array: [u8; BYTES_PER_LENGTH_OFFSET] = std::default::Default::default();\n\n array.clone_from_slice(bytes);\n\n\n\n Ok(u32::from_le_bytes(array) as usize)\n\n }\n\n}\n", "file_path": "eth2/utils/ssz/src/decode.rs", "rank": 20, "score": 213568.86330662732 }, { "content": "fn get_harness(store: Arc<DiskStore<E>>, validator_count: usize) -> TestHarness {\n\n let harness = BeaconChainHarness::new_with_disk_store(\n\n MinimalEthSpec,\n\n store,\n\n KEYPAIRS[0..validator_count].to_vec(),\n\n );\n\n harness.advance_slot();\n\n harness\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/store_tests.rs", "rank": 21, "score": 211652.55445226945 }, { "content": "fn write_block<T: EthSpec>(block: &BeaconBlock<T>, root: Hash256, log: &Logger) {\n\n if WRITE_BLOCK_PROCESSING_SSZ {\n\n let filename = format!(\"block_slot_{}_root{}.ssz\", block.slot, root);\n\n let mut path = std::env::temp_dir().join(\"lighthouse\");\n\n let _ = fs::create_dir_all(path.clone());\n\n path = path.join(filename);\n\n\n\n match fs::File::create(path.clone()) {\n\n Ok(mut file) => {\n\n let _ = file.write_all(&block.as_ssz_bytes());\n\n }\n\n Err(e) => error!(\n\n log,\n\n \"Failed to log block\";\n\n \"path\" => format!(\"{:?}\", path),\n\n \"error\" => format!(\"{:?}\", e)\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "beacon_node/beacon_chain/src/beacon_chain.rs", "rank": 22, "score": 209026.6746584543 }, { "content": "fn load_from_dir<T: SszStaticType>(path: &Path) -> Result<(SszStaticRoots, Vec<u8>, T), Error> {\n\n let roots = yaml_decode_file(&path.join(\"roots.yaml\"))?;\n\n let serialized = fs::read(&path.join(\"serialized.ssz\")).expect(\"serialized.ssz exists\");\n\n let value = yaml_decode_file(&path.join(\"value.yaml\"))?;\n\n\n\n Ok((roots, serialized, value))\n\n}\n\n\n\nimpl<T: SszStaticType> LoadCase for SszStatic<T> {\n\n fn load_from_dir(path: &Path) -> Result<Self, Error> {\n\n load_from_dir(path).map(|(roots, serialized, value)| Self {\n\n roots,\n\n serialized,\n\n value,\n\n })\n\n }\n\n}\n\n\n\nimpl<T: SszStaticType + SignedRoot> LoadCase for SszStaticSR<T> {\n\n fn 
load_from_dir(path: &Path) -> Result<Self, Error> {\n", "file_path": "tests/ef_tests/src/cases/ssz_static.rs", "rank": 23, "score": 208124.21238708962 }, { "content": "/// Compute the slot of the last guaranteed restore point in the freezer database.\n\nfn slot_of_prev_restore_point<E: EthSpec>(current_slot: Slot) -> Slot {\n\n let slots_per_historical_root = E::SlotsPerHistoricalRoot::to_u64();\n\n (current_slot - 1) / slots_per_historical_root * slots_per_historical_root\n\n}\n\n\n\npub type ReverseBlockRootIterator<'a, E, S> =\n\n ReverseHashAndSlotIterator<BlockRootsIterator<'a, E, S>>;\n\npub type ReverseStateRootIterator<'a, E, S> =\n\n ReverseHashAndSlotIterator<StateRootsIterator<'a, E, S>>;\n\n\n\npub type ReverseHashAndSlotIterator<I> = ReverseChainIterator<(Hash256, Slot), I>;\n\n\n\n/// Provides a wrapper for an iterator that returns a given `T` before it starts returning results of\n\n/// the `Iterator`.\n\npub struct ReverseChainIterator<T, I> {\n\n first_value_used: bool,\n\n first_value: T,\n\n iter: I,\n\n}\n\n\n", "file_path": "beacon_node/store/src/iter.rs", "rank": 24, "score": 204771.19102146095 }, { "content": "fn int_hashes(start: u64, end: u64) -> Vec<Hash256> {\n\n (start..end).map(Hash256::from_low_u64_le).collect()\n\n}\n\n\n", "file_path": "eth2/utils/cached_tree_hash/src/test.rs", "rank": 25, "score": 204767.47609878264 }, { "content": "/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte union index from `bytes`, where `bytes.len() >=\n\n/// BYTES_PER_LENGTH_OFFSET`.\n\npub fn read_union_index(bytes: &[u8]) -> Result<usize, DecodeError> {\n\n read_offset(bytes)\n\n}\n\n\n", "file_path": "eth2/utils/ssz/src/decode.rs", "rank": 26, "score": 204158.58063503294 }, { "content": "/// Ensures that the finalized root can be set to all values in `roots`.\n\nfn test_update_finalized_root(roots: &[(Hash256, Slot)]) {\n\n let harness = &FORKED_HARNESS;\n\n\n\n let lmd = harness.new_fork_choice();\n\n\n\n for (root, _slot) in roots.iter().rev() {\n\n let block = harness\n\n .store_clone()\n\n .get::<BeaconBlock<TestEthSpec>>(root)\n\n .expect(\"block should exist\")\n\n .expect(\"db should not error\");\n\n lmd.update_finalized_root(&block, *root)\n\n .expect(\"finalized root should update for faulty fork\");\n\n\n\n assert_eq!(\n\n lmd.verify_integrity(),\n\n Ok(()),\n\n \"Tree integrity should be maintained after updating the finalized root\"\n\n );\n\n }\n\n}\n\n\n\n/// Iterates from low-to-high slot through the faulty roots, updating the finalized root.\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 27, "score": 203774.29992413605 }, { "content": "fn store_range<F, E, S, I>(\n\n _: F,\n\n range: I,\n\n start_vindex: usize,\n\n end_vindex: usize,\n\n store: &S,\n\n state: &BeaconState<E>,\n\n spec: &ChainSpec,\n\n) -> Result<bool, Error>\n\nwhere\n\n F: Field<E>,\n\n E: EthSpec,\n\n S: Store<E>,\n\n I: Iterator<Item = usize>,\n\n{\n\n for chunk_index in range {\n\n let chunk_key = &chunk_key(chunk_index as u64)[..];\n\n\n\n let existing_chunk =\n\n Chunk::<F::Value>::load(store, F::column(), chunk_key)?.unwrap_or_else(Chunk::default);\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 28, "score": 202304.79912669616 }, { "content": "/// Implemented for types that have ancestors (e.g., blocks, states) that may be iterated over.\n\n///\n\n/// ## Note\n\n///\n\n/// It is assumed that all ancestors for this object are stored in the database. 
If this is not the\n\n/// case, the iterator will start returning `None` prior to genesis.\n\npub trait AncestorIter<U: Store<E>, E: EthSpec, I: Iterator> {\n\n /// Returns an iterator over the roots of the ancestors of `self`.\n\n fn try_iter_ancestor_roots(&self, store: Arc<U>) -> Option<I>;\n\n}\n\n\n\nimpl<'a, U: Store<E>, E: EthSpec> AncestorIter<U, E, BlockRootsIterator<'a, E, U>>\n\n for BeaconBlock<E>\n\n{\n\n /// Iterates across all available prior block roots of `self`, starting at the most recent and ending\n\n /// at genesis.\n\n fn try_iter_ancestor_roots(&self, store: Arc<U>) -> Option<BlockRootsIterator<'a, E, U>> {\n\n let state = store.get_state(&self.state_root, Some(self.slot)).ok()??;\n\n\n\n Some(BlockRootsIterator::owned(store, state))\n\n }\n\n}\n\n\n\nimpl<'a, U: Store<E>, E: EthSpec> AncestorIter<U, E, StateRootsIterator<'a, E, U>>\n\n for BeaconState<E>\n\n{\n", "file_path": "beacon_node/store/src/iter.rs", "rank": 29, "score": 201950.1264072439 }, { "content": "fn keypair_from_bytes(mut bytes: Vec<u8>) -> error::Result<Keypair> {\n\n libp2p::core::identity::secp256k1::SecretKey::from_bytes(&mut bytes)\n\n .map(|secret| {\n\n let keypair: libp2p::core::identity::secp256k1::Keypair = secret.into();\n\n Keypair::Secp256k1(keypair)\n\n })\n\n .map_err(|e| format!(\"Unable to parse p2p secret key: {:?}\", e).into())\n\n}\n\n\n", "file_path": "beacon_node/eth2-libp2p/src/service.rs", "rank": 30, "score": 200078.14711233153 }, { "content": "pub fn u8_from_hex_str<'de, D>(deserializer: D) -> Result<u8, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s: String = Deserialize::deserialize(deserializer)?;\n\n\n\n let start = match s.as_str().get(2..) {\n\n Some(start) => start,\n\n None => return Err(D::Error::custom(\"string length too small\")),\n\n };\n\n u8::from_str_radix(&start, 16).map_err(D::Error::custom)\n\n}\n\n\n", "file_path": "eth2/types/src/utils/serde_utils.rs", "rank": 31, "score": 200050.26769244095 }, { "content": "/// Return the database key for the genesis value.\n\nfn genesis_value_key() -> [u8; 8] {\n\n 0u64.to_be_bytes()\n\n}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 32, "score": 199346.03633581952 }, { "content": "/// Compares `result` with `expected`.\n\n///\n\n/// If `expected.is_none()` then `result` is expected to be `Err`. 
Otherwise, `T` in `result` and\n\n/// `expected` must be equal.\n\npub fn compare_result<T, E>(result: &Result<T, E>, expected: &Option<T>) -> Result<(), Error>\n\nwhere\n\n T: PartialEq<T> + Debug,\n\n E: Debug,\n\n{\n\n match (result, expected) {\n\n // Pass: The should have failed and did fail.\n\n (Err(_), None) => Ok(()),\n\n // Fail: The test failed when it should have produced a result (fail).\n\n (Err(e), Some(expected)) => Err(Error::NotEqual(format!(\n\n \"Got {:?} | Expected {:?}\",\n\n e,\n\n fmt_val(expected)\n\n ))),\n\n // Fail: The test produced a result when it should have failed (fail).\n\n (Ok(result), None) => Err(Error::DidntFail(format!(\"Got {:?}\", fmt_val(result)))),\n\n // Potential Pass: The test should have produced a result, and it did.\n\n (Ok(result), Some(expected)) => {\n\n if result == expected {\n\n Ok(())\n", "file_path": "tests/ef_tests/src/case_result.rs", "rank": 33, "score": 198777.52413959047 }, { "content": "fn run<E: EthSpec>(\n\n environment_builder: EnvironmentBuilder<E>,\n\n matches: &ArgMatches,\n\n) -> Result<(), String> {\n\n let debug_level = matches\n\n .value_of(\"debug-level\")\n\n .ok_or_else(|| \"Expected --debug-level flag\".to_string())?;\n\n\n\n let mut environment = environment_builder\n\n .async_logger(debug_level)?\n\n .multi_threaded_tokio_runtime()?\n\n .build()?;\n\n\n\n let log = environment.core_context().log;\n\n\n\n if let Some(log_path) = matches.value_of(\"logfile\") {\n\n let path = log_path\n\n .parse::<PathBuf>()\n\n .map_err(|e| format!(\"Failed to parse log path: {:?}\", e))?;\n\n environment.log_to_json_file(path, debug_level)?;\n", "file_path": "lighthouse/src/main.rs", "rank": 34, "score": 198435.60357761735 }, { "content": "/// Request the version from the node, looping back and trying again on failure. 
Exit once the node\n\n/// has been contacted.\n\nfn wait_for_node<E: EthSpec>(\n\n beacon_node: RemoteBeaconNode<E>,\n\n log: Logger,\n\n) -> impl Future<Item = RemoteBeaconNode<E>, Error = String> {\n\n // Try to get the version string from the node, looping until success is returned.\n\n loop_fn(beacon_node.clone(), move |beacon_node| {\n\n let log = log.clone();\n\n beacon_node\n\n .clone()\n\n .http\n\n .node()\n\n .get_version()\n\n .map_err(|e| format!(\"{:?}\", e))\n\n .then(move |result| {\n\n let future: Box<dyn Future<Item = Loop<_, _>, Error = String> + Send> = match result\n\n {\n\n Ok(version) => {\n\n info!(\n\n log,\n\n \"Connected to beacon node\";\n", "file_path": "validator_client/src/lib.rs", "rank": 35, "score": 193940.5291855587 }, { "content": "/// For each `ValidatorDirectory`, submit a deposit transaction to the `eth1_endpoint`.\n\n///\n\n/// Returns success as soon as the eth1 endpoint accepts the transaction (i.e., does not wait for\n\n/// transaction success/revert).\n\nfn deposit_validators<E: EthSpec>(\n\n context: RuntimeContext<E>,\n\n eth1_endpoint: String,\n\n deposit_contract: Address,\n\n validators: Vec<ValidatorDirectory>,\n\n account_index: usize,\n\n deposit_value: u64,\n\n password: Option<String>,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let log_1 = context.log.clone();\n\n let log_2 = context.log.clone();\n\n\n\n Http::new(&eth1_endpoint)\n\n .map_err(move |e| {\n\n error!(\n\n log_1,\n\n \"Failed to start web3 HTTP transport\";\n\n \"error\" => format!(\"{:?}\", e)\n\n )\n\n })\n", "file_path": "account_manager/src/lib.rs", "rank": 36, "score": 193940.5291855587 }, { "content": "pub fn eth1_tx_data(deposit_data: &DepositData) -> Result<Vec<u8>, Error> {\n\n let params = vec![\n\n Token::Bytes(deposit_data.pubkey.as_ssz_bytes()),\n\n Token::Bytes(deposit_data.withdrawal_credentials.as_ssz_bytes()),\n\n Token::Bytes(deposit_data.signature.as_ssz_bytes()),\n\n Token::FixedBytes(deposit_data.tree_hash_root().as_ssz_bytes()),\n\n ];\n\n\n\n // Here we make an assumption that the `crate::testnet::ABI` has a superset of the features of\n\n // the crate::ABI`.\n\n let abi = Contract::load(ABI)?;\n\n let function = abi.function(\"deposit\")?;\n\n function.encode_input(&params)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use types::{\n\n test_utils::generate_deterministic_keypair, ChainSpec, EthSpec, Hash256, Keypair,\n", "file_path": "eth2/utils/deposit_contract/src/lib.rs", "rank": 37, "score": 193082.57628556498 }, { "content": "/// Create a new client with the default configuration.\n\nfn init_new_client<E: EthSpec>(\n\n client_config: &mut ClientConfig,\n\n eth2_config: &mut Eth2Config,\n\n) -> Result<()> {\n\n let eth2_testnet_config: Eth2TestnetConfig<E> =\n\n if let Some(testnet_dir) = &client_config.testnet_dir {\n\n Eth2TestnetConfig::load(testnet_dir.clone())\n\n .map_err(|e| format!(\"Unable to open testnet dir at {:?}: {}\", testnet_dir, e))?\n\n } else {\n\n Eth2TestnetConfig::hard_coded()\n\n .map_err(|e| format!(\"Unable to load hard-coded testnet dir: {}\", e))?\n\n };\n\n\n\n eth2_config.spec = eth2_testnet_config\n\n .yaml_config\n\n .as_ref()\n\n .ok_or_else(|| \"The testnet directory must contain a spec config\".to_string())?\n\n .apply_to_chain_spec::<E>(&eth2_config.spec)\n\n .ok_or_else(|| {\n\n format!(\n", "file_path": "beacon_node/src/config.rs", "rank": 38, "score": 191847.76205609023 }, { "content": "fn get_store(db_path: &TempDir) -> Arc<DiskStore<E>> {\n\n let spec = 
MinimalEthSpec::default_spec();\n\n let hot_path = db_path.path().join(\"hot_db\");\n\n let cold_path = db_path.path().join(\"cold_db\");\n\n let slots_per_restore_point = MinimalEthSpec::slots_per_historical_root() as u64;\n\n let log = NullLoggerBuilder.build().expect(\"logger should build\");\n\n Arc::new(\n\n DiskStore::open(&hot_path, &cold_path, slots_per_restore_point, spec, log)\n\n .expect(\"disk store should initialize\"),\n\n )\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/store_tests.rs", "rank": 39, "score": 191370.50452052473 }, { "content": "/// Alias to `merkleize_padded(&bytes, minimum_chunk_count)`\n\n///\n\n/// If `minimum_chunk_count < bytes / BYTES_PER_CHUNK`, padding will be added for the difference\n\n/// between the two.\n\npub fn merkle_root(bytes: &[u8], minimum_chunk_count: usize) -> Vec<u8> {\n\n merkleize_padded(&bytes, minimum_chunk_count)\n\n}\n\n\n", "file_path": "eth2/utils/tree_hash/src/lib.rs", "rank": 40, "score": 190738.02713409247 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)] // Serde requires the `byte` to be a ref.\n\npub fn u8_to_hex_str<S>(byte: &u8, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let mut hex: String = \"0x\".to_string();\n\n hex.push_str(&hex::encode(&[*byte]));\n\n\n\n serializer.serialize_str(&hex)\n\n}\n\n\n", "file_path": "eth2/types/src/utils/serde_utils.rs", "rank": 41, "score": 190237.58069047943 }, { "content": "fn build_node<E: EthSpec>(env: &mut Environment<E>) -> LocalBeaconNode<E> {\n\n let context = env.core_context();\n\n env.runtime()\n\n .block_on(LocalBeaconNode::production(\n\n context,\n\n testing_client_config(),\n\n ))\n\n .expect(\"should block until node created\")\n\n}\n\n\n", "file_path": "beacon_node/tests/test.rs", "rank": 42, "score": 190069.18935907588 }, { "content": "/// Verifies that all beacon nodes in the given network have a head state that has a finalized\n\n/// epoch of `epoch`.\n\nfn verify_all_finalized_at<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n epoch: Epoch,\n\n) -> impl Future<Item = (), Error = String> {\n\n network\n\n .remote_nodes()\n\n .into_future()\n\n .and_then(|remote_nodes| {\n\n stream::unfold(remote_nodes.into_iter(), |mut iter| {\n\n iter.next().map(|remote_node| {\n\n remote_node\n\n .http\n\n .beacon()\n\n .get_head()\n\n .map(|head| head.finalized_slot.epoch(E::slots_per_epoch()))\n\n .map(|epoch| (epoch, iter))\n\n .map_err(|e| format!(\"Get head via http failed: {:?}\", e))\n\n })\n\n })\n\n .collect()\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 43, "score": 189848.91395971042 }, { "content": "/// Verifies that all beacon nodes in the given `network` have a head state that contains\n\n/// `expected_count` validators.\n\nfn verify_validator_count<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n expected_count: usize,\n\n) -> impl Future<Item = (), Error = String> {\n\n network\n\n .remote_nodes()\n\n .into_future()\n\n .and_then(|remote_nodes| {\n\n stream::unfold(remote_nodes.into_iter(), |mut iter| {\n\n iter.next().map(|remote_node| {\n\n let beacon = remote_node.http.beacon();\n\n beacon\n\n .get_head()\n\n .map_err(|e| format!(\"Get head via http failed: {:?}\", e))\n\n .and_then(move |head| {\n\n beacon\n\n .get_state_by_root(head.state_root)\n\n .map(|(state, _root)| state)\n\n .map_err(|e| format!(\"Get state root via http failed: {:?}\", e))\n\n })\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 44, "score": 187937.80131348872 }, { "content": "/// Helper: returns 
all the ancestor roots and slots for a given block_root.\n\nfn get_ancestor_roots<U: Store<TestEthSpec>>(\n\n store: Arc<U>,\n\n block_root: Hash256,\n\n) -> Vec<(Hash256, Slot)> {\n\n let block = store\n\n .get::<BeaconBlock<TestEthSpec>>(&block_root)\n\n .expect(\"block should exist\")\n\n .expect(\"store should not error\");\n\n\n\n <BeaconBlock<TestEthSpec> as AncestorIter<_, _, _>>::try_iter_ancestor_roots(&block, store)\n\n .expect(\"should be able to create ancestor iter\")\n\n .collect()\n\n}\n\n\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 45, "score": 187473.01484893606 }, { "content": "fn get_state<E: EthSpec>(validator_count: usize) -> BeaconState<E> {\n\n let spec = &E::default_spec();\n\n let eth1_data = Eth1Data {\n\n deposit_root: Hash256::zero(),\n\n deposit_count: 0,\n\n block_hash: Hash256::zero(),\n\n };\n\n\n\n let mut state = BeaconState::new(0, eth1_data, spec);\n\n\n\n for i in 0..validator_count {\n\n state.balances.push(i as u64).expect(\"should add balance\");\n\n }\n\n\n\n state.validators = (0..validator_count)\n\n .into_iter()\n\n .collect::<Vec<_>>()\n\n .par_iter()\n\n .map(|&i| Validator {\n\n pubkey: generate_deterministic_keypair(i).pk.into(),\n", "file_path": "eth2/types/benches/benches.rs", "rank": 46, "score": 187311.5613921701 }, { "content": "/// A helper function providing common functionality for finding the Merkle root of some bytes that\n\n/// represent a bitfield.\n\npub fn bitfield_bytes_tree_hash_root<N: Unsigned>(bytes: &[u8]) -> Vec<u8> {\n\n let byte_size = (N::to_usize() + 7) / 8;\n\n let minimum_chunk_count = (byte_size + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;\n\n\n\n merkle_root(bytes, minimum_chunk_count)\n\n}\n", "file_path": "eth2/utils/ssz_types/src/tree_hash.rs", "rank": 47, "score": 186457.5738988692 }, { "content": "/// Gets the fully-initialized global client and eth2 configuration objects.\n\n///\n\n/// The top-level `clap` arguments should be provided as `cli_args`.\n\n///\n\n/// The output of this function depends primarily upon the given `cli_args`, however it's behaviour\n\n/// may be influenced by other external services like the contents of the file system or the\n\n/// response of some remote server.\n\npub fn get_configs<E: EthSpec>(\n\n cli_args: &ArgMatches,\n\n mut eth2_config: Eth2Config,\n\n core_log: Logger,\n\n) -> Result<Config> {\n\n let log = core_log.clone();\n\n\n\n let mut client_config = ClientConfig::default();\n\n\n\n client_config.spec_constants = eth2_config.spec_constants.clone();\n\n\n\n // Read the `--datadir` flag.\n\n //\n\n // If it's not present, try and find the home directory (`~`) and push the default data\n\n // directory onto it.\n\n client_config.data_dir = cli_args\n\n .value_of(\"datadir\")\n\n .map(PathBuf::from)\n\n .or_else(|| dirs::home_dir().map(|home| home.join(\".lighthouse\").join(BEACON_NODE_DIR)))\n\n .unwrap_or_else(|| PathBuf::from(\".\"));\n", "file_path": "beacon_node/src/config.rs", "rank": 48, "score": 186062.68001048203 }, { "content": "/// Helper: returns the slot for some block_root.\n\nfn get_slot_for_block_root(harness: &BeaconChainHarness, block_root: Hash256) -> Slot {\n\n harness\n\n .chain\n\n .store\n\n .get::<BeaconBlock<TestEthSpec>>(&block_root)\n\n .expect(\"head block should exist\")\n\n .expect(\"DB should not error\")\n\n .slot\n\n}\n\n\n\nconst RANDOM_ITERATIONS: usize = 50;\n\nconst RANDOM_ACTIONS_PER_ITERATION: usize = 100;\n\n\n\n/// Create a single LMD instance and have one validator vote in reverse (highest to lowest slot)\n\n/// down 
the chain.\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 49, "score": 185284.76637111156 }, { "content": "fn get_store(db_path: &TempDir) -> Arc<DiskStore<E>> {\n\n let spec = E::default_spec();\n\n let hot_path = db_path.path().join(\"hot_db\");\n\n let cold_path = db_path.path().join(\"cold_db\");\n\n let slots_per_restore_point = MinimalEthSpec::slots_per_historical_root() as u64;\n\n let log = NullLoggerBuilder.build().expect(\"logger should build\");\n\n Arc::new(\n\n DiskStore::open(&hot_path, &cold_path, slots_per_restore_point, spec, log)\n\n .expect(\"disk store should initialize\"),\n\n )\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/persistence_tests.rs", "rank": 50, "score": 184468.64805529377 }, { "content": "/// Indicates if the given `state` is in the tail of it's eth1 voting period (i.e., in the later\n\n/// slots).\n\nfn is_period_tail<E: EthSpec>(state: &BeaconState<E>) -> bool {\n\n let slots_per_eth1_voting_period = E::SlotsPerEth1VotingPeriod::to_u64();\n\n let slot = state.slot % slots_per_eth1_voting_period;\n\n\n\n slot >= slots_per_eth1_voting_period.integer_sqrt()\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/eth1_chain.rs", "rank": 51, "score": 184309.50468465098 }, { "content": "/// Map a chunk index to bytes that can be used to key the NoSQL database.\n\n///\n\n/// We shift chunks up by 1 to make room for a genesis chunk that is handled separately.\n\npub fn chunk_key(cindex: u64) -> [u8; 8] {\n\n (cindex + 1).to_be_bytes()\n\n}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 52, "score": 183527.55813118472 }, { "content": "/// An object capable of storing and retrieving objects implementing `StoreItem`.\n\n///\n\n/// A `Store` is fundamentally backed by a key-value database, however it provides support for\n\n/// columns. 
A simple column implementation might involve prefixing a key with some bytes unique to\n\n/// each column.\n\npub trait Store<E: EthSpec>: Sync + Send + Sized + 'static {\n\n type ForwardsBlockRootsIterator: Iterator<Item = (Hash256, Slot)>;\n\n\n\n /// Retrieve some bytes in `column` with `key`.\n\n fn get_bytes(&self, column: &str, key: &[u8]) -> Result<Option<Vec<u8>>, Error>;\n\n\n\n /// Store some `value` in `column`, indexed with `key`.\n\n fn put_bytes(&self, column: &str, key: &[u8], value: &[u8]) -> Result<(), Error>;\n\n\n\n /// Return `true` if `key` exists in `column`.\n\n fn key_exists(&self, column: &str, key: &[u8]) -> Result<bool, Error>;\n\n\n\n /// Removes `key` from `column`.\n\n fn key_delete(&self, column: &str, key: &[u8]) -> Result<(), Error>;\n\n\n\n /// Store an item in `Self`.\n\n fn put<I: StoreItem>(&self, key: &Hash256, item: &I) -> Result<(), Error> {\n\n item.db_put(self, key)\n\n }\n\n\n", "file_path": "beacon_node/store/src/lib.rs", "rank": 53, "score": 182811.24046065257 }, { "content": "pub fn check_serialization<T: SszStaticType>(value: &T, serialized: &[u8]) -> Result<(), Error> {\n\n // Check serialization\n\n let serialized_result = value.as_ssz_bytes();\n\n compare_result::<usize, Error>(&Ok(value.ssz_bytes_len()), &Some(serialized.len()))?;\n\n compare_result::<Vec<u8>, Error>(&Ok(serialized_result), &Some(serialized.to_vec()))?;\n\n\n\n // Check deserialization\n\n let deserialized_result = T::from_ssz_bytes(serialized);\n\n compare_result(&deserialized_result, &Some(value.clone()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/ef_tests/src/cases/ssz_static.rs", "rank": 54, "score": 182135.30149598047 }, { "content": "/// Returns true if there are enough eth1 votes in the given `state` to have updated\n\n/// `state.eth1_data`.\n\nfn eth1_data_change_is_possible<E: EthSpec>(state: &BeaconState<E>) -> bool {\n\n 2 * state.eth1_data_votes.len() > E::SlotsPerEth1VotingPeriod::to_usize()\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/eth1_chain.rs", "rank": 55, "score": 181501.27578298532 }, { "content": "/// Scrape the given `state` assuming it's the head state, updating the `DEFAULT_REGISTRY`.\n\nfn scrape_head_state<T: BeaconChainTypes>(state: &BeaconState<T::EthSpec>, state_root: Hash256) {\n\n set_gauge_by_slot(&HEAD_STATE_SLOT, state.slot);\n\n set_gauge_by_hash(&HEAD_STATE_ROOT, state_root);\n\n set_gauge_by_slot(\n\n &HEAD_STATE_LATEST_BLOCK_SLOT,\n\n state.latest_block_header.slot,\n\n );\n\n set_gauge_by_hash(\n\n &HEAD_STATE_CURRENT_JUSTIFIED_ROOT,\n\n state.current_justified_checkpoint.root,\n\n );\n\n set_gauge_by_epoch(\n\n &HEAD_STATE_CURRENT_JUSTIFIED_EPOCH,\n\n state.current_justified_checkpoint.epoch,\n\n );\n\n set_gauge_by_hash(\n\n &HEAD_STATE_PREVIOUS_JUSTIFIED_ROOT,\n\n state.previous_justified_checkpoint.root,\n\n );\n\n set_gauge_by_epoch(\n", "file_path": "beacon_node/beacon_chain/src/metrics.rs", "rank": 56, "score": 181208.02671087705 }, { "content": "/// Checks that the chain has made the first possible finalization.\n\n///\n\n/// Intended to be run as soon as chain starts.\n\npub fn verify_first_finalization<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n) -> impl Future<Item = (), Error = String> {\n\n epoch_delay(Epoch::new(4), slot_duration, E::slots_per_epoch())\n\n .and_then(|()| verify_all_finalized_at(network, Epoch::new(2)))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 57, "score": 180323.68966353752 }, { "content": "/// Checks that all of the 
validators have on-boarded by the start of the second eth1 voting\n\n/// period.\n\npub fn verify_validator_onboarding<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n expected_validator_count: usize,\n\n) -> impl Future<Item = (), Error = String> {\n\n slot_delay(\n\n Slot::new(E::SlotsPerEth1VotingPeriod::to_u64()),\n\n slot_duration,\n\n )\n\n .and_then(move |()| verify_validator_count(network, expected_validator_count))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 58, "score": 180323.68966353752 }, { "content": "/// Checks that all of the validators have on-boarded by the start of the second eth1 voting\n\n/// period.\n\npub fn verify_initial_validator_count<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n initial_validator_count: usize,\n\n) -> impl Future<Item = (), Error = String> {\n\n slot_delay(Slot::new(1), slot_duration)\n\n .and_then(move |()| verify_validator_count(network, initial_validator_count))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 59, "score": 178571.56603113882 }, { "content": "fn build_node<E: EthSpec>(env: &mut Environment<E>, config: ClientConfig) -> LocalBeaconNode<E> {\n\n let context = env.core_context();\n\n env.runtime()\n\n .block_on(LocalBeaconNode::production(context, config))\n\n .expect(\"should block until node created\")\n\n}\n\n\n", "file_path": "beacon_node/rest_api/tests/test.rs", "rank": 60, "score": 177920.8503802575 }, { "content": "pub fn graffiti_from_hex_str<'de, D>(deserializer: D) -> Result<[u8; GRAFFITI_BYTES_LEN], D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s: String = Deserialize::deserialize(deserializer)?;\n\n let mut array = [0 as u8; GRAFFITI_BYTES_LEN];\n\n\n\n let start = s\n\n .as_str()\n\n .get(2..)\n\n .ok_or_else(|| D::Error::custom(\"string length too small\"))?;\n\n let decoded: Vec<u8> = hex::decode(&start).map_err(D::Error::custom)?;\n\n\n\n if decoded.len() > GRAFFITI_BYTES_LEN {\n\n return Err(D::Error::custom(\"Fork length too long\"));\n\n }\n\n\n\n for (i, item) in array.iter_mut().enumerate() {\n\n if i > decoded.len() {\n\n break;\n\n }\n\n *item = decoded[i];\n\n }\n\n Ok(array)\n\n}\n", "file_path": "eth2/types/src/utils/serde_utils.rs", "rank": 61, "score": 177184.87128035168 }, { "content": "pub fn fork_from_hex_str<'de, D>(deserializer: D) -> Result<[u8; FORK_BYTES_LEN], D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s: String = Deserialize::deserialize(deserializer)?;\n\n let mut array = [0 as u8; FORK_BYTES_LEN];\n\n\n\n let start = s\n\n .as_str()\n\n .get(2..)\n\n .ok_or_else(|| D::Error::custom(\"string length too small\"))?;\n\n let decoded: Vec<u8> = hex::decode(&start).map_err(D::Error::custom)?;\n\n\n\n if decoded.len() != FORK_BYTES_LEN {\n\n return Err(D::Error::custom(\"Fork length too long\"));\n\n }\n\n\n\n for (i, item) in array.iter_mut().enumerate() {\n\n if i > decoded.len() {\n\n break;\n\n }\n\n *item = decoded[i];\n\n }\n\n Ok(array)\n\n}\n\n\n", "file_path": "eth2/types/src/utils/serde_utils.rs", "rank": 62, "score": 177184.87128035168 }, { "content": "// Note: the `PartialEq` bound is only required for testing. 
If it becomes a serious annoyance we\n\n// can remove it.\n\npub trait LmdGhost<S: Store<E>, E: EthSpec>: PartialEq + Send + Sync + Sized {\n\n /// Create a new instance, with the given `store` and `finalized_root`.\n\n fn new(store: Arc<S>, finalized_block: &BeaconBlock<E>, finalized_root: Hash256) -> Self;\n\n\n\n /// Process an attestation message from some validator that attests to some `block_hash`\n\n /// representing a block at some `block_slot`.\n\n fn process_attestation(\n\n &self,\n\n validator_index: usize,\n\n block_hash: Hash256,\n\n block_slot: Slot,\n\n ) -> Result<()>;\n\n\n\n /// Process a block that was seen on the network.\n\n fn process_block(&self, block: &BeaconBlock<E>, block_hash: Hash256) -> Result<()>;\n\n\n\n /// Returns the head of the chain, starting the search at `start_block_root` and moving upwards\n\n /// (in block height).\n\n fn find_head<F>(\n\n &self,\n", "file_path": "eth2/lmd_ghost/src/lib.rs", "rank": 63, "score": 175972.0684447392 }, { "content": "/// Remove all entries from the given hash map for which `prune_if` returns true.\n\n///\n\n/// The keys in the map should be validator indices, which will be looked up\n\n/// in the state's validator registry and then passed to `prune_if`.\n\n/// Entries for unknown validators will be kept.\n\nfn prune_validator_hash_map<T, F, E: EthSpec>(\n\n map: &mut HashMap<u64, T>,\n\n prune_if: F,\n\n finalized_state: &BeaconState<E>,\n\n) where\n\n F: Fn(&Validator) -> bool,\n\n{\n\n map.retain(|&validator_index, _| {\n\n finalized_state\n\n .validators\n\n .get(validator_index as usize)\n\n .map_or(true, |validator| !prune_if(validator))\n\n });\n\n}\n\n\n\n/// Compare two operation pools.\n\nimpl<T: EthSpec + Default> PartialEq for OperationPool<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n *self.attestations.read() == *other.attestations.read()\n\n && *self.deposits.read() == *other.deposits.read()\n", "file_path": "eth2/operation_pool/src/lib.rs", "rank": 64, "score": 173403.0864646065 }, { "content": "/// Load a `BeaconState` from the given `path`. The file should contain raw SSZ bytes (i.e., no\n\n/// ASCII encoding or schema).\n\npub fn state_from_ssz_file<E: EthSpec>(path: PathBuf) -> Result<BeaconState<E>, String> {\n\n File::open(path.clone())\n\n .map_err(move |e| format!(\"Unable to open SSZ genesis state file {:?}: {:?}\", path, e))\n\n .and_then(|mut file| {\n\n let mut bytes = vec![];\n\n file.read_to_end(&mut bytes)\n\n .map_err(|e| format!(\"Failed to read SSZ file: {:?}\", e))?;\n\n Ok(bytes)\n\n })\n\n .and_then(|bytes| {\n\n BeaconState::from_ssz_bytes(&bytes)\n\n .map_err(|e| format!(\"Unable to parse SSZ genesis state file: {:?}\", e))\n\n })\n\n}\n", "file_path": "beacon_node/genesis/src/lib.rs", "rank": 65, "score": 169840.85800053185 }, { "content": "/// Marker trait for fixed-length fields (`FixedVector<T, N>`).\n\npub trait FixedLengthField<E: EthSpec>: Field<E> {}\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 66, "score": 168495.08302279888 }, { "content": "/// Marker trait for variable-length fields (`VariableList<T, N>`).\n\npub trait VariableLengthField<E: EthSpec>: Field<E> {}\n\n\n\n/// Macro to implement the `Field` trait on a new unit struct type.\n\nmacro_rules! 
field {\n\n ($struct_name:ident, $marker_trait:ident, $value_ty:ty, $length_ty:ty, $column:expr,\n\n $update_pattern:expr, $get_value:expr) => {\n\n #[derive(Clone, Copy)]\n\n pub struct $struct_name;\n\n\n\n impl<T> Field<T> for $struct_name\n\n where\n\n T: EthSpec,\n\n {\n\n type Value = $value_ty;\n\n type Length = $length_ty;\n\n\n\n fn column() -> DBColumn {\n\n $column\n\n }\n\n\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 67, "score": 168495.08302279888 }, { "content": "fn hash_with_round(seed: &[u8], round: u8) -> Vec<u8> {\n\n let mut seed = seed.to_vec();\n\n seed.append(&mut int_to_bytes1(round));\n\n hash(&seed[..])\n\n}\n\n\n", "file_path": "eth2/utils/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 68, "score": 168181.23175597427 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn fork_to_hex_str<S>(bytes: &[u8; FORK_BYTES_LEN], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let mut hex_string: String = \"0x\".to_string();\n\n hex_string.push_str(&hex::encode(&bytes));\n\n\n\n serializer.serialize_str(&hex_string)\n\n}\n\n\n", "file_path": "eth2/types/src/utils/serde_utils.rs", "rank": 69, "score": 168157.81965096935 }, { "content": "/// Compute the hash of two slices concatenated.\n\npub fn hash_concat(h1: &[u8], h2: &[u8]) -> Vec<u8> {\n\n let mut vec1 = h1.to_vec();\n\n vec1.extend_from_slice(h2);\n\n hash(&vec1)\n\n}\n\n\n\n/// The max index that can be used with `ZERO_HASHES`.\n\n#[cfg(feature = \"zero_hash_cache\")]\n\npub const ZERO_HASHES_MAX_INDEX: usize = 48;\n\n\n\n#[cfg(feature = \"zero_hash_cache\")]\n\nlazy_static! {\n\n /// Cached zero hashes where `ZERO_HASHES[i]` is the hash of a Merkle tree with 2^i zero leaves.\n\n pub static ref ZERO_HASHES: Vec<Vec<u8>> = {\n\n let mut hashes = vec![vec![0; 32]; ZERO_HASHES_MAX_INDEX + 1];\n\n\n\n for i in 0..ZERO_HASHES_MAX_INDEX {\n\n hashes[i + 1] = hash_concat(&hashes[i], &hashes[i]);\n\n }\n\n\n", "file_path": "eth2/utils/eth2_hashing/src/lib.rs", "rank": 70, "score": 167429.92666753626 }, { "content": "/// Returns the digest of `input`.\n\n///\n\n/// Uses `ring::digest::SHA256`.\n\npub fn hash(input: &[u8]) -> Vec<u8> {\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let h = digest(&SHA256, input).as_ref().into();\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n let h = Sha256::digest(input).as_ref().into();\n\n\n\n h\n\n}\n\n\n", "file_path": "eth2/utils/eth2_hashing/src/lib.rs", "rank": 71, "score": 164692.13682510413 }, { "content": "/// A helper function providing common functionality between the `TreeHash` implementations for\n\n/// `FixedVector` and `VariableList`.\n\npub fn vec_tree_hash_root<T, N>(vec: &[T]) -> Vec<u8>\n\nwhere\n\n T: TreeHash,\n\n N: Unsigned,\n\n{\n\n let (leaves, minimum_chunk_count) = match T::tree_hash_type() {\n\n TreeHashType::Basic => {\n\n let mut leaves =\n\n Vec::with_capacity((BYTES_PER_CHUNK / T::tree_hash_packing_factor()) * vec.len());\n\n\n\n for item in vec {\n\n leaves.append(&mut item.tree_hash_packed_encoding());\n\n }\n\n\n\n let values_per_chunk = T::tree_hash_packing_factor();\n\n let minimum_chunk_count = (N::to_usize() + values_per_chunk - 1) / values_per_chunk;\n\n\n\n (leaves, minimum_chunk_count)\n\n }\n\n TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {\n", "file_path": "eth2/utils/ssz_types/src/tree_hash.rs", "rank": 72, "score": 163837.81886890344 }, { "content": "/// Returns `int` as little-endian bytes with a length of 1.\n\npub fn int_to_bytes1(int: u8) 
-> Vec<u8> {\n\n vec![int]\n\n}\n\n\n", "file_path": "eth2/utils/int_to_bytes/src/lib.rs", "rank": 73, "score": 162989.52836274108 }, { "content": "/// Same as `compare_result_detailed`, however it drops the caches on both states before\n\n/// comparison.\n\npub fn compare_beacon_state_results_without_caches<T: EthSpec, E: Debug>(\n\n result: &mut Result<BeaconState<T>, E>,\n\n expected: &mut Option<BeaconState<T>>,\n\n) -> Result<(), Error> {\n\n if let (Ok(ref mut result), Some(ref mut expected)) = (result.as_mut(), expected.as_mut()) {\n\n result.drop_all_caches();\n\n expected.drop_all_caches();\n\n }\n\n\n\n compare_result_detailed(&result, &expected)\n\n}\n\n\n", "file_path": "tests/ef_tests/src/case_result.rs", "rank": 74, "score": 162428.6959373779 }, { "content": "/// Merkleizes bytes and returns the root, using a simple algorithm that does not optimize to avoid\n\n/// processing or storing padding bytes.\n\n///\n\n/// The input `bytes` will be padded to ensure that the number of leaves is a power-of-two.\n\n///\n\n/// It is likely a better choice to use [merkleize_padded](fn.merkleize_padded.html) instead.\n\n///\n\n/// ## CPU Performance\n\n///\n\n/// Will hash all nodes in the tree, even if they are padding and pre-determined.\n\n///\n\n/// ## Memory Performance\n\n///\n\n/// - Duplicates the input `bytes`.\n\n/// - Stores all internal nodes, even if they are padding.\n\n/// - Does not free up unused memory during operation.\n\npub fn merkleize_standard(bytes: &[u8]) -> Vec<u8> {\n\n // If the bytes are just one chunk (or less than one chunk) just return them.\n\n if bytes.len() <= HASHSIZE {\n\n let mut o = bytes.to_vec();\n\n o.resize(HASHSIZE, 0);\n\n return o;\n\n }\n\n\n\n let leaves = num_sanitized_leaves(bytes.len());\n\n let nodes = num_nodes(leaves);\n\n let internal_nodes = nodes - leaves;\n\n\n\n let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();\n\n\n\n let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];\n\n\n\n o.append(&mut bytes.to_vec());\n\n\n\n assert_eq!(o.len(), num_bytes);\n\n\n", "file_path": "eth2/utils/tree_hash/src/merkleize_standard.rs", "rank": 75, "score": 161369.7667667573 }, { "content": "/// Returns `state.eth1_data.block_hash` at the start of eth1 voting period defined by\n\n/// `state.slot`.\n\nfn eth1_block_hash_at_start_of_voting_period<T: EthSpec, S: Store<T>>(\n\n store: Arc<S>,\n\n state: &BeaconState<T>,\n\n) -> Result<Hash256, Error> {\n\n let period = T::SlotsPerEth1VotingPeriod::to_u64();\n\n\n\n if !eth1_data_change_is_possible(state) {\n\n // If there are less than 50% of the votes in the current state, it's impossible that the\n\n // `eth1_data.block_hash` has changed from the value at `state.eth1_data.block_hash`.\n\n Ok(state.eth1_data.block_hash)\n\n } else {\n\n // If there have been more than 50% of votes in this period it's possible (but not\n\n // necessary) that the `state.eth1_data.block_hash` has been changed since the start of the\n\n // voting period.\n\n let slot = (state.slot / period) * period;\n\n let prev_state_root = state\n\n .get_state_root(slot)\n\n .map_err(Error::UnableToGetPreviousStateRoot)?;\n\n\n\n store\n\n .get_state(&prev_state_root, Some(slot))\n\n .map_err(Error::StoreError)?\n\n .map(|state| state.eth1_data.block_hash)\n\n .ok_or_else(|| Error::PreviousStateNotInDB(*prev_state_root))\n\n }\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/eth1_chain.rs", "rank": 76, "score": 161016.1576746496 }, { "content": "fn hash_with_round_and_position(seed: &[u8], round: u8, 
position: usize) -> Option<Vec<u8>> {\n\n let mut seed = seed.to_vec();\n\n seed.append(&mut int_to_bytes1(round));\n\n /*\n\n * Note: the specification has an implicit assertion in `int_to_bytes4` that `position / 256 <\n\n * 2**24`. For efficiency, we do not check for that here as it is checked in `compute_shuffled_index`.\n\n */\n\n seed.append(&mut int_to_bytes4((position / 256) as u32));\n\n Some(hash(&seed[..]))\n\n}\n\n\n", "file_path": "eth2/utils/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 77, "score": 156861.2087727664 }, { "content": "fn build_env() -> Environment<E> {\n\n EnvironmentBuilder::minimal()\n\n .null_logger()\n\n .expect(\"should build env logger\")\n\n .single_thread_tokio_runtime()\n\n .expect(\"should start tokio runtime\")\n\n .build()\n\n .expect(\"environment should build\")\n\n}\n\n\n", "file_path": "beacon_node/rest_api/tests/test.rs", "rank": 78, "score": 154702.4616562656 }, { "content": "/// Trait for types representing fields of the `BeaconState`.\n\n///\n\n/// All of the required methods are type-level, because we do most things with fields at the\n\n/// type-level. We require their value-level witnesses to be `Copy` so that we can avoid the\n\n/// turbofish when calling functions like `store_updated_vector`.\n\npub trait Field<E: EthSpec>: Copy {\n\n /// The type of value stored in this field: the `T` from `FixedVector<T, N>`.\n\n ///\n\n /// The `Default` impl will be used to fill extra vector entries.\n\n type Value: Decode + Encode + Default + Clone + PartialEq + std::fmt::Debug;\n\n\n\n /// The length of this field: the `N` from `FixedVector<T, N>`.\n\n type Length: Unsigned;\n\n\n\n /// The database column where the integer-indexed chunks for this field should be stored.\n\n ///\n\n /// Each field's column **must** be unique.\n\n fn column() -> DBColumn;\n\n\n\n /// Update pattern for this field, so that we can do differential updates.\n\n fn update_pattern(spec: &ChainSpec) -> UpdatePattern;\n\n\n\n /// The number of values to store per chunk on disk.\n\n ///\n\n /// Default is 128 so that we read/write 4K pages when the values are 32 bytes.\n", "file_path": "beacon_node/store/src/chunked_vector.rs", "rank": 79, "score": 152906.9898231268 }, { "content": "/// Merkleize `bytes` and return the root, optionally padding the tree out to `min_leaves` number of\n\n/// leaves.\n\n///\n\n/// First all nodes are extracted from `bytes` and then a padding node is added until the number of\n\n/// leaf chunks is greater than or equal to `min_leaves`. Callers may set `min_leaves` to `0` if no\n\n/// adding additional chunks should be added to the given `bytes`.\n\n///\n\n/// If `bytes.len() <= BYTES_PER_CHUNK`, no hashing is done and `bytes` is returned, potentially\n\n/// padded out to `BYTES_PER_CHUNK` length with `0`.\n\n///\n\n/// ## CPU Performance\n\n///\n\n/// A cache of `MAX_TREE_DEPTH` hashes are stored to avoid re-computing the hashes of padding nodes\n\n/// (or their parents). Therefore, adding padding nodes only incurs one more hash per additional\n\n/// height of the tree.\n\n///\n\n/// ## Memory Performance\n\n///\n\n/// This algorithm has two interesting memory usage properties:\n\n///\n\n/// 1. The maximum memory footprint is roughly `O(V / 2)` memory, where `V` is the number of leaf\n\n/// chunks with values (i.e., leaves that are not padding). The means adding padding nodes to\n\n/// the tree does not increase the memory footprint.\n\n/// 2. 
At each height of the tree half of the memory is freed until only a single chunk is stored.\n\n/// 3. The input `bytes` are not copied into another list before processing.\n\n///\n\n/// _Note: there are some minor memory overheads, including a handful of usizes and a list of\n\n/// `MAX_TREE_DEPTH` hashes as `lazy_static` constants._\n\npub fn merkleize_padded(bytes: &[u8], min_leaves: usize) -> Vec<u8> {\n\n // If the bytes are just one chunk or less, pad to one chunk and return without hashing.\n\n if bytes.len() <= BYTES_PER_CHUNK && min_leaves <= 1 {\n\n let mut o = bytes.to_vec();\n\n o.resize(BYTES_PER_CHUNK, 0);\n\n return o;\n\n }\n\n\n\n assert!(\n\n bytes.len() > BYTES_PER_CHUNK || min_leaves > 1,\n\n \"Merkle hashing only needs to happen if there is more than one chunk\"\n\n );\n\n\n\n // The number of leaves that can be made directly from `bytes`.\n\n let leaves_with_values = (bytes.len() + (BYTES_PER_CHUNK - 1)) / BYTES_PER_CHUNK;\n\n\n\n // The number of parents that have at least one non-padding leaf.\n\n //\n\n // Since there is more than one node in this tree (see prior assertion), there should always be\n\n // one or more initial parent nodes.\n", "file_path": "eth2/utils/tree_hash/src/merkleize_padded.rs", "rank": 80, "score": 151900.4402354604 }, { "content": "/// Returns the withdrawal credentials for a given public key.\n\npub fn get_withdrawal_credentials(pubkey: &PublicKey, prefix_byte: u8) -> Vec<u8> {\n\n let hashed = hash(&ssz_encode(pubkey));\n\n let mut prefixed = vec![prefix_byte];\n\n prefixed.extend_from_slice(&hashed[1..]);\n\n\n\n prefixed\n\n}\n\n\n", "file_path": "eth2/utils/bls/src/lib.rs", "rank": 81, "score": 151894.49318120902 }, { "content": "type FnMapErr<TSocket> = fn(timeout::Error<(RPCError, InboundFramed<TSocket>)>) -> RPCError;\n\n\n\nimpl<TSocket> InboundUpgrade<TSocket> for RPCProtocol\n\nwhere\n\n TSocket: AsyncRead + AsyncWrite,\n\n{\n\n type Output = InboundOutput<TSocket>;\n\n type Error = RPCError;\n\n\n\n type Future = future::AndThen<\n\n future::MapErr<\n\n timeout::Timeout<stream::StreamFuture<InboundFramed<TSocket>>>,\n\n FnMapErr<TSocket>,\n\n >,\n\n FutureResult<InboundOutput<TSocket>, RPCError>,\n\n FnAndThen<TSocket>,\n\n >;\n\n\n\n fn upgrade_inbound(\n\n self,\n", "file_path": "beacon_node/eth2-libp2p/src/rpc/protocol.rs", "rank": 82, "score": 151809.9526640515 }, { "content": "fn error(reason: Invalid) -> BlockOperationError<Invalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_attestation.rs", "rank": 83, "score": 150534.19483741923 }, { "content": "type DBHashMap = HashMap<Vec<u8>, Vec<u8>>;\n\n\n\n/// A thread-safe `HashMap` wrapper.\n\npub struct MemoryStore<E: EthSpec> {\n\n db: RwLock<DBHashMap>,\n\n _phantom: PhantomData<E>,\n\n}\n\n\n\nimpl<E: EthSpec> Clone for MemoryStore<E> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n db: RwLock::new(self.db.read().clone()),\n\n _phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<E: EthSpec> MemoryStore<E> {\n\n /// Create a new, empty database.\n\n pub fn open() -> Self {\n", "file_path": "beacon_node/store/src/memory_store.rs", "rank": 84, "score": 150274.5683142312 }, { "content": "fn cache_state<T: EthSpec>(state: &mut BeaconState<T>) -> Result<(), Error> {\n\n let previous_state_root = state.update_tree_hash_cache()?;\n\n\n\n // Note: increment the state slot here to allow use of our `state_root` and `block_root`\n\n // getter/setter functions.\n\n //\n\n // This is a bit hacky, however it gets 
the job safely without lots of code.\n\n let previous_slot = state.slot;\n\n state.slot += 1;\n\n\n\n // Store the previous slot's post state transition root.\n\n state.set_state_root(previous_slot, previous_state_root)?;\n\n\n\n // Cache latest block header state root\n\n if state.latest_block_header.state_root == Hash256::zero() {\n\n state.latest_block_header.state_root = previous_state_root;\n\n }\n\n\n\n // Cache block root\n\n let latest_block_root = state.latest_block_header.canonical_root();\n", "file_path": "eth2/state_processing/src/per_slot_processing.rs", "rank": 85, "score": 149563.28815836582 }, { "content": "fn error(reason: Invalid) -> BlockOperationError<Invalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_proposer_slashing.rs", "rank": 86, "score": 149059.7679057028 }, { "content": "fn error(reason: Invalid) -> BlockOperationError<Invalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_attester_slashing.rs", "rank": 87, "score": 149059.7679057028 }, { "content": "fn error(reason: Invalid) -> BlockOperationError<Invalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/is_valid_indexed_attestation.rs", "rank": 88, "score": 149059.7679057028 }, { "content": "fn error(reason: ExitInvalid) -> BlockOperationError<ExitInvalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_exit.rs", "rank": 89, "score": 147633.7241844416 }, { "content": "fn error(reason: DepositInvalid) -> BlockOperationError<DepositInvalid> {\n\n BlockOperationError::invalid(reason)\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_deposit.rs", "rank": 90, "score": 147633.7241844416 }, { "content": "/// Same as `compare_result`, however utilizes the `CompareFields` trait to give a list of\n\n/// mismatching fields when `Ok(result) != Some(expected)`.\n\npub fn compare_result_detailed<T, E>(\n\n result: &Result<T, E>,\n\n expected: &Option<T>,\n\n) -> Result<(), Error>\n\nwhere\n\n T: PartialEq<T> + Debug + CompareFields,\n\n E: Debug,\n\n{\n\n match (result, expected) {\n\n (Ok(result), Some(expected)) => {\n\n let mut mismatching_fields: Vec<Comparison> = expected\n\n .compare_fields(result)\n\n .into_iter()\n\n // Filter all out all fields that are equal.\n\n .filter(Comparison::not_equal)\n\n .collect();\n\n\n\n mismatching_fields\n\n .iter_mut()\n\n .for_each(|f| f.retain_children(FieldComparison::not_equal));\n", "file_path": "tests/ef_tests/src/case_result.rs", "rank": 91, "score": 147098.41767954407 }, { "content": "fn bytes_to_int64(slice: &[u8]) -> u64 {\n\n let mut bytes = [0; 8];\n\n bytes.copy_from_slice(&slice[0..8]);\n\n u64::from_le_bytes(bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn returns_none_for_zero_length_list() {\n\n assert_eq!(None, shuffle_list(vec![], 90, &[42, 42], true));\n\n }\n\n}\n", "file_path": "eth2/utils/swap_or_not_shuffle/src/shuffle_list.rs", "rank": 92, "score": 147027.02221312135 }, { "content": "fn bytes_to_int64(slice: &[u8]) -> u64 {\n\n let mut bytes = [0; 8];\n\n bytes.copy_from_slice(&slice[0..8]);\n\n u64::from_le_bytes(bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ethereum_types::H256 as Hash256;\n\n\n\n #[test]\n\n #[ignore]\n\n fn fuzz_test() {\n\n let max_list_size 
= 2_usize.pow(24);\n\n let test_runs = 1000;\n\n\n\n // Test at max list_size with the end index.\n\n for _ in 0..test_runs {\n\n let index = max_list_size - 1;\n", "file_path": "eth2/utils/swap_or_not_shuffle/src/compute_shuffled_index.rs", "rank": 93, "score": 145198.2404586883 }, { "content": "// Converts a vector to u64 (from little endian)\n\nfn bytes_to_u64(array: &[u8]) -> u64 {\n\n let mut bytes = [0u8; 8];\n\n bytes.copy_from_slice(array);\n\n u64::from_le_bytes(bytes)\n\n}\n\n\n", "file_path": "tests/ef_tests/src/cases/bls_sign_msg.rs", "rank": 94, "score": 145198.2404586883 }, { "content": "// Converts a vector to u64 (from little endian)\n\nfn bytes_to_u64(array: &[u8]) -> u64 {\n\n let mut bytes = [0u8; 8];\n\n bytes.copy_from_slice(array);\n\n u64::from_le_bytes(bytes)\n\n}\n", "file_path": "tests/ef_tests/src/cases/bls_g2_compressed.rs", "rank": 95, "score": 145198.2404586883 }, { "content": "// Converts a vector to u64 (from big endian)\n\nfn bytes_to_u64(array: &[u8]) -> u64 {\n\n let mut result: u64 = 0;\n\n for (i, value) in array.iter().rev().enumerate() {\n\n if i == 8 {\n\n break;\n\n }\n\n result += u64::pow(2, i as u32 * 8) * u64::from(*value);\n\n }\n\n result\n\n}\n", "file_path": "tests/ef_tests/src/cases/bls_g2_uncompressed.rs", "rank": 96, "score": 145198.2404586883 }, { "content": "/// Returns `int` as little-endian bytes with a length of 32.\n\nfn int_to_bytes32(int: u64) -> Vec<u8> {\n\n let mut vec = int.to_le_bytes().to_vec();\n\n vec.resize(32, 0);\n\n vec\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn bool() {\n\n let mut true_bytes: Vec<u8> = vec![1];\n\n true_bytes.append(&mut vec![0; 31]);\n\n\n\n let false_bytes: Vec<u8> = vec![0; 32];\n\n\n\n assert_eq!(true.tree_hash_root(), true_bytes);\n\n assert_eq!(false.tree_hash_root(), false_bytes);\n\n }\n", "file_path": "eth2/utils/tree_hash/src/impls.rs", "rank": 97, "score": 143729.8623071877 }, { "content": "/// Returns `int` as little-endian bytes with a length of 32.\n\nfn int_to_bytes32(int: usize) -> Vec<u8> {\n\n let mut vec = int.to_le_bytes().to_vec();\n\n vec.resize(32, 0);\n\n vec\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::deposit_log::tests::EXAMPLE_LOG;\n\n use crate::http::Log;\n\n\n\n pub const TREE_DEPTH: usize = 32;\n\n\n\n fn example_log() -> DepositLog {\n\n let log = Log {\n\n block_number: 42,\n\n data: EXAMPLE_LOG.to_vec(),\n\n };\n\n DepositLog::from_log(&log).expect(\"should decode log\")\n", "file_path": "beacon_node/eth1/src/deposit_cache.rs", "rank": 98, "score": 143729.8623071877 }, { "content": "/// Returns `true` if a `PendingAttestation` and `BeaconState` share the same beacon block hash for\n\n/// the current slot of the `PendingAttestation`.\n\n///\n\n/// Spec v0.9.1\n\nfn has_common_beacon_block_root<T: EthSpec>(\n\n a: &PendingAttestation<T>,\n\n state: &BeaconState<T>,\n\n) -> Result<bool, BeaconStateError> {\n\n let state_block_root = *state.get_block_root(a.data.slot)?;\n\n\n\n Ok(a.data.beacon_block_root == state_block_root)\n\n}\n", "file_path": "eth2/state_processing/src/per_epoch_processing/validator_statuses.rs", "rank": 99, "score": 143164.05031659995 } ]
Rust
src/protocol/spdm/get_caps.rs
capoferro/manticore
64a4fb3089133d1543849bfdc44ccb8855ba0b7e
use core::mem;
use core::time::Duration;

use enumflags2::bitflags;
use enumflags2::BitFlags;

use crate::io::ReadInt as _;
use crate::protocol::spdm;
use crate::protocol::spdm::CommandType;

protocol_struct! {
    type GetCaps;
    const TYPE: CommandType = GetCaps;

    #![fuzz_derives_if = any()]
    struct Request {
        pub crypto_timeout: Duration,
        #[cfg_attr(feature = "serde", serde(with = "crate::serde::bitflags"))]
        pub caps: BitFlags<Caps>,
        pub max_packet_size: u32,
        pub max_message_size: u32,
    }

    fn Request::from_wire(r, a) {
        spdm::expect_zeros(r, 3)?;
        let ct_exp = r.read_le::<u8>()?;
        let crypto_timeout = Duration::from_micros(1u64 << ct_exp);
        spdm::expect_zeros(r, 2)?;
        let caps = BitFlags::<Caps>::from_wire(r, a)?;
        let max_packet_size = r.read_le::<u32>()?;
        let max_message_size = r.read_le::<u32>()?;
        Ok(Self { crypto_timeout, caps, max_packet_size, max_message_size })
    }

    fn Request::to_wire(&self, w) {
        spdm::write_zeros(&mut w, 3)?;
        let ct_micros = self.crypto_timeout.as_micros();
        if !ct_micros.is_power_of_two() {
            return Err(wire::Error::OutOfRange);
        }
        let ct_exp =
            8 * mem::size_of_val(&ct_micros) as u32 - ct_micros.leading_zeros() - 1;
        w.write_le(ct_exp as u8)?;
        spdm::write_zeros(&mut w, 2)?;
        self.caps.to_wire(&mut w)?;
        w.write_le(self.max_packet_size)?;
        w.write_le(self.max_message_size)?;
        Ok(())
    }

    #![fuzz_derives_if = any()]
    struct Response {
        pub crypto_timeout: Duration,
        #[cfg_attr(feature = "serde", serde(with = "crate::serde::bitflags"))]
        pub caps: BitFlags<Caps>,
        pub max_packet_size: u32,
        pub max_message_size: u32,
    }

    fn Response::from_wire(r, a) {
        spdm::expect_zeros(r, 3)?;
        let ct_exp = r.read_le::<u8>()?;
        let crypto_timeout = Duration::from_micros(1u64 << ct_exp);
        spdm::expect_zeros(r, 2)?;
        let caps = BitFlags::<Caps>::from_wire(r, a)?;
        let max_packet_size = r.read_le::<u32>()?;
        let max_message_size = r.read_le::<u32>()?;
        Ok(Self { crypto_timeout, caps, max_packet_size, max_message_size })
    }

    fn Response::to_wire(&self, w) {
        spdm::write_zeros(&mut w, 3)?;
        let ct_micros = self.crypto_timeout.as_micros();
        if !ct_micros.is_power_of_two() {
            return Err(wire::Error::OutOfRange);
        }
        let ct_exp =
            8 * mem::size_of_val(&ct_micros) as u32 - ct_micros.leading_zeros() - 1;
        w.write_le(ct_exp as u8)?;
        spdm::write_zeros(&mut w, 2)?;
        self.caps.to_wire(&mut w)?;
        w.write_le(self.max_packet_size)?;
        w.write_le(self.max_message_size)?;
        Ok(())
    }
}

#[cfg(feature = "arbitrary-derive")]
use {
    crate::protocol::arbitrary_bitflags,
    libfuzzer_sys::arbitrary::{self, Arbitrary, Unstructured},
};

#[cfg(feature = "arbitrary-derive")]
impl Arbitrary for GetCapsRequest {
    fn arbitrary(u: &mut Unstructured) -> arbitrary::Result<Self> {
        Ok(Self {
            crypto_timeout: u.arbitrary()?,
            caps: arbitrary_bitflags(u)?,
            max_packet_size: u.arbitrary()?,
            max_message_size: u.arbitrary()?,
        })
    }
    fn size_hint(_depth: usize) -> (usize, Option<usize>) {
        let size = mem::size_of::<Duration>()
            + mem::size_of::<BitFlags<Caps>>()
            + mem::size_of::<u32>() * 2;
        (size, Some(size))
    }
}

#[cfg(feature = "arbitrary-derive")]
impl Arbitrary for GetCapsResponse {
    fn arbitrary(u: &mut Unstructured) -> arbitrary::Result<Self> {
        Ok(Self {
            crypto_timeout: u.arbitrary()?,
            caps: arbitrary_bitflags(u)?,
            max_packet_size: u.arbitrary()?,
            max_message_size: u.arbitrary()?,
        })
    }
    fn size_hint(_depth: usize) -> (usize, Option<usize>) {
        let size = mem::size_of::<Duration>()
            + mem::size_of::<BitFlags<Caps>>()
            + mem::size_of::<u32>() * 2;
        (size, Some(size))
    }
}

#[bitflags]
#[repr(u32)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "arbitrary-derive", derive(Arbitrary))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Caps {
    Cache = 1 << 0,
    Certs = 1 << 1,
    Challenge = 1 << 2,
    UnsignedMeasurements = 1 << 3,
    SignedMeasurements = 1 << 4,
    FreshMeasurements = 1 << 5,
    SessionEncryption = 1 << 6,
    SessionAuth = 1 << 7,
    MutualAuth = 1 << 8,
    KeyExchange = 1 << 9,
    PskWithoutContext = 1 << 10,
    PskWithContext = 1 << 11,
    Heartbeat = 1 << 13,
    KeyUpdate = 1 << 14,
    HandshakeInTheClear = 1 << 15,
    PubKeyProvisioned = 1 << 16,
    Chunking = 1 << 17,
    AliasCert = 1 << 18,
}

impl Caps {
    pub fn manticore() -> enumflags2::BitFlags<Self> {
        Self::Certs
            | Self::Challenge
            | Self::SignedMeasurements
            | Self::FreshMeasurements
            | Self::SessionEncryption
            | Self::KeyExchange
            | Self::Heartbeat
            | Self::AliasCert
    }
}

#[cfg(test)]
mod test {
    use super::*;

    round_trip_test! {
        request_round_trip: {
            bytes: &[
                0x00, 0x00, 0x00, 0x0c,
                0x00, 0x00,
                0b01110110, 0b00100010, 0b00000100, 0b00000000,
                0x00, 0x01, 0x00, 0x00,
                0x00, 0x04, 0x00, 0x00,
            ],
            json: r#"{ "crypto_timeout": { "nanos": 4096000, "secs": 0 }, "caps": [ "Certs", "Challenge", "SignedMeasurements", "FreshMeasurements", "SessionEncryption", "KeyExchange", "Heartbeat", "AliasCert" ], "max_packet_size": 256, "max_message_size": 1024 }"#,
            value: GetCapsRequest {
                crypto_timeout: Duration::from_micros(4096),
                caps: Caps::manticore(),
                max_packet_size: 256,
                max_message_size: 1024,
            },
        },
        response_round_trip: {
            bytes: &[
                0x00, 0x00, 0x00, 0x0c,
                0x00, 0x00,
                0b01110110, 0b00100010, 0b00000100, 0b00000000,
                0x00, 0x01, 0x00, 0x00,
                0x00, 0x04, 0x00, 0x00,
            ],
            json: r#"{ "crypto_timeout": { "nanos": 4096000, "secs": 0 }, "caps": [ "Certs", "Challenge", "SignedMeasurements", "FreshMeasurements", "SessionEncryption", "KeyExchange", "Heartbeat", "AliasCert" ], "max_packet_size": 256, "max_message_size": 1024 }"#,
            value: GetCapsResponse {
                crypto_timeout: Duration::from_micros(4096),
                caps: Caps::manticore(),
                max_packet_size: 256,
                max_message_size: 1024,
            },
        },
    }
}
use core::mem; use core::time::Duration; use enumflags2::bitflags; use enumflags2::BitFlags; use crate::io::ReadInt as _; use crate::protocol::spdm; use crate::protocol::spdm::CommandType; protocol_struct! { type GetCaps; const TYPE: CommandType = GetCaps; #![fuzz_derives_if = any()] struct Request { pub crypto_timeout: Duration, #[cfg_attr(feature = "serde", serde(with = "crate::serde::bitflags"))] pub caps: BitFlags<Caps>, pub max_packet_size: u32, pub max_message_size: u32, } fn Request::from_wire(r, a) { spdm::expect_zeros(r, 3)?; let ct_exp = r.read_le::<u8>()?; let crypto_timeout = Duration::from_micros(1u64 << ct_exp); spdm::expect_zeros(r, 2)?; let caps = BitFlags::<Caps>::from_wire(r, a)?; let max_packet_size = r.read_le::<u32>()?; let max_message_size = r.read_le::<u32>()?; Ok(Self { crypto_timeout, caps, max_packet_size, max_message_size }) } fn Request::to_wire(&self, w) { spdm::write_zeros(&mut w, 3)?; let ct_micros = self.crypto_timeout.as_micros(); if !ct_micros.is_power_of_two() { return Err(wire::Error::OutOfRange); } let ct_exp = 8 * mem::size_of_val(&ct_micros) as u32 - ct_micros.leading_zeros() - 1; w.write_le(ct_exp as u8)?; spdm::write_zeros(&mut w, 2)?; self.caps.to_wire(&mut w)?; w.write_le(self.max_packet_size)?; w.write_le(self.max_message_size)?; Ok(()) } #![fuzz_derives_if = any()] struct Response { pub crypto_timeout: Duration, #[cfg_attr(feature = "serde", serde(with = "crate::serde::bitflags"))] pub caps: BitFlags<Caps>, pub max_packet_size: u32, pub max_message_size: u32, } fn Response::from_wire(r, a) { spdm::expect_zeros(r, 3)?; let ct_exp = r.read_le::<u8>()?; let crypto_timeout = Duration::from_micros(1u64 << ct_exp); spdm::expect_zeros(r, 2)?; let caps = BitFlags::<Caps>::from_wire(r, a)?; let max_packet_size = r.read_le::<u32>()?; let max_message_size = r.read_le::<u32>()?; Ok(Self { crypto_timeout, caps, max_packet_size, max_message_size }) } fn Response::to_wire(&self, w) { spdm::write_zeros(&mut w, 3)?; let ct_micros = self.crypto_timeout.as_micros(); if !ct_micros.is_power_of_two() { return Err(wire::Error::OutOfRange); } let ct_exp = 8 * mem::size_of_val(&ct_micros) as u32 - ct_micros.leading_zeros() - 1; w.write_le(ct_exp as u8)?; spdm::write_zeros(&mut w, 2)?; self.caps.to_wire(&mut w)?; w.write_le(self.max_packet_size)?; w.write_le(self.max_message_size)?; Ok(()) } } #[cfg(feature = "arbitrary-derive")] use { crate::protocol::arbitrary_bitflags, libfuzzer_sys::arbitrary::{self, Arbitrary, Unstructured}, }; #[cfg(feature = "arbitrary-derive")] impl Arbitrary for GetCapsRequest { fn arbitrary(u: &mut Unstructured) -> arbitrary::Result<Self> { Ok(Self { crypto_timeout: u.arbitrary()?, caps: arbitrary_bitflags(u)?, max_packet_size: u.arbitrary()?, max_message_size: u.arbitrary()?, }) } fn size_hint(_depth: usize) -> (usize, Option<usize>) {
(size, Some(size)) } } #[cfg(feature = "arbitrary-derive")] impl Arbitrary for GetCapsResponse { fn arbitrary(u: &mut Unstructured) -> arbitrary::Result<Self> { Ok(Self { crypto_timeout: u.arbitrary()?, caps: arbitrary_bitflags(u)?, max_packet_size: u.arbitrary()?, max_message_size: u.arbitrary()?, }) } fn size_hint(_depth: usize) -> (usize, Option<usize>) { let size = mem::size_of::<Duration>() + mem::size_of::<BitFlags<Caps>>() + mem::size_of::<u32>() * 2; (size, Some(size)) } } #[bitflags] #[repr(u32)] #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[cfg_attr(feature = "arbitrary-derive", derive(Arbitrary))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub enum Caps { Cache = 1 << 0, Certs = 1 << 1, Challenge = 1 << 2, UnsignedMeasurements = 1 << 3, SignedMeasurements = 1 << 4, FreshMeasurements = 1 << 5, SessionEncryption = 1 << 6, SessionAuth = 1 << 7, MutualAuth = 1 << 8, KeyExchange = 1 << 9, PskWithoutContext = 1 << 10, PskWithContext = 1 << 11, Heartbeat = 1 << 13, KeyUpdate = 1 << 14, HandshakeInTheClear = 1 << 15, PubKeyProvisioned = 1 << 16, Chunking = 1 << 17, AliasCert = 1 << 18, } impl Caps { pub fn manticore() -> enumflags2::BitFlags<Self> { Self::Certs | Self::Challenge | Self::SignedMeasurements | Self::FreshMeasurements | Self::SessionEncryption | Self::KeyExchange | Self::Heartbeat | Self::AliasCert } } #[cfg(test)] mod test { use super::*; round_trip_test! { request_round_trip: { bytes: &[ 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0b01110110, 0b00100010, 0b00000100, 0b00000000, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, ], json: r#"{ "crypto_timeout": { "nanos": 4096000, "secs": 0 }, "caps": [ "Certs", "Challenge", "SignedMeasurements", "FreshMeasurements", "SessionEncryption", "KeyExchange", "Heartbeat", "AliasCert" ], "max_packet_size": 256, "max_message_size": 1024 }"#, value: GetCapsRequest { crypto_timeout: Duration::from_micros(4096), caps: Caps::manticore(), max_packet_size: 256, max_message_size: 1024, }, }, response_round_trip: { bytes: &[ 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0b01110110, 0b00100010, 0b00000100, 0b00000000, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, ], json: r#"{ "crypto_timeout": { "nanos": 4096000, "secs": 0 }, "caps": [ "Certs", "Challenge", "SignedMeasurements", "FreshMeasurements", "SessionEncryption", "KeyExchange", "Heartbeat", "AliasCert" ], "max_packet_size": 256, "max_message_size": 1024 }"#, value: GetCapsResponse { crypto_timeout: Duration::from_micros(4096), caps: Caps::manticore(), max_packet_size: 256, max_message_size: 1024, }, }, } }
let size = mem::size_of::<Duration>() + mem::size_of::<BitFlags<Caps>>() + mem::size_of::<u32>() * 2;
assignment_statement
[ { "content": "/// No-std helper for using as a `write!()` target.\n\nstruct ArrayBuf<const N: usize>([u8; N], usize);\n\n\n\nimpl<const N: usize> AsRef<str> for ArrayBuf<N> {\n\n fn as_ref(&self) -> &str {\n\n core::str::from_utf8(&self.0[..self.1]).unwrap()\n\n }\n\n}\n\n\n\nimpl<const N: usize> Default for ArrayBuf<N> {\n\n fn default() -> Self {\n\n Self([0; N], 0)\n\n }\n\n}\n\n\n\nimpl<const N: usize> fmt::Write for ArrayBuf<N> {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n let bytes = s.as_bytes();\n\n let space_left = N - self.1;\n\n if space_left < bytes.len() {\n\n return Err(fmt::Error);\n\n }\n\n\n\n self.0[self.1..self.1 + bytes.len()].copy_from_slice(bytes);\n\n self.1 += bytes.len();\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 0, "score": 243470.414201554 }, { "content": "/// Parses an non-negative `INTEGER`, up to four bytes.\n\npub fn u32(buf: &mut untrusted::Reader) -> Result<u32, Error> {\n\n uint(buf)?.read_all(Error::BadEncoding, |buf| {\n\n let mut v: u32 = 0;\n\n let mut octets = 0;\n\n while let Ok(b) = buf.read_byte() {\n\n if octets >= 4 {\n\n return Err(Error::BadEncoding);\n\n }\n\n v <<= 8;\n\n v |= b as u32;\n\n octets += 1;\n\n }\n\n Ok(v)\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 1, "score": 209490.7291918863 }, { "content": "/// Parses an optional, non-negative `INTEGER`, up to four bytes.\n\npub fn opt_u32(buf: &mut untrusted::Reader) -> Result<Option<u32>, Error> {\n\n if !buf.peek(Tag::INTEGER.0) {\n\n return Ok(None);\n\n }\n\n u32(buf).map(Some)\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 2, "score": 190706.90247474628 }, { "content": "fn write_zeros(w: &mut impl Write, count: usize) -> Result<(), wire::Error> {\n\n for _ in 0..count {\n\n w.write_le(0u8)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use zerocopy::AsBytes as _;\n\n\n\n #[test]\n\n fn version_byte_values() {\n\n let v1 = Version::MANTICORE;\n\n let v2 = Version::new(0x1f, 0x1e);\n\n let v3 = ExtendedVersion::new(v1, 5, 6);\n\n assert_eq!(v1.as_bytes(), [0x12]);\n\n assert_eq!(v2.as_bytes(), [0xfe]);\n\n assert_eq!(v3.as_bytes(), [0x56, 0x12]);\n", "file_path": "src/protocol/spdm/mod.rs", "rank": 3, "score": 185955.97117105738 }, { "content": "/// Parse `count` big-endian bytes.\n\nfn be(buf: &mut untrusted::Reader, count: usize) -> Result<u32, Error> {\n\n debug_assert!(count <= 4);\n\n let mut val = 0;\n\n for _ in 0..count {\n\n val <<= 8;\n\n val |= buf.read_byte()? 
as u32;\n\n }\n\n Ok(val)\n\n}\n\n\n\n/// A DER tag.\n\n///\n\n/// Unlike BoringSSL, we don't bother to parse tag numbers greater than 30,\n\n/// because none of the tags we care about use a tag larger than that.\n\n///\n\n/// Tags cannot be interrogated beyond basic comparisons with existing\n\n/// constants.\n\n// This is encoded directly as the \"first octet\" of a DER tag.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct Tag(u8);\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 4, "score": 170622.14925170637 }, { "content": "/// For serializing a `Vec<u8>` as a bytestring.\n\npub fn se_bytestring<S>(\n\n bytes: &impl AsRef<[u8]>,\n\n s: S,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let bytes = bytes.as_ref();\n\n if s.is_human_readable() {\n\n match core::str::from_utf8(bytes) {\n\n Ok(utf8) => s.serialize_str(utf8),\n\n _ => s.serialize_bytes(bytes),\n\n }\n\n } else {\n\n s.serialize_bytes(bytes)\n\n }\n\n}\n\n\n\n/// Like `se_bytestring` but for use with `#[serde(with)]`.\n\n#[cfg(feature = \"std\")]\n\npub mod bytestring {\n\n pub use super::de_bytestring as deserialize;\n\n pub use super::se_bytestring as serialize;\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 5, "score": 167156.46902317472 }, { "content": "/// For deserializing a `Vec<Vec<u8>>` from either a string of hex digits or a\n\n/// sequence of bytes.\n\npub fn se_hexstrings<S>(\n\n bytes: &[impl AsRef<[u8]>],\n\n s: S,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n #[derive(serde::Serialize)]\n\n #[serde(transparent)]\n\n struct HexString<'a>(#[serde(serialize_with = \"se_hexstring\")] &'a [u8]);\n\n\n\n s.collect_seq(bytes.iter().map(|b| HexString(b.as_ref())))\n\n}\n\n\n\n/// Like `se_hexstring` but for use with `#[serde(with)]`.\n\n#[cfg(feature = \"std\")]\n\npub mod hexstring {\n\n pub use super::de_hexstring as deserialize;\n\n pub use super::se_hexstring as serialize;\n\n}\n", "file_path": "src/serde.rs", "rank": 6, "score": 167156.2166089351 }, { "content": "#[cfg(not(feature = \"std\"))]\n\npub fn se_hexstring<S>(\n\n bytes: &impl AsRef<[u8]>,\n\n s: S,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let bytes = bytes.as_ref();\n\n // To serialize a string, serde only provides `serialize_str`, which takes\n\n // an &str param, rather than exposing a writer-like interface like\n\n // sequence serialization does.\n\n //\n\n // Because we would need to allocate double the space to serialize `bytes`,\n\n // as opposed to the simple transmutation for `se_bytestring()`, we have to\n\n // fall back on \"just\" a byte serialization when in `no_std` mode.\n\n s.serialize_bytes(bytes)\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 7, "score": 167151.55660256918 }, { "content": "/// For defaulting a field to `true`.\n\npub fn default_to_true() -> bool {\n\n true\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 8, "score": 167151.55660256918 }, { "content": "/// Serializes an integer as binary.\n\npub fn se_bin<S, X>(x: &X, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n X: Binary + Into<u64> + Copy,\n\n{\n\n if s.is_human_readable() {\n\n let mut buf = ArrayBuf::<66>::default();\n\n let _ = write!(buf, \"{:#01$b}\", x, mem::size_of::<X>() * 8 + 2);\n\n s.serialize_str(buf.as_ref())\n\n } else {\n\n s.serialize_u64(x.clone().into())\n\n }\n\n}\n\n\n\n/// Like `se_bin` but for use with `#[serde(with)]`.\n\npub mod bin {\n\n pub use super::de_radix as deserialize;\n\n pub use super::se_bin as serialize;\n\n}\n\n\n", 
"file_path": "src/serde.rs", "rank": 9, "score": 163020.46152925387 }, { "content": "/// Serializes an integer as hex.\n\npub fn se_hex<S, X>(x: &X, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n X: LowerHex + Into<u64> + Copy,\n\n{\n\n if s.is_human_readable() {\n\n let mut buf = ArrayBuf::<18>::default();\n\n let _ = write!(buf, \"{:#01$x}\", x, mem::size_of::<X>() * 2 + 2);\n\n s.serialize_str(buf.as_ref())\n\n } else {\n\n s.serialize_u64(x.clone().into())\n\n }\n\n}\n\n\n\n/// Like `se_hex` but for use with `#[serde(with)]`.\n\npub mod hex {\n\n pub use super::de_radix as deserialize;\n\n pub use super::se_hex as serialize;\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 10, "score": 163020.46152925387 }, { "content": "#[cfg(test)]\n\npub fn from_keypair(keypair: &[u8]) -> (Verify256, Sign256) {\n\n let signer = Sign256::from_pkcs8(keypair).unwrap();\n\n let verifier = signer.verifier();\n\n (verifier, signer)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::crypto::sig::Sign as _;\n\n use crate::crypto::sig::Verify as _;\n\n use testutil::data::keys;\n\n use testutil::data::misc_crypto;\n\n\n\n #[test]\n\n #[cfg_attr(miri, ignore)]\n\n fn rsa() {\n\n let (mut verifier, mut signer) = from_keypair(keys::KEY1_RSA_KEYPAIR);\n\n\n\n verifier\n", "file_path": "src/crypto/ring/rsa.rs", "rank": 11, "score": 162336.04315904263 }, { "content": "#[inline(always)]\n\nfn inverse_popcnt(n: usize) -> u8 {\n\n // NOTE: if the `1` below is accientally typed at `u8`, for `n = 8` we will\n\n // get overflow from the shift; instead, we perform the shift using native\n\n // arithmetic.\n\n ((1usize << n) - 1) as _\n\n}\n\n\n\nimpl BitBuf {\n\n /// Creates an empty `BitBuf`.\n\n pub fn new() -> Self {\n\n Self { len: 0, bits: 0 }\n\n }\n\n\n\n /// Creates a new eight-bit `BitBuf` with the given bits.\n\n pub fn from_bits(bits: u8) -> Self {\n\n Self { len: 8, bits }\n\n }\n\n\n\n /// Returns the number of bits currently in the buffer.\n\n pub fn len(&self) -> usize {\n", "file_path": "src/io/bit_buf.rs", "rank": 12, "score": 157031.05946663773 }, { "content": "/// For skipping field serialization if it's set to `true`.\n\n///\n\n/// (Use with `default_to_true()`.\n\npub fn skip_if_true(b: &bool) -> bool {\n\n *b\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 13, "score": 155317.22079029406 }, { "content": "/// Serializes bitflags as a list.\n\npub fn se_bitflags<S, B>(b: &BitFlags<B>, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n B: BitFlag + serde::Serialize,\n\n B::Numeric: Into<u64>,\n\n{\n\n if s.is_human_readable() {\n\n s.collect_seq(b.iter())\n\n } else {\n\n s.serialize_u64(b.bits().into())\n\n }\n\n}\n\n\n\n/// Like `se_bin` but for use with `#[serde(with)]`.\n\npub mod bitflags {\n\n pub use super::de_bitflags as deserialize;\n\n pub use super::se_bitflags as serialize;\n\n}\n", "file_path": "src/serde.rs", "rank": 14, "score": 155074.22905881712 }, { "content": "/// Parses a `NULL`.\n\npub fn null(buf: &mut untrusted::Reader) -> Result<(), Error> {\n\n tagged(Tag::NULL, buf, |mut buf| {\n\n if buf.at_end() {\n\n Ok(())\n\n } else {\n\n Err(Error::BadEncoding)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 15, "score": 146310.9198448749 }, { "content": "/// Returns true if `x` is the default value.\n\npub fn is_default<T: Default + PartialEq>(x: &T) -> bool {\n\n *x == T::default()\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 16, "score": 137306.8904536961 }, { "content": "/// Sends `req` to a 
virtual RoT listening on `localhost:{port}`, using\n\n/// Spdm-over-TCP.\n\n///\n\n/// Blocks until a response comes back.\n\npub fn send_spdm<'a, Cmd: Command<'a, CommandType = spdm::CommandType>>(\n\n port: u16,\n\n req: Cmd::Req,\n\n arena: &'a dyn Arena,\n\n) -> Result<\n\n Result<Cmd::Resp, protocol::Error<'a, Cmd>>,\n\n server::Error<net::SpdmHeader>,\n\n> {\n\n log::info!(\"connecting to 127.0.0.1:{}\", port);\n\n let mut conn = TcpStream::connect((\"127.0.0.1\", port)).map_err(|e| {\n\n log::error!(\"{}\", e);\n\n net::Error::Io(io::Error::Internal)\n\n })?;\n\n let mut writer = Writer::new(net::SpdmHeader {\n\n command: <Cmd::Req as Message>::TYPE,\n\n is_request: false,\n\n version: spdm::Version::MANTICORE,\n\n });\n\n log::info!(\"serializing {}\", type_name::<Cmd::Req>());\n\n req.to_wire(&mut writer)?;\n", "file_path": "e2e/src/support/tcp.rs", "rank": 17, "score": 136367.63040915367 }, { "content": "#[cfg(feature = \"std\")]\n\nfn hex_to_bytes<B: TryFrom<Vec<u8>>, E: de::Error>(\n\n bors: BytesOrStr,\n\n) -> Result<B, E> {\n\n let try_fail = || {\n\n de::Error::custom(format_args!(\n\n \"could not convert to {}\",\n\n type_name::<B>()\n\n ))\n\n };\n\n\n\n let hex = match bors {\n\n BytesOrStr::Bytes(b) => {\n\n return b.into_owned().try_into().map_err(|_| try_fail())\n\n }\n\n BytesOrStr::Str(s) => s,\n\n };\n\n let hex = hex.as_ref();\n\n\n\n if hex.len() % 2 != 0 {\n\n return Err(de::Error::invalid_length(\n", "file_path": "src/serde.rs", "rank": 18, "score": 135458.15691719888 }, { "content": "/// Opens the given input and output files.\n\n///\n\n/// If either file is missing, it is replaced with stdin or stdout, respectively.\n\npub fn stdio(\n\n input_file: Option<impl AsRef<Path>>,\n\n output_file: Option<impl AsRef<Path>>,\n\n) -> (Box<dyn Read>, Box<dyn Write>) {\n\n let input: Box<dyn Read> = match input_file {\n\n Some(path) => {\n\n let file = File::open(path).expect(\"failed to open input file\");\n\n Box::new(BufReader::new(file))\n\n }\n\n None => Box::new(io::stdin()),\n\n };\n\n\n\n let output: Box<dyn Write> = match output_file {\n\n Some(path) => {\n\n let file = File::create(path).expect(\"failed to open output file\");\n\n Box::new(file)\n\n }\n\n None => Box::new(io::stdout()),\n\n };\n\n\n\n (input, output)\n\n}\n", "file_path": "tool/src/util.rs", "rank": 19, "score": 135443.77660528698 }, { "content": "pub fn opt_bool(buf: &mut untrusted::Reader) -> Result<Option<bool>, Error> {\n\n match &opt(Tag::BOOLEAN, buf)? {\n\n None => Ok(None),\n\n Some(b) if b == FALSE => Ok(Some(false)),\n\n Some(b) if b == TRUE => Ok(Some(true)),\n\n _ => Err(Error::BadEncoding),\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct Oid<'cert>(&'cert [u8]);\n\n\n\nimpl<'cert> Oid<'cert> {\n\n pub const fn new(der: &'cert [u8]) -> Self {\n\n Self(der)\n\n }\n\n}\n\n\n\n/// Generates a new [`Oid`] constant with the given components.\n\nmacro_rules! 
oid {\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 20, "score": 133350.85719569834 }, { "content": "/// Sends `req` to a virtual RoT listening on `localhost:{port}`, using\n\n/// Cerberus-over-TCP.\n\n///\n\n/// Blocks until a response comes back.\n\npub fn send_cerberus<\n\n 'a,\n\n Cmd: Command<'a, CommandType = cerberus::CommandType>,\n\n>(\n\n port: u16,\n\n req: Cmd::Req,\n\n arena: &'a dyn Arena,\n\n) -> Result<\n\n Result<Cmd::Resp, protocol::Error<'a, Cmd>>,\n\n server::Error<net::CerberusHeader>,\n\n> {\n\n log::info!(\"connecting to 127.0.0.1:{}\", port);\n\n let mut conn = TcpStream::connect((\"127.0.0.1\", port)).map_err(|e| {\n\n log::error!(\"{}\", e);\n\n net::Error::Io(io::Error::Internal)\n\n })?;\n\n let mut writer = Writer::new(net::CerberusHeader {\n\n command: <Cmd::Req as Message>::TYPE,\n\n });\n\n log::info!(\"serializing {}\", type_name::<Cmd::Req>());\n", "file_path": "e2e/src/support/tcp.rs", "rank": 21, "score": 130315.02883954786 }, { "content": "/// Computes an SP 800-108 KDF with the Cerberus parametrization.\n\nfn sp800_108_hmac256(key: &[u8], label: &[u8], context: &[u8]) -> session::Key {\n\n let key = hmac::Key::new(hmac::HMAC_SHA256, key);\n\n let mut ctx = hmac::Context::with_key(&key);\n\n ctx.update(&1u32.to_be_bytes());\n\n ctx.update(label);\n\n ctx.update(&[0]);\n\n ctx.update(context);\n\n ctx.update(&256u16.to_be_bytes());\n\n let tag = ctx.sign();\n\n\n\n let mut key = session::Key::default();\n\n key.copy_from_slice(tag.as_ref());\n\n key\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::session::Session as _;\n\n\n", "file_path": "src/session/ring.rs", "rank": 22, "score": 128304.64192983042 }, { "content": "/// Parse a single element of a `SEQUENCE`, returning its tag and contents.\n\npub fn any<'cert>(\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<(Tag, untrusted::Input<'cert>), Error> {\n\n let tag = Tag::parse(buf)?;\n\n let len_byte = buf.read_byte()?;\n\n // NOTE: the header is always two bytes long.\n\n\n\n // If `len` has the high bit set, then it is a \"long form\" length.\n\n let len = if len_byte & 0x80 == 0 {\n\n len_byte as usize\n\n } else {\n\n let num_bytes = len_byte & 0x7f;\n\n\n\n if num_bytes == 0 || num_bytes > 4 {\n\n // We only support lengths at most 32 bits.\n\n //\n\n // This also catches indefinite-length constructed objects,\n\n // which we absolutely don't support.\n\n return Err(Error::BadEncoding);\n\n }\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 23, "score": 124907.68366264031 }, { "content": "/// Parses a CWT certificate.\n\npub fn parse<'cert>(\n\n cert: &'cert [u8],\n\n key: Option<&sig::PublicKeyParams<'_>>,\n\n ciphers: &mut impl sig::Ciphers,\n\n) -> Result<Cert<'cert>, Error> {\n\n let buf = untrusted::Input::from(cert);\n\n let cose = buf.read_all(Error::BadEncoding, Cose::parse)?;\n\n\n\n let (issuer, subject, subject_key, ku) =\n\n cose.payload.read_all(Error::BadEncoding, |buf| {\n\n Item::parse(buf)?.read_all(|buf| {\n\n Item::parse(buf)?.into_map()?.walk(|map| {\n\n let iss = Name(\n\n map.must_get(label::CWT_ISS)?.into_utf8()?.as_bytes(),\n\n );\n\n let sub = Name(\n\n map.must_get(label::CWT_SUB)?.into_utf8()?.as_bytes(),\n\n );\n\n\n\n let (_algo, params) =\n", "file_path": "src/cert/cwt/mod.rs", "rank": 24, "score": 124902.69910373152 }, { "content": "/// Parses an X.509 certificate.\n\n///\n\n/// This function performs several aggressive checks to reject any and all\n\n/// certificates that do not fit our profile. 
These include:\n\n/// - Version must be v3.\n\n/// - Extensions must be present.\n\n/// - `keyUsage`, `authorityKeyIdentifier`, `subjectKeyIdentifier` must all\n\n/// be present.\n\n/// - A `keyUsage` with `keyCertSign` and any other usage is rejected.\n\n///\n\n/// All of the above are treated as encoding errors.\n\npub fn parse<'cert>(\n\n raw_cert: &'cert [u8],\n\n format: cert::CertFormat,\n\n key: Option<&sig::PublicKeyParams<'_>>,\n\n ciphers: &mut impl sig::Ciphers,\n\n) -> Result<Cert<'cert>, Error> {\n\n let buf = untrusted::Input::from(raw_cert);\n\n let (mut cert, tbs, sig_algo, sig) =\n\n buf.read_all(Error::BadEncoding, |buf| {\n\n der::tagged(Tag::SEQUENCE, buf, |buf| {\n\n let mark = buf.mark();\n\n let tbs = der::parse(Tag::SEQUENCE, buf)?;\n\n let tbs_bytes =\n\n buf.get_input_between_marks(mark, buf.mark())?;\n\n let sig_algo_bytes = der::parse(Tag::SEQUENCE, buf)?;\n\n let sig_algo =\n\n sig_algo_bytes.read_all(Error::BadEncoding, parse_algo)?;\n\n\n\n let sig = der::bits_total(buf)?;\n\n\n", "file_path": "src/cert/x509/mod.rs", "rank": 25, "score": 124902.69910373152 }, { "content": "/// Deserializes an integer from either a string (which supports hex encoding)\n\n/// or a normal integer.\n\n///\n\n/// Unfortunately, the way this is implemented breaks non-self-describing\n\n/// formats, but Serde support isn't really intended for that anways.\n\npub fn de_radix<'de, D, T>(d: D) -> Result<T, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n Radix<T>: de::Visitor<'de, Value = T>,\n\n{\n\n if d.is_human_readable() {\n\n d.deserialize_any(Radix::<T>(PhantomData))\n\n } else {\n\n d.deserialize_u64(Radix::<T>(PhantomData))\n\n }\n\n}\n\n\n\n/// Like `de_radix` but for use with `#[serde(with)]`.\n\npub mod dec {\n\n pub use super::de_radix as deserialize;\n\n\n\n // We cannot just write `use serde::Serialize::serialize;`, so we need to\n\n // do this silliness instead.\n\n pub fn serialize<S, X>(x: &X, s: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n X: serde::Serialize,\n\n {\n\n x.serialize(s)\n\n }\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 26, "score": 123573.89049935449 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn de_hexstring<'de, D, B>(d: D) -> Result<B, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n B: TryFrom<Vec<u8>>,\n\n{\n\n hex_to_bytes::<B, D::Error>(BytesOrStr::deserialize(d)?)\n\n}\n\n\n\n/// For deserializing a `Vec<Vec<u8>>` from either a string of hex digits or a\n\n/// sequence of bytes.\n", "file_path": "src/serde.rs", "rank": 27, "score": 123569.53658342428 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn de_bytestring<'de, D, B>(d: D) -> Result<B, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n B: TryFrom<Vec<u8>>,\n\n{\n\n match BytesOrStr::deserialize(d)? 
{\n\n BytesOrStr::Bytes(b) => b.into_owned(),\n\n BytesOrStr::Str(s) => s.into_owned().into_bytes(),\n\n }\n\n .try_into()\n\n .map_err(|_| {\n\n de::Error::custom(format_args!(\n\n \"could not covert to {}\",\n\n type_name::<B>()\n\n ))\n\n })\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 28, "score": 123569.53658342428 }, { "content": "/// Parses a non-negative `INTEGER`.\n\n///\n\n/// We reject all integers with the sign bit set.\n\npub fn uint<'cert>(\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<untrusted::Input<'cert>, Error> {\n\n let (tag, data) = any(buf)?;\n\n if tag != Tag::INTEGER {\n\n return Err(Error::BadEncoding);\n\n }\n\n data.read_all(Error::BadEncoding, |buf| {\n\n // `data` must be non-empty.\n\n let first = buf.read_byte()?;\n\n\n\n // `[0x00]` is the valid representation of `0`.\n\n if first == 0 && buf.at_end() {\n\n return Ok(());\n\n }\n\n\n\n // Check the sign bit; negative values are forbidden.\n\n if first & 0x80 != 0 {\n\n return Err(Error::BadEncoding);\n\n }\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 29, "score": 122571.04872168333 }, { "content": "/// Parses an `OBJECT IDENTIFIER`. The only purpose of an OID is to be compared\n\n/// to other OIDs byte-for-byte.\n\npub fn oid<'cert>(\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<Oid<'cert>, Error> {\n\n Ok(Oid::new(parse(Tag::OID, buf)?.as_slice_less_safe()))\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 30, "score": 122571.04872168333 }, { "content": "/// Parses a required element of a `SEQUENCE`.\n\npub fn parse<'cert>(\n\n tag: Tag,\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<untrusted::Input<'cert>, Error> {\n\n opt(tag, buf)?.ok_or(Error::BadEncoding)\n\n}\n\n\n\n/// Parses a required element of a `SEQUENCE`, passing the buffer to `dec` for further decoding.\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 31, "score": 122571.04872168333 }, { "content": "/// Parses an optional element of a `SEQUENCE`.\n\npub fn opt<'cert>(\n\n tag: Tag,\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<Option<untrusted::Input<'cert>>, Error> {\n\n if !buf.peek(tag.0) {\n\n return Ok(None);\n\n }\n\n let (_, data) = any(buf)?;\n\n Ok(Some(data))\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 32, "score": 122571.04872168333 }, { "content": "#[inline]\n\npub fn bits_total<'cert>(\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<untrusted::Input<'cert>, Error> {\n\n bits_checked(buf, true)\n\n}\n\n\n\n/// Parses a `BIT STRING`, ensuring that its trailing bits are zero.\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 33, "score": 120377.48111625528 }, { "content": "#[inline]\n\npub fn bits_partial<'cert>(\n\n buf: &mut untrusted::Reader<'cert>,\n\n) -> Result<untrusted::Input<'cert>, Error> {\n\n bits_checked(buf, false)\n\n}\n\n\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 34, "score": 120377.48111625528 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn de_hexstrings<'de, D, B>(d: D) -> Result<Box<[B]>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n B: TryFrom<Vec<u8>>,\n\n{\n\n let hexen = Vec::<BytesOrStr>::deserialize(d)?;\n\n let mut bufs = Vec::with_capacity(hexen.len());\n\n for hex in hexen {\n\n bufs.push(hex_to_bytes::<B, D::Error>(hex)?);\n\n }\n\n Ok(bufs.into_boxed_slice())\n\n}\n\n\n\n/// For serializing a `Vec<u8>` as a hexstring.\n", "file_path": "src/serde.rs", "rank": 35, "score": 119657.98464966103 }, { "content": "/// Starts a server loop for serving PA-RoT requests, as described by `opts`.\n\npub fn 
serve(opts: Options) -> ! {\n\n log::info!(\"configuring server...\");\n\n let networking = capabilities::Networking {\n\n max_message_size: opts.max_message_size,\n\n max_packet_size: opts.max_packet_size,\n\n mode: capabilities::RotMode::Platform,\n\n roles: capabilities::BusRole::Host.into(),\n\n };\n\n\n\n let timeouts = capabilities::Timeouts {\n\n regular: opts.regular_timeout,\n\n crypto: opts.crypto_timeout,\n\n };\n\n\n\n let identity = fakes::Identity::new(\n\n &opts.firmware_version,\n\n opts.vendor_firmware_versions\n\n .iter()\n\n .map(|(k, v)| (*k, v.as_slice())),\n\n &opts.unique_device_identity,\n", "file_path": "e2e/src/support/rot.rs", "rank": 36, "score": 118154.19345540897 }, { "content": "/// Deserializes bitflags from a list *or* from an integer value.\n\npub fn de_bitflags<'de, D, B>(d: D) -> Result<BitFlags<B>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n B: BitFlag + Deserialize<'de> + fmt::Debug,\n\n Radix<B::Numeric>: de::Visitor<'de, Value = B::Numeric>,\n\n{\n\n if d.is_human_readable() {\n\n d.deserialize_any(BitsVisitor::<B>(PhantomData))\n\n } else {\n\n d.deserialize_u64(BitsVisitor::<B>(PhantomData))\n\n }\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 37, "score": 117290.0454249423 }, { "content": "#[inline]\n\npub fn tagged<'cert, T>(\n\n tag: Tag,\n\n buf: &mut untrusted::Reader<'cert>,\n\n dec: impl FnOnce(&mut untrusted::Reader<'cert>) -> Result<T, Error>,\n\n) -> Result<T, Error> {\n\n parse(tag, buf)?.read_all(Error::BadEncoding, dec)\n\n}\n\n\n\n/// Parses a `BIT STRING`, ensuring that its length is divisible by 8.\n", "file_path": "src/cert/x509/der/mod.rs", "rank": 38, "score": 115955.85078843657 }, { "content": "#[cfg(feature = \"std\")]\n\nstruct BytesOrStrVisitor;\n\n#[cfg(feature = \"std\")]\n\nimpl<'de> Deserialize<'de> for BytesOrStr<'de> {\n\n fn deserialize<D>(d: D) -> Result<Self, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n d.deserialize_any(BytesOrStrVisitor)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl<'de> de::Visitor<'de> for BytesOrStrVisitor {\n\n type Value = BytesOrStr<'de>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"byte array or string\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> Result<BytesOrStr<'de>, A::Error>\n", "file_path": "src/serde.rs", "rank": 39, "score": 114441.16871150986 }, { "content": "#[test]\n\nfn u32_00() {\n\n let mut reader = Reader::new(Input::from(data::der::DOUBLE_ZERO));\n\n assert!(der::u32(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 40, "score": 108931.32972583297 }, { "content": "#[test]\n\nfn u32_huge() {\n\n let mut reader = Reader::new(Input::from(data::der::HUGE_INT));\n\n assert!(der::u32(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 41, "score": 106438.26931121424 }, { "content": "#[test]\n\nfn u32_neg() {\n\n let mut reader = Reader::new(Input::from(data::der::NEGATIVE));\n\n assert!(der::u32(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 42, "score": 106438.26931121424 }, { "content": "#[cfg(feature = \"arbitrary-derive\")]\n\nfn arbitrary_bitflags<B>(\n\n u: &mut libfuzzer_sys::arbitrary::Unstructured,\n\n) -> libfuzzer_sys::arbitrary::Result<enumflags2::BitFlags<B>>\n\nwhere\n\n B: enumflags2::BitFlag,\n\n B::Numeric: libfuzzer_sys::arbitrary::Arbitrary,\n\n{\n\n Ok(enumflags2::BitFlags::from_bits_truncate(u.arbitrary()?))\n\n}\n", "file_path": "src/protocol/mod.rs", 
"rank": 43, "score": 105669.46033194003 }, { "content": "struct ExpectedByDisplay<T>(T);\n\nimpl<T: fmt::Display> de::Expected for ExpectedByDisplay<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 44, "score": 105312.35535757791 }, { "content": "#[test]\n\nfn u32_42() -> Result {\n\n Input::from(data::der::FORTY_TWO).read_all(BadEncoding, |buf| {\n\n assert_eq!(der::u32(buf)?, 42);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 45, "score": 103123.49237221698 }, { "content": "#[test]\n\nfn u32_9000() -> Result {\n\n Input::from(data::der::NINE_THOUSAND).read_all(BadEncoding, |buf| {\n\n assert_eq!(der::u32(buf)?, 9000);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 46, "score": 103123.49237221698 }, { "content": "#[test]\n\nfn u32_128() -> Result {\n\n Input::from(data::der::ONE_TWENTY_EIGHT).read_all(BadEncoding, |buf| {\n\n assert_eq!(der::u32(buf)?, 128);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 47, "score": 103123.49237221698 }, { "content": "#[test]\n\nfn u32_0() -> Result {\n\n Input::from(data::der::ZERO).read_all(BadEncoding, |buf| {\n\n assert_eq!(der::u32(buf)?, 0);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 48, "score": 103123.49237221698 }, { "content": "/// Helper for `de_radix`.\n\nstruct BitsVisitor<B>(PhantomData<B>);\n\n\n\nimpl<'de, B> de::Visitor<'de> for BitsVisitor<B>\n\nwhere\n\n B: BitFlag + Deserialize<'de> + fmt::Debug,\n\n Radix<B::Numeric>: de::Visitor<'de, Value = B::Numeric>,\n\n{\n\n type Value = BitFlags<B>;\n\n\n\n fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n write!(\n\n f,\n\n \"integer representation of {} or list of enum values\",\n\n type_name::<B>()\n\n )\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> Result<BitFlags<B>, A::Error>\n\n where\n\n A: de::SeqAccess<'de>,\n", "file_path": "src/serde.rs", "rank": 49, "score": 98109.89964433786 }, { "content": "/// Provides the \"request\" half of a transaction with a host.\n\n///\n\n/// See [`HostPort`] for more information.\n\npub trait HostRequest<'req, Header> {\n\n /// Returns the header sent by the host for this request.\n\n ///\n\n /// This function should not be called after calling `reply()`.\n\n fn header(&self) -> Result<Header, net::Error>;\n\n\n\n /// Returns the raw byte stream for the payload of the request.\n\n ///\n\n /// This function should not be called after calling `reply()`.\n\n fn payload(&mut self) -> Result<&mut dyn ReadZero<'req>, net::Error>;\n\n\n\n /// Replies to this request..\n\n ///\n\n /// Calling this function performs sufficient transport-level operations to\n\n /// begin a response, before handing off actually composing the payload to\n\n /// the caller via the returned [`HostResponse`].\n\n fn reply(\n\n &mut self,\n\n header: Header,\n\n ) -> Result<&mut dyn HostResponse<'req>, net::Error>;\n\n}\n\n\n", "file_path": "src/net/host.rs", "rank": 50, "score": 97066.43772340287 }, { "content": "/// Parses an RFC3279 algorithm identifier.\n\nfn parse_algo(buf: &mut untrusted::Reader) -> Result<sig::Algo, Error> {\n\n match der::oid(buf)? 
{\n\n oid::RSA_PKCS1_SHA256 => {\n\n der::null(buf)?;\n\n Ok(sig::Algo::RsaPkcs1Sha256)\n\n }\n\n _ => Err(Error::UnknownAlgorithm),\n\n }\n\n}\n\n\n", "file_path": "src/cert/x509/mod.rs", "rank": 51, "score": 93875.11473113712 }, { "content": "type Req<'a> = <C as Command<'a>>::Req;\n\n\n\nfuzz_target!(|data: AsStatic<'static, Req<'static>>| {\n\n let mut out = [0u8; 1024];\n\n let _ = Req::borrow(&data).to_wire(&mut &mut out[..]);\n\n});\n\n\n", "file_path": "fuzz/gen/manticore_protocol_request_counter_RequestCounter__req_to_wire.rs", "rank": 52, "score": 93349.97680113587 }, { "content": "type Resp<'a> = <C as Command<'a>>::Resp;\n\n\n\nfuzz_target!(|data: AsStatic<'static, Resp<'static>>| {\n\n let mut out = [0u8; 1024];\n\n let _ = Resp::borrow(&data).to_wire(&mut &mut out[..]);\n\n});\n\n\n", "file_path": "fuzz/gen/manticore_protocol_request_counter_RequestCounter__resp_to_wire.rs", "rank": 53, "score": 93349.97680113587 }, { "content": "type Resp<'a> = <C as Command<'a>>::Resp;\n\n\n\nfuzz_target!(|data: AsStatic<'static, Resp<'static>>| {\n\n let mut out = [0u8; 1024];\n\n let _ = Resp::borrow(&data).to_wire(&mut &mut out[..]);\n\n});\n\n\n", "file_path": "fuzz/gen/manticore_protocol_spdm_get_caps_GetCaps__resp_to_wire.rs", "rank": 54, "score": 91739.59968450178 }, { "content": "type Req<'a> = <C as Command<'a>>::Req;\n\n\n\nfuzz_target!(|data: AsStatic<'static, Req<'static>>| {\n\n let mut out = [0u8; 1024];\n\n let _ = Req::borrow(&data).to_wire(&mut &mut out[..]);\n\n});\n\n\n", "file_path": "fuzz/gen/manticore_protocol_spdm_get_caps_GetCaps__req_to_wire.rs", "rank": 55, "score": 91739.59968450178 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Options {\n\n /// Internal flag.\n\n #[structopt(long)]\n\n start_pa_rot_with_options: Option<String>,\n\n}\n\n\n", "file_path": "e2e/src/main.rs", "rank": 56, "score": 74775.94843469377 }, { "content": "struct Connection {\n\n req_nonce: Box<[u8]>,\n\n resp_nonce: Box<[u8]>,\n\n keys: Keys,\n\n}\n\n\n", "file_path": "src/session/ring.rs", "rank": 57, "score": 74770.79970432268 }, { "content": "#[derive(Default)]\n\nstruct Extensions {\n\n basic_constraints: Option<cert::BasicConstraints>,\n\n key_usage: Option<KeyUsage>,\n\n}\n\n\n", "file_path": "src/cert/x509/mod.rs", "rank": 58, "score": 73339.8231355935 }, { "content": "#[derive(Debug)]\n\nstruct BasicConstraints {\n\n is_ca: bool,\n\n path_len_constraint: Option<u32>,\n\n}\n\n\n\n/// A parse error for a [`Cert`].\n\n///\n\n/// Note: `Error: From<untrusted::EndOfInput>` is an *implementation detail*\n\n/// that should not be relied upon. 
We reserve the right to break this but\n\n/// Rust does not provide a way to scope `impl` blocks.\n\n#[derive(Clone, Debug)]\n\npub enum Error {\n\n /// Indicates that the signature is not supported by the [`sig::Ciphers`] used.\n\n UnsupportedSig,\n\n /// Indicates that the encoding (e.g., DER or CBOR) was invalid for some\n\n /// reason.\n\n BadEncoding,\n\n /// Indicates that a low-level I/O error occured while parsing a cert.\n\n Io(io::Error),\n\n /// An algorithm specified in a certificate was not known to Manticore.\n", "file_path": "src/cert/mod.rs", "rank": 59, "score": 73339.8231355935 }, { "content": "/// A `Ciphers` that blindly accepts all signatures.\n\nstruct NoVerify;\n\n\n\nimpl sig::Verify for NoVerify {\n\n fn verify(\n\n &mut self,\n\n _: &[&[u8]],\n\n _: &[u8],\n\n ) -> Result<(), sig::Error> {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl sig::Ciphers for NoVerify {\n\n fn negotiate(&self, _: &mut capabilities::Crypto) {}\n\n fn verifier<'a>(\n\n &'a mut self,\n\n _: sig::Algo,\n\n _: &sig::PublicKeyParams,\n\n ) -> Option<&'a mut dyn sig::Verify> {\n\n Some(self)\n", "file_path": "fuzz/targets/x509_unsigned.rs", "rank": 60, "score": 73339.8231355935 }, { "content": "/// Helper struct for exposing a TCP stream as a Manticore reader.\n\nstruct TcpReader {\n\n tcp: TcpStream,\n\n len: usize,\n\n}\n\nimpl io::Read for TcpReader {\n\n fn read_bytes(&mut self, out: &mut [u8]) -> Result<(), io::Error> {\n\n let Self { tcp, len } = self;\n\n if *len < out.len() {\n\n return Err(io::Error::BufferExhausted);\n\n }\n\n tcp.read_exact(out).map_err(|e| {\n\n log::error!(\"{}\", e);\n\n io::Error::Internal\n\n })?;\n\n *len -= out.len();\n\n Ok(())\n\n }\n\n\n\n fn remaining_data(&self) -> usize {\n\n self.len\n\n }\n\n}\n\n#[allow(unsafe_code)]\n\nunsafe impl io::ReadZero<'_> for TcpReader {}\n\n\n", "file_path": "e2e/src/support/tcp.rs", "rank": 61, "score": 72006.86531091842 }, { "content": "#[derive(FromBytes)]\n\n#[repr(C)]\n\nstruct FwRegionRange {\n\n start_addr: u32,\n\n end_addr: u32,\n\n}\n\n\n\nimpl FwRegion<'_> {\n\n /// Returns whether this region must be validated on boot, rather than just\n\n /// when loading a new firmware update.\n\n pub fn must_validate_on_boot(&self) -> bool {\n\n (self.header.flags & 1) == 1\n\n }\n\n\n\n /// Returns the raw encoded flags for this element.\n\n pub fn raw_flags(&self) -> u8 {\n\n self.header.flags\n\n }\n\n\n\n /// Returns the hash that this region is expected to conform to.\n\n pub fn image_hash(&self) -> (hash::Algo, &[u8]) {\n\n let hash_type = match self.header.hash_type {\n", "file_path": "src/manifest/pfm.rs", "rank": 62, "score": 72001.85234683374 }, { "content": "#[derive(FromBytes)]\n\n#[repr(C)]\n\nstruct FwRegionHeader {\n\n hash_type: u8,\n\n region_count: u8,\n\n flags: u8,\n\n _reserved: u8,\n\n}\n\n\n", "file_path": "src/manifest/pfm.rs", "rank": 63, "score": 72001.85234683374 }, { "content": "fn main() {\n\n logging_setup();\n\n for (i, arg) in std::env::args_os().enumerate() {\n\n log::info!(\"argv[{}] = {:?}\", i, arg);\n\n }\n\n\n\n let opts = Options::from_args();\n\n if let Some(pa_opts) = &opts.start_pa_rot_with_options {\n\n use support::rot::*;\n\n let opts = serde_json::from_str::<Options>(pa_opts).unwrap();\n\n serve(opts);\n\n }\n\n}\n", "file_path": "e2e/src/main.rs", "rank": 64, "score": 71790.16094116727 }, { "content": "fn main() {\n\n match CliCommand::from_args() {\n\n CliCommand::Message(m) => m.run(),\n\n CliCommand::Manifest(m) => m.run(),\n\n }\n\n}\n", "file_path": "tool/src/main.rs", "rank": 65, 
"score": 71790.16094116727 }, { "content": "#[cfg_attr(test, ctor::ctor)]\n\nfn logging_setup() {\n\n let pid = std::process::id();\n\n env_logger::builder()\n\n .format(move |#[allow(unused)] buf, record| {\n\n // Log to stderr in tests, in order to trigger output capture.\n\n #[cfg(test)]\n\n macro_rules! logln {\n\n ($($tt:tt)*) => {eprintln!($($tt)*)};\n\n }\n\n #[cfg(not(test))]\n\n macro_rules! logln {\n\n ($($tt:tt)*) => {{\n\n use std::io::Write;\n\n writeln!(buf, $($tt)*)?\n\n }};\n\n }\n\n\n\n for line in record.args().to_string().trim().lines() {\n\n logln!(\n\n \"[{level}{pid} {file}:{line}] {msg}\",\n", "file_path": "e2e/src/main.rs", "rank": 66, "score": 70377.15187059445 }, { "content": "#[test]\n\nfn challenge() {\n\n use manticore::protocol::challenge::*;\n\n use manticore::protocol::get_cert::*;\n\n use manticore::protocol::get_digests::*;\n\n use manticore::protocol::key_exchange::*;\n\n\n\n let mut h = ring::hash::Engine::new();\n\n let virt = rot::Virtual::spawn(&rot::Options {\n\n cert_chain: vec![\n\n x509::CHAIN1.to_vec(),\n\n x509::CHAIN2.to_vec(),\n\n x509::CHAIN3.to_vec(),\n\n ],\n\n cert_format: CertFormat::RiotX509,\n\n alias_keypair: Some(rot::KeyPairFormat::RsaPkcs8(\n\n keys::KEY3_RSA_KEYPAIR.to_vec(),\n\n )),\n\n ..Default::default()\n\n });\n\n\n", "file_path": "e2e/src/tests/challenge.rs", "rank": 67, "score": 70377.15187059445 }, { "content": "/// Utility function for reading `count` zeroes in a row.\n\nfn expect_zeros(\n\n r: &mut (impl Read + ?Sized),\n\n count: usize,\n\n) -> Result<(), wire::Error> {\n\n for _ in 0..count {\n\n if r.read_le::<u8>()? != 0 {\n\n return Err(wire::Error::OutOfRange);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/protocol/spdm/mod.rs", "rank": 68, "score": 69055.9807892552 }, { "content": "#[test]\n\n#[cfg_attr(miri, ignore)]\n\nfn explicit_key() {\n\n let cert = Cert::parse(\n\n data::x509::SUB_SIGNED,\n\n CertFormat::RiotX509,\n\n Some(&PublicKeyParams::Rsa {\n\n modulus: keys::KEY1_RSA_MOD,\n\n exponent: keys::KEY1_RSA_EXP,\n\n }),\n\n &mut ring::sig::Ciphers::new(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(cert.subject(), cert.issuer());\n\n assert!(cert.supports_cert_signing());\n\n assert!(cert.is_ca_cert().unwrap());\n\n assert!(!cert.is_within_path_len_constraint(2));\n\n}\n\n\n\nmacro_rules! 
table_test {\n\n ([$pred:ident] $($test:ident:$data:ident,)*) => {$(\n", "file_path": "src/cert/x509/test.rs", "rank": 69, "score": 69055.9807892552 }, { "content": "#[test]\n\nfn test() {\n\n assert_eq!(raw_cbor!(0:0), [0]);\n\n assert_eq!(raw_cbor!(1:23), [0b001_10111]);\n\n assert_eq!(raw_cbor!(1:24), [0b001_11000, 24]);\n\n assert_eq!(raw_cbor!(1:256), [0b001_11001, 1, 0]);\n\n assert_eq!(raw_cbor!(1:65536), [0b001_11010, 0, 1, 0, 0]);\n\n assert_eq!(\n\n raw_cbor!(1:(1 << 32)),\n\n [0b001_11011, 0, 0, 0, 1, 0, 0, 0, 0]\n\n );\n\n assert_eq!(raw_cbor!(4@2:5), [0b100_11001, 0, 5]);\n\n\n\n assert_eq!(\n\n raw_cbor!(2 { \"hello\" }),\n\n [0b010_00101, b'h', b'e', b'l', b'l', b'o']\n\n );\n\n\n\n assert_eq!(raw_cbor!(4 []), [0b100_00000]);\n\n\n\n assert_eq!(raw_cbor!(4 [\"a\", \"b\",]), [0b100_00010, b'a', b'b'],);\n\n}\n", "file_path": "src/cert/cwt/cbor/macros.rs", "rank": 70, "score": 69055.9807892552 }, { "content": "#[test]\n\n#[cfg_attr(miri, ignore)]\n\nfn self_signed() {\n\n let cert = Cert::parse(\n\n data::x509::SELF_SIGNED,\n\n CertFormat::RiotX509,\n\n None,\n\n &mut ring::sig::Ciphers::new(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(cert.subject(), cert.issuer());\n\n assert!(cert.supports_cert_signing());\n\n assert!(cert.is_ca_cert().unwrap());\n\n assert!(!cert.is_within_path_len_constraint(2));\n\n}\n\n\n", "file_path": "src/cert/x509/test.rs", "rank": 71, "score": 69055.9807892552 }, { "content": "#[test]\n\n#[cfg_attr(miri, ignore)]\n\nfn self_signed() {\n\n let cwt = TestCwt {\n\n issuer: \"my cool ca\",\n\n subject: \"my cool ca\",\n\n spki: PublicKeyParams::Rsa {\n\n modulus: keys::KEY1_RSA_MOD,\n\n exponent: keys::KEY1_RSA_EXP,\n\n },\n\n key_usage: &[0b0010_0000],\n\n issuer_key: keys::KEY1_RSA_KEYPAIR,\n\n };\n\n\n\n let data = cwt.encode();\n\n let cert = Cert::parse(\n\n &data,\n\n CertFormat::OpenDiceCwt,\n\n None,\n\n &mut ring::sig::Ciphers::new(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(cert.subject(), cert.issuer());\n\n assert!(cert.supports_cert_signing());\n\n}\n", "file_path": "src/cert/cwt/test.rs", "rank": 72, "score": 69055.9807892552 }, { "content": "fn parse_extn(\n\n buf: &mut untrusted::Reader,\n\n extns: &mut Extensions,\n\n) -> Result<(), Error> {\n\n der::tagged(Tag::SEQUENCE, buf, |buf| {\n\n let oid = der::oid(buf)?;\n\n let is_critical = der::opt_bool(buf)?.unwrap_or(false);\n\n der::tagged(Tag::OCTET_STRING, buf, |buf| match oid {\n\n oid::KEY_USAGE => {\n\n if extns.key_usage.is_some() {\n\n return Err(Error::BadEncoding);\n\n }\n\n\n\n der::bits_partial(buf)?.read_all(Error::BadEncoding, |buf| {\n\n let bytes = buf.read_bytes_to_end().as_slice_less_safe();\n\n extns.key_usage = Some(KeyUsage::from_be(bytes)?);\n\n Ok(())\n\n })\n\n }\n\n oid::BASIC_CONSTRAINTS => {\n", "file_path": "src/cert/x509/mod.rs", "rank": 73, "score": 69055.9807892552 }, { "content": "#[test]\n\nfn uint_00() {\n\n let mut reader = Reader::new(Input::from(data::der::DOUBLE_ZERO));\n\n assert!(der::uint(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 74, "score": 69055.9807892552 }, { "content": "#[test]\n\nfn indefinite_any() {\n\n let mut reader = Reader::new(Input::from(data::der::INDEFINITE_ANY));\n\n assert!(der::any(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 75, "score": 69055.9807892552 }, { "content": "/// The \"inner\" state of the `HostPort`. 
This type is intended to carry the state\n\n/// and functionality for an in-process request/response flow, without making it\n\n/// accessible to outside callers except through the associated [`manticore::net`]\n\n/// trait objects.\n\n///\n\n/// Most implementations of `HostPort` will follow this \"nesting doll\" pattern.\n\n///\n\n/// This type implements [`HostRequest`], [`HostReply`], and [`manticore::io::Read`],\n\n/// though users may only move from one trait implementation to the other by calling\n\n/// methods like `reply()` and `payload()`.\n\nstruct Inner<H> {\n\n listener: TcpListener,\n\n // State for `HostRequest`: a parsed header, the length of the payload, and\n\n // a stream to read it from.\n\n stream: Option<(H, usize, TcpStream)>,\n\n // State for `HostResponse`: a `Writer` to dump the response bytes into.\n\n output_buffer: Option<Writer<H>>,\n\n}\n\n\n\nimpl<H> TcpHostPort<H> {\n\n /// Binds a new `TcpHostPort` to an open port.\n\n pub fn bind() -> Result<Self, net::Error> {\n\n let listener = TcpListener::bind((\"127.0.0.1\", 0)).map_err(|e| {\n\n log::error!(\"{}\", e);\n\n net::Error::Io(io::Error::Internal)\n\n })?;\n\n Ok(Self(Inner {\n\n listener,\n\n stream: None,\n\n output_buffer: None,\n", "file_path": "e2e/src/support/tcp.rs", "rank": 76, "score": 68423.54029887897 }, { "content": "/// A helper for constructing X-over-TCP messages, for `X in [Cerberus, Spdm]`.\n\n///\n\n/// Because an X-over-TCP header requires a length prefix for the payload,\n\n/// we need to buffer the entire reply before writing the header.\n\n///\n\n/// This type implements [`manticore::io::Write`].\n\nstruct Writer<H> {\n\n header: H,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl<H: Header> Writer<H> {\n\n /// Creates a new `Writer` that will encode the given abstract `header`.\n\n pub fn new(header: H) -> Self {\n\n Self {\n\n header,\n\n buf: Vec::new(),\n\n }\n\n }\n\n\n\n /// Flushes the buffered data to the given [`std::io::Write`] (usually, a\n\n /// [`TcpStream`]).\n\n pub fn finish(self, w: impl std::io::Write) -> Result<(), net::Error> {\n\n self.header.to_tcp(&self.buf, w)\n\n }\n\n}\n", "file_path": "e2e/src/support/tcp.rs", "rank": 77, "score": 68418.51967066765 }, { "content": "/// A COSE Sign1 structure.\n\nstruct Cose<'cert> {\n\n algo: sig::Algo,\n\n protected_bytes: untrusted::Input<'cert>,\n\n payload: untrusted::Input<'cert>,\n\n signature: &'cert [u8],\n\n\n\n // The COSE RFC isn't super clear on how you're meant to use the key id,\n\n // so we ignore it for now.\n\n #[allow(unused)]\n\n kid: Option<&'cert [u8]>,\n\n}\n\n\n\nimpl<'cert> Cose<'cert> {\n\n fn parse(buf: &mut untrusted::Reader<'cert>) -> Result<Cose<'cert>, Error> {\n\n struct Headers<'cert> {\n\n algo: Option<sig::Algo>,\n\n kid: Option<&'cert [u8]>,\n\n }\n\n\n\n fn parse_headers<'cert>(\n", "file_path": "src/cert/cwt/mod.rs", "rank": 78, "score": 68414.31795944541 }, { "content": "/// Represents a place that bytes can be read from, such as a `&[u8]`.\n\n///\n\n/// # Relation with [`std::io::Read`]\n\n/// [`std::io::Read`] is distinct from `Read`, since `Read` must know,\n\n/// a-priori, the total length of the underlying buffer.\n\n///\n\n/// The recommended way to use a [`std::io::Read`] with a `manticore` API is to\n\n/// use `read_to_end(&mut buf)` and to then pass `&mut buf[..]` into\n\n/// `manticore`. 
We hope to remove this restriction in the future.\n\npub trait Read {\n\n /// Reads exactly `n` bytes from `self`.\n\n fn read_bytes(&mut self, out: &mut [u8]) -> Result<(), io::Error>;\n\n\n\n /// Returns the number of bytes still available to read.\n\n fn remaining_data(&self) -> usize;\n\n}\n\nimpl dyn Read {} // Ensure object-safety.\n\n\n\n/// Convenience functions for reading integers from a [`Read`].\n\n#[extend::ext(name = ReadInt)]\n\npub impl<R: Read + ?Sized> R {\n\n /// Reads a little-endian integer.\n\n #[inline]\n\n fn read_le<I: LeInt>(&mut self) -> Result<I, io::Error> {\n\n I::read_from(self)\n\n }\n\n}\n\n\n\n/// A [`Read`] that may, as an optimization, zero-copy read data for the\n", "file_path": "src/io/read.rs", "rank": 79, "score": 68394.24802461977 }, { "content": "/// A manager for a Cerberus session, usable by either the host (the client)\n\n/// or the device (the server).\n\n///\n\n/// A `Session` is a state machine with four states:\n\n/// 1. \"Inactive\": the starting state, indicating no session.\n\n/// - This state may be entered via [`Session::destroy_session()`] at\n\n/// any time.\n\n/// 2. \"Ready\": the session is configured with the challenge nonces, ready for\n\n/// ECDH to begin.\n\n/// - This state may be entered via [`Session::create_session()`] at\n\n/// any time.\n\n/// 3. \"Agreement\": an ephemeral ECDH private key has been created, and is\n\n/// pending receipt of the peer's public key to complete the ECDH\n\n/// transaction.\n\n/// - This state may be entered via [`Session::begin_ecdh()`], but only\n\n/// from the \"Ready\" or \"Active\" states.\n\n/// - [`Session::begin_ecdh()`] may be called on an active session to\n\n/// reestablish it.\n\n/// 4. \"Active\": a session has been established, and [`Session::aes_key()`]\n\n/// and [`Session::hmac_key()`] will produce the negotiated keys.\n\n/// - This state may be entered via [`Session::finish_ecdh()`], but only\n\n/// from the \"Ready\" state.\n\n///\n\n/// All transition failures must leave the `Session` as-is, except for\n\n/// [`Session::finish_ecdh()`], which must bring it back to the \"Ready\" state\n\n/// on failure. It is a programmer error to call [`Session::finish_ecdh()`]\n\n/// out-of-order, since it's intended to be called with\n\n/// [`Session::begin_ecdh()`] in the context of request. This transition can\n\n/// only fail if the entire ECDH exchange fails.\n\n///\n\n/// Users will move through the states like so:\n\n/// 1. When a device receives a [`protocol::challenge`] request that wants to\n\n/// establish a session later, its `Session` enters the \"ready\" state; once\n\n/// the response arrives, the host enters the \"ready\" state too.\n\n/// 2. The host enters the \"agreement\" state and sends a\n\n/// [`protocol::key_exchange`] request to the device with the fresh public\n\n/// key.\n\n/// 3. The device also enters the \"agreement\" state, and then immediately\n\n/// transitions to \"active\" using the host's public key. It then sends its\n\n/// public key, along with an HMAC of its certificate using the session's\n\n/// HMAC key, to the host.\n\n/// 4. After verifying the signature over the public keys, the host enters the\n\n/// \"active\" state using the device's public key, and verifies the HMAC in\n\n/// the response.\n\n/// 5. The device returns to the \"inactive\" state when receiving a\n\n/// session-destroying [`protocol::key_exchange`] request. 
Upon success,\n\n/// the host also enters the \"inactive\" state.\n\n///\n\n/// See the [module documentation][self] for information on the key derivation\n\n/// function used to create the HMAC and encryption keys.\n\npub trait Session {\n\n /// Begins a new session.\n\n ///\n\n /// Sessions begin when a successful [`protocol::challenge`] command is\n\n /// completed. The challenge produces two nonces as a byproduct, which are\n\n /// used as the basis for the session.\n\n ///\n\n /// This function destroys any prior existing session.\n\n fn create_session(\n\n &mut self,\n\n req_nonce: &[u8],\n\n resp_nonce: &[u8],\n\n ) -> Result<(), Error>;\n\n\n\n /// Destroys a session without creating a new one.\n\n fn destroy_session(&mut self) -> Result<(), Error>;\n\n\n\n /// Returns the maximum number of bytes needed to encode `our_key` in\n\n /// [`Self::begin_ecdh()`].\n\n fn ephemeral_bytes(&self) -> usize;\n", "file_path": "src/session/mod.rs", "rank": 80, "score": 68390.45771511312 }, { "content": "/// A hashing engine, which maintains the state for one digest.\n\n///\n\n/// Callers should not use the `raw` API directly; [`Hasher`] is a type-safe\n\n/// wrapper that manages a session with an `Engine`.\n\n///\n\n/// Implementers only need to provide the \"raw\" form of the API; the remaining\n\n/// functions are convenience helpers.\n\npub trait Engine {\n\n /// Returns whether this engine supports the given algorithm.\n\n fn supports(&mut self, algo: Algo) -> bool;\n\n\n\n /// Begins a new hashing operation, discarding any previous state.\n\n ///\n\n /// If `key` is `Some`, this becomes an HMAC operation instead, using that\n\n /// as the key.\n\n fn start_raw(\n\n &mut self,\n\n algo: Algo,\n\n key: Option<&[u8]>,\n\n ) -> Result<(), Error>;\n\n\n\n /// Adds `data` to the hashing state.\n\n fn write_raw(&mut self, data: &[u8]) -> Result<(), Error>;\n\n\n\n /// Completes the hashing/HMAC operation.\n\n ///\n\n /// Calling this function multiple times will have an unspecified effect.\n", "file_path": "src/crypto/hash.rs", "rank": 81, "score": 68389.56886823857 }, { "content": "/// Provides access to \"chip identity\" information of various types.\n\npub trait Identity {\n\n /// Returns a string indicating the RoT's firmware version.\n\n ///\n\n /// Although not enforced, it is recommended that this be an ASCII string.\n\n fn firmware_version(&self) -> &[u8; 32];\n\n\n\n /// Returns a string indicating the Vendor firmware version at the specified slot.\n\n fn vendor_firmware_version(&self, slot: u8) -> Option<&[u8; 32]> {\n\n let _ = slot;\n\n None\n\n }\n\n\n\n /// Returns the \"unique device identity\" for the device. This is a binary\n\n /// value of unspecified format.\n\n fn unique_device_identity(&self) -> &[u8];\n\n}\n\nimpl dyn Identity {} // Ensure object-safe.\n\n\n", "file_path": "src/hardware/mod.rs", "rank": 82, "score": 68386.30508307018 }, { "content": "/// Represents a place that bytes can be written to, such as a `&[u8]`.\n\n///\n\n/// # Relation with [`std::io::Write`]\n\n/// [`std::io::Write`] provides approximately a superset of `Write`, with\n\n/// more detailed errors. 
[`StdWrite`] provides an implementation of\n\n/// `Write` in terms of [`std::io::Write`].\n\npub trait Write {\n\n /// Attempt to write `buf` exactly to `self`.\n\n ///\n\n /// This function does not perform partial writes: it will either block\n\n /// until completion or return an error.\n\n fn write_bytes(&mut self, buf: &[u8]) -> Result<(), io::Error>;\n\n\n\n /// Writes a little-endian integer.\n\n ///\n\n /// # Note\n\n /// Do not implement this function yourself. Callers are not required to\n\n /// call it in order to actually perform a write, so whether or not it is\n\n /// called is an implementation detail.\n\n #[inline]\n\n fn write_le<I: LeInt>(&mut self, val: I) -> Result<(), io::Error>\n\n where\n\n Self: Sized,\n\n {\n\n val.write_to(self)\n\n }\n", "file_path": "src/io/write.rs", "rank": 83, "score": 68385.43928756283 }, { "content": "/// A cryptographically-secure random number generator.\n\n///\n\n/// The sole purpose of this type is to fill buffers with random bytes,\n\n/// specifically for nonces or generating secrets as part of key exchange.\n\n///\n\n/// `Csrng`s must already be seeded with sufficient entropy; creating new\n\n/// random number generators is beyond the scope of this trait.\n\npub trait Csrng {\n\n /// Fills `buf` with random bytes.\n\n fn fill(&mut self, buf: &mut [u8]) -> Result<(), Error>;\n\n}\n\nimpl dyn Csrng {} // Ensure object-safe.\n", "file_path": "src/crypto/csrng.rs", "rank": 84, "score": 68385.3929543157 }, { "content": "/// A collection of ciphers that are provided to certificate machinery.\n\n///\n\n/// Users are expected to implement this trait to efficiently describe to\n\n/// Manticore which algorithms they consider acceptable and how to access them.\n\npub trait Ciphers {\n\n /// Performs cryptographic capabilities negotiation.\n\n ///\n\n /// This function populates `caps` with whatever asymmetric cryptography\n\n /// it supports.\n\n fn negotiate(&self, caps: &mut capabilities::Crypto);\n\n\n\n /// Returns a [`Verify`] that can be used to verify signatures using\n\n /// the given `key`.\n\n ///\n\n /// Returns `None` if `key`'s algorithm is not supported.\n\n fn verifier<'a>(\n\n &'a mut self,\n\n algo: Algo,\n\n key: &PublicKeyParams,\n\n ) -> Option<&'a mut dyn Verify>;\n\n}\n\nimpl dyn Ciphers {} // Ensure object-safe.\n\n\n\n/// A [`Ciphers`] that blindly accepts all signatures, for testing purposes.\n", "file_path": "src/crypto/sig.rs", "rank": 85, "score": 68381.40167368983 }, { "content": "/// Provides access to device reset-related information for a particular\n\n/// device.\n\npub trait Reset {\n\n /// Returns the number of times the device has been reset since it was\n\n /// powered on.\n\n fn resets_since_power_on(&self) -> u32;\n\n\n\n /// Returns the uptime of the device, i.e., the absolute duration since it\n\n /// was last released from reset.\n\n ///\n\n /// The resolution and accuracy of this value are expected to be\n\n /// best-effort.\n\n fn uptime(&self) -> Duration;\n\n}\n\nimpl dyn Reset {} // Ensure object-safe.\n", "file_path": "src/hardware/mod.rs", "rank": 86, "score": 68381.40167368983 }, { "content": " /// A public-in-private trait, for ensuring outside users cannot\n\n /// accidentally implement `HandlerMethods`.\n\n pub trait Sealed {}\n\n}\n\n\n\n/// Context for a request, i.e., all relevant variables for handling a request.\n\npub struct Context<'req, Buf, Req, Server> {\n\n pub req_buf: Buf,\n\n pub req: Req,\n\n pub server: Server,\n\n pub arena: &'req dyn Arena,\n\n}\n\n\n", "file_path": 
"src/server/handler.rs", "rank": 87, "score": 68381.40167368983 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nmod cfg_gated {\n\n use super::sealed::Sealed;\n\n\n", "file_path": "src/protocol/borrowed.rs", "rank": 88, "score": 68381.40167368983 }, { "content": "/// An signing engine, already primed with a keypair.\n\n///\n\n/// There is no way to extract the keypair back out of a `Sign` value.\n\npub trait Sign {\n\n /// Returns an upper bound for the number of bytes a signature of this type\n\n /// will need.\n\n fn sig_bytes(&self) -> usize;\n\n\n\n /// Creates a digital signature for `message_vec`, writing it to signature.\n\n ///\n\n /// `message_vec` is an iovec-like structure: the message is split across\n\n /// many buffers for digital signatures that are the concatenation of many\n\n /// parts, such as the Cerberus challenge command or a CWT signature.\n\n ///\n\n /// If the signature is created successfully, returns the number of bytes\n\n /// written to `signature`.\n\n fn sign(\n\n &mut self,\n\n message_vec: &[&[u8]],\n\n signature: &mut [u8],\n\n ) -> Result<usize, Error>;\n\n}\n\nimpl dyn Sign {} // Ensure object-safe.\n", "file_path": "src/crypto/sig.rs", "rank": 89, "score": 68381.40167368983 }, { "content": "/// A signature-verification engine, already primed with a key.\n\n///\n\n/// There is no way to extract the key back out of a `Verify` value.\n\npub trait Verify {\n\n /// Verifies that `signature` is a valid signature for `message_vec`.\n\n ///\n\n /// `message_vec` is an iovec-like structure: the message is split across\n\n /// many buffers for digital signatures that are the concatenation of many\n\n /// parts, such as the Cerberus challenge command or a CWT signature.\n\n ///\n\n /// If the underlying cryptographic operation succeeds, returns `Ok(())`.\n\n /// Failures, including signature check failures, are included in the\n\n /// `Err` variant.\n\n fn verify(\n\n &mut self,\n\n message_vec: &[&[u8]],\n\n signature: &[u8],\n\n ) -> Result<(), Error>;\n\n}\n\nimpl dyn Verify {} // Ensure object-safe.\n\n\n", "file_path": "src/crypto/sig.rs", "rank": 90, "score": 68381.40167368983 }, { "content": "#[test]\n\nfn device_id() {\n\n use manticore::protocol::device_id::*;\n\n\n\n let virt = rot::Virtual::spawn(&rot::Options {\n\n device_id: DeviceIdentifier {\n\n vendor_id: 0xc020,\n\n device_id: 0x0001,\n\n subsys_vendor_id: 0xffff,\n\n subsys_id: 0xaa55,\n\n },\n\n ..Default::default()\n\n });\n\n\n\n let arena = BumpArena::new([0; 64]);\n\n let resp = virt.send_cerberus::<DeviceId>(DeviceIdRequest {}, &arena);\n\n assert_eq!(\n\n resp.unwrap().unwrap().id,\n\n DeviceIdentifier {\n\n vendor_id: 0xc020,\n\n device_id: 0x0001,\n\n subsys_vendor_id: 0xffff,\n\n subsys_id: 0xaa55,\n\n }\n\n );\n\n}\n", "file_path": "e2e/src/tests/device_queries.rs", "rank": 91, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn firmware_version() {\n\n use manticore::protocol::firmware_version::*;\n\n\n\n let virt = rot::Virtual::spawn(&rot::Options {\n\n firmware_version: b\"my cool e2e test\".to_vec(),\n\n ..Default::default()\n\n });\n\n\n\n let arena = BumpArena::new([0; 64]);\n\n let resp = virt.send_cerberus::<FirmwareVersion>(\n\n FirmwareVersionRequest { index: 0 },\n\n &arena,\n\n );\n\n\n\n let version = resp.unwrap().unwrap().version;\n\n assert!(version.starts_with(b\"my cool e2e test\"));\n\n}\n\n\n", "file_path": "e2e/src/tests/device_queries.rs", "rank": 92, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn null_nonempty() 
{\n\n let mut reader = Reader::new(Input::from(data::der::NONEMPTY_NULL));\n\n assert!(der::null(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 93, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn scalar_ordering() {\n\n let ordering = [\n\n Scalar::Int(0.into()),\n\n Scalar::Int(23.into()),\n\n Scalar::Int(200.into()),\n\n Scalar::Int(1000.into()),\n\n Scalar::Int(100_000.into()),\n\n Scalar::Int((-1).into()),\n\n Scalar::Int((-23).into()),\n\n Scalar::Int((-200).into()),\n\n Scalar::Int((-1000).into()),\n\n Scalar::Int((-100_000).into()),\n\n Scalar::Utf8(\"z\"),\n\n Scalar::Utf8(\"aa\"),\n\n Scalar::Bytes(b\"z\"),\n\n Scalar::Bytes(b\"aa\"),\n\n ];\n\n for w in ordering.windows(2) {\n\n assert!(w[0] < w[1], \"expected {:?} < {:?}\", w[0], w[1]);\n\n }\n", "file_path": "src/cert/cwt/cbor/mod.rs", "rank": 94, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn long_form_any() {\n\n let mut reader = Reader::new(Input::from(data::der::LONG_FORM_ANY));\n\n assert!(der::any(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 95, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn parse_wrong() {\n\n let mut reader = Reader::new(Input::from(data::der::NULL));\n\n assert!(der::parse(Tag::INTEGER, &mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 96, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn uint_neg() {\n\n let mut reader = Reader::new(Input::from(data::der::NEGATIVE));\n\n assert!(der::uint(&mut reader).is_err());\n\n}\n\n\n", "file_path": "src/cert/x509/der/test.rs", "rank": 97, "score": 67817.97603971724 }, { "content": "#[test]\n\nfn context_tag() {\n\n assert_eq!(Tag::context_specific(3), Tag(0b10_1_00011));\n\n}\n\n\n\n// NOTE: We don't test der::any very much, since it's tested indirectly by all\n\n// of the helpers.\n", "file_path": "src/cert/x509/der/test.rs", "rank": 98, "score": 67817.97603971724 } ]
Rust
kernel/src/allocator/tests.rs
rrybarczyk/osy
3328c26343d47c31e615df080d4769821567df15
mod align_util { use allocator::util::{align_up, align_down}; #[test] fn test_align_down() { assert_eq!(align_down(0, 2), 0); assert_eq!(align_down(0, 8), 0); assert_eq!(align_down(0, 1 << 5), 0); assert_eq!(align_down(1 << 10, 1 << 10), 1 << 10); assert_eq!(align_down(1 << 20, 1 << 10), 1 << 20); assert_eq!(align_down(1 << 23, 1 << 4), 1 << 23); assert_eq!(align_down(1, 1 << 4), 0); assert_eq!(align_down(10, 1 << 4), 0); assert_eq!(align_down(0xFFFF, 1 << 2), 0xFFFC); assert_eq!(align_down(0xFFFF, 1 << 3), 0xFFF8); assert_eq!(align_down(0xFFFF, 1 << 4), 0xFFF0); assert_eq!(align_down(0xFFFF, 1 << 5), 0xFFE0); assert_eq!(align_down(0xAFFFF, 1 << 8), 0xAFF00); assert_eq!(align_down(0xAFFFF, 1 << 12), 0xAF000); assert_eq!(align_down(0xAFFFF, 1 << 16), 0xA0000); } #[test] fn test_align_up() { assert_eq!(align_up(0, 2), 0); assert_eq!(align_up(0, 8), 0); assert_eq!(align_up(0, 1 << 5), 0); assert_eq!(align_up(1 << 10, 1 << 10), 1 << 10); assert_eq!(align_up(1 << 20, 1 << 10), 1 << 20); assert_eq!(align_up(1 << 23, 1 << 4), 1 << 23); assert_eq!(align_up(1, 1 << 4), 1 << 4); assert_eq!(align_up(10, 1 << 4), 1 << 4); assert_eq!(align_up(0xFFFF, 1 << 2), 0x10000); assert_eq!(align_up(0xFFFF, 1 << 3), 0x10000); assert_eq!(align_up(0xFFFF, 1 << 4), 0x10000); assert_eq!(align_up(0xAFFFF, 1 << 12), 0xB0000); assert_eq!(align_up(0xABCDAB, 1 << 2), 0xABCDAC); assert_eq!(align_up(0xABCDAB, 1 << 4), 0xABCDB0); assert_eq!(align_up(0xABCDAB, 1 << 8), 0xABCE00); assert_eq!(align_up(0xABCDAB, 1 << 12), 0xABD000); assert_eq!(align_up(0xABCDAB, 1 << 16), 0xAC0000); } #[test] #[should_panic] fn test_panics_1() { align_down(0xFFFF0000, 7); } #[test] #[should_panic] fn test_panics_2() { align_down(0xFFFF0000, 123); } #[test] #[should_panic] fn test_panics_3() { align_up(0xFFFF0000, 7); } #[test] #[should_panic] fn test_panics_4() { align_up(0xFFFF0000, 456); } } #[path = ""] mod allocator { #[allow(dead_code)] mod bump; #[allow(dead_code)] mod bin; use alloc::allocator::{AllocErr, Layout}; use alloc::raw_vec::RawVec; macro test_allocators { (@$kind:ident, $name:ident, $mem:expr, |$info:pat| $block:expr) => { #[test] fn $name() { let mem: RawVec<u8> = RawVec::with_capacity($mem); let start = mem.ptr() as usize; let end = start + $mem; let allocator = $kind::Allocator::new(start, end); let $info = (start, end, allocator); $block } }, ($bin:ident, $bump:ident, $mem:expr, |$info:pat| $block:expr) => ( test_allocators!(@bin, $bin, $mem, |$info| $block); test_allocators!(@bump, $bump, $mem, |$info| $block); ) } macro layout($size:expr, $align:expr) { Layout::from_size_align($size, $align).unwrap() } macro test_layouts($layouts:expr, $start:expr, $end:expr, $a:expr) { let (layouts, start, end, mut a) = ($layouts, $start, $end, $a); let mut pointers: Vec<(usize, Layout)> = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).unwrap() as usize; pointers.push((ptr, layout.clone())); } for &(ptr, ref layout) in &pointers { assert!(ptr >= start, "allocated {:x} after start ({:x})", ptr, start); assert!(ptr + layout.size() <= end, "{:x} + {:x} exceeds the bounds of {:x}", ptr, layout.size(), end); } pointers.sort_by_key(|&(ptr, _)| ptr); for window in pointers.windows(2) { let (&(ptr_a, ref layout_a), &(ptr_b, _)) = (&window[0], &window[1]); assert!(ptr_b - ptr_a >= layout_a.size(), "memory region {:x} - {:x} does not fit {}", ptr_a, ptr_b, layout_a.size()); } for &(ptr, ref layout) in &pointers { assert!(ptr % layout.align() == 0, "{:x} is not aligned to {}", ptr, layout.align()); } } 
test_allocators!(bin_exhausted, bump_exhausted, 128, |(_, _, mut a)| { let e = a.alloc(layout!(1024, 128)).unwrap_err(); assert_eq!(e, AllocErr::Exhausted { request: layout!(1024, 128) }) }); test_allocators!(bin_alloc, bump_alloc, 8 * (1 << 20), |(start, end, a)| { let layouts = [ layout!(16, 16), layout!(16, 128), layout!(16, 256), layout!(4, 256), layout!(1024, 16), layout!(1024, 4), layout!(1024, 128), layout!(2048, 8), layout!(2049, 8), layout!(2050, 8), layout!(4095, 4), layout!(4096, 4), layout!(4096, 4), layout!(4096, 4096), layout!(16, 4096), layout!(8192, 4096), layout!(8192, 8), layout!(8192, 8), ]; test_layouts!(layouts, start, end, a); }); test_allocators!(bin_alloc_2, bump_alloc_2, 16 * (1 << 20), |(start, end, a)| { let mut layouts = vec![]; for i in 1..1024 { layouts.push(layout!(i * 8, 16)); } test_layouts!(layouts, start, end, a); }); fn scribble(ptr: *mut u8, size: usize) { unsafe { ::std::ptr::write_bytes(ptr, 0xAF, size); } } test_allocators!(bin_dealloc_s, bump_dealloc_s, 4096, |(_, _, mut a)| { let layouts = [ layout!(16, 16), layout!(16, 128), layout!(16, 256), ]; let mut pointers: Vec<(usize, Layout)> = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).unwrap(); scribble(ptr, layout.size()); pointers.push((ptr as usize, layout.clone())); } for (ptr, layout) in pointers { scribble(ptr as *mut u8, layout.size()); a.dealloc(ptr as *mut u8, layout); } }); test_allocators!(@bin, bin_dealloc_1, 65536, |(_, _, mut a)| { let layouts = [ layout!(16, 16), layout!(16, 256), layout!(32, 4), layout!(32, 1024), layout!(4, 1024), layout!(4, 32), ]; for (i, layout) in layouts.iter().enumerate() { let mut ptrs = vec![]; for _ in 0..(25 + i * 2) { let ptr = a.alloc(layout.clone()).expect("allocation"); assert!(ptr as usize % layout.align() == 0, "{:x} is not aligned to {}", ptr as usize, layout.align()); scribble(ptr, layout.size()); ptrs.push((ptr, layout.clone())); } for (ptr, layout) in ptrs { a.dealloc(ptr, layout); } } for _ in 0..500 { for layout in &layouts { let ptr = a.alloc(layout.clone()).expect("allocation"); scribble(ptr, layout.size()); assert!(ptr as usize % layout.align() == 0, "{:x} is not aligned to {}", ptr as usize, layout.align()); a.dealloc(ptr, layout.clone()); } } }); test_allocators!(@bin, bin_dealloc_2, 8192, |(_, _, mut a)| { let layouts = [ layout!(3072, 16), layout!(512, 32), ]; for _ in 0..1000 { let mut ptrs = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).expect("allocation"); scribble(ptr, layout.size()); ptrs.push(ptr as usize); } for (layout, ptr) in layouts.iter().zip(ptrs.into_iter()) { scribble(ptr as *mut u8, layout.size()); a.dealloc(ptr as *mut u8, layout.clone()); } } }); } mod linked_list { use allocator::linked_list::LinkedList; #[test] fn example_1() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); } assert_eq!(list.peek(), Some(address_2)); assert_eq!(list.pop(), Some(address_2)); assert_eq!(list.pop(), Some(address_1)); assert_eq!(list.pop(), None); } #[test] fn example_2() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let address_3 = (&mut (3 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); list.push(address_3); } for node in list.iter_mut() { if node.value() == address_2 { node.pop(); } } assert_eq!(list.pop(), Some(address_3)); 
assert_eq!(list.pop(), Some(address_1)); assert_eq!(list.pop(), None); } #[test] fn example_3() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let address_3 = (&mut (3 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); list.push(address_3); } for node in list.iter_mut() { if node.value() == address_2 { node.pop(); } } { let mut iter = list.iter(); assert_eq!(iter.next(), Some(address_3)); assert_eq!(iter.next(), Some(address_1)); assert_eq!(iter.next(), None); } for node in list.iter_mut() { if node.value() == address_1 { node.pop(); } } { let mut iter = list.iter(); assert_eq!(iter.next(), Some(address_3)); assert_eq!(iter.next(), None); } for node in list.iter_mut() { if node.value() == address_3 { node.pop(); } } let mut iter = list.iter(); assert_eq!(iter.next(), None); } }
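
The align_util tests above fully pin down the expected behavior of allocator::util::{align_up, align_down}: both round an address to a multiple of a power-of-two alignment and panic on any other alignment. A minimal sketch consistent with those expectations, using the usual bit-mask formulation — this is an illustration, not necessarily the rrybarczyk/osy implementation, and the overflow handling in align_up is an added assumption the tests never exercise:

```rust
/// Rounds `addr` down to the nearest multiple of `align`.
///
/// Panics if `align` is not a power of two, matching the `test_panics_*` cases.
pub fn align_down(addr: usize, align: usize) -> usize {
    assert!(align.is_power_of_two(), "align must be a power of two");
    // Clearing the low bits keeps the largest multiple of `align` that is <= `addr`.
    addr & !(align - 1)
}

/// Rounds `addr` up to the nearest multiple of `align`.
///
/// Panics if `align` is not a power of two.
pub fn align_up(addr: usize, align: usize) -> usize {
    // Overflow behavior here is an assumption; the tests above only cover in-range inputs.
    align_down(addr.checked_add(align - 1).expect("align_up overflow"), align)
}
```

As a spot check against the tests: align_up(0xFFFF, 1 << 4) computes 0xFFFF + 0xF = 0x1000E, then masks the low four bits to give 0x10000, which is the value asserted in test_align_up.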
mod align_util { use allocator::util::{align_up, align_down}; #[test] fn test_align_down() { a
12), 0xAF000); assert_eq!(align_down(0xAFFFF, 1 << 16), 0xA0000); } #[test] fn test_align_up() { assert_eq!(align_up(0, 2), 0); assert_eq!(align_up(0, 8), 0); assert_eq!(align_up(0, 1 << 5), 0); assert_eq!(align_up(1 << 10, 1 << 10), 1 << 10); assert_eq!(align_up(1 << 20, 1 << 10), 1 << 20); assert_eq!(align_up(1 << 23, 1 << 4), 1 << 23); assert_eq!(align_up(1, 1 << 4), 1 << 4); assert_eq!(align_up(10, 1 << 4), 1 << 4); assert_eq!(align_up(0xFFFF, 1 << 2), 0x10000); assert_eq!(align_up(0xFFFF, 1 << 3), 0x10000); assert_eq!(align_up(0xFFFF, 1 << 4), 0x10000); assert_eq!(align_up(0xAFFFF, 1 << 12), 0xB0000); assert_eq!(align_up(0xABCDAB, 1 << 2), 0xABCDAC); assert_eq!(align_up(0xABCDAB, 1 << 4), 0xABCDB0); assert_eq!(align_up(0xABCDAB, 1 << 8), 0xABCE00); assert_eq!(align_up(0xABCDAB, 1 << 12), 0xABD000); assert_eq!(align_up(0xABCDAB, 1 << 16), 0xAC0000); } #[test] #[should_panic] fn test_panics_1() { align_down(0xFFFF0000, 7); } #[test] #[should_panic] fn test_panics_2() { align_down(0xFFFF0000, 123); } #[test] #[should_panic] fn test_panics_3() { align_up(0xFFFF0000, 7); } #[test] #[should_panic] fn test_panics_4() { align_up(0xFFFF0000, 456); } } #[path = ""] mod allocator { #[allow(dead_code)] mod bump; #[allow(dead_code)] mod bin; use alloc::allocator::{AllocErr, Layout}; use alloc::raw_vec::RawVec; macro test_allocators { (@$kind:ident, $name:ident, $mem:expr, |$info:pat| $block:expr) => { #[test] fn $name() { let mem: RawVec<u8> = RawVec::with_capacity($mem); let start = mem.ptr() as usize; let end = start + $mem; let allocator = $kind::Allocator::new(start, end); let $info = (start, end, allocator); $block } }, ($bin:ident, $bump:ident, $mem:expr, |$info:pat| $block:expr) => ( test_allocators!(@bin, $bin, $mem, |$info| $block); test_allocators!(@bump, $bump, $mem, |$info| $block); ) } macro layout($size:expr, $align:expr) { Layout::from_size_align($size, $align).unwrap() } macro test_layouts($layouts:expr, $start:expr, $end:expr, $a:expr) { let (layouts, start, end, mut a) = ($layouts, $start, $end, $a); let mut pointers: Vec<(usize, Layout)> = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).unwrap() as usize; pointers.push((ptr, layout.clone())); } for &(ptr, ref layout) in &pointers { assert!(ptr >= start, "allocated {:x} after start ({:x})", ptr, start); assert!(ptr + layout.size() <= end, "{:x} + {:x} exceeds the bounds of {:x}", ptr, layout.size(), end); } pointers.sort_by_key(|&(ptr, _)| ptr); for window in pointers.windows(2) { let (&(ptr_a, ref layout_a), &(ptr_b, _)) = (&window[0], &window[1]); assert!(ptr_b - ptr_a >= layout_a.size(), "memory region {:x} - {:x} does not fit {}", ptr_a, ptr_b, layout_a.size()); } for &(ptr, ref layout) in &pointers { assert!(ptr % layout.align() == 0, "{:x} is not aligned to {}", ptr, layout.align()); } } test_allocators!(bin_exhausted, bump_exhausted, 128, |(_, _, mut a)| { let e = a.alloc(layout!(1024, 128)).unwrap_err(); assert_eq!(e, AllocErr::Exhausted { request: layout!(1024, 128) }) }); test_allocators!(bin_alloc, bump_alloc, 8 * (1 << 20), |(start, end, a)| { let layouts = [ layout!(16, 16), layout!(16, 128), layout!(16, 256), layout!(4, 256), layout!(1024, 16), layout!(1024, 4), layout!(1024, 128), layout!(2048, 8), layout!(2049, 8), layout!(2050, 8), layout!(4095, 4), layout!(4096, 4), layout!(4096, 4), layout!(4096, 4096), layout!(16, 4096), layout!(8192, 4096), layout!(8192, 8), layout!(8192, 8), ]; test_layouts!(layouts, start, end, a); }); test_allocators!(bin_alloc_2, bump_alloc_2, 16 * (1 << 20), |(start, 
end, a)| { let mut layouts = vec![]; for i in 1..1024 { layouts.push(layout!(i * 8, 16)); } test_layouts!(layouts, start, end, a); }); fn scribble(ptr: *mut u8, size: usize) { unsafe { ::std::ptr::write_bytes(ptr, 0xAF, size); } } test_allocators!(bin_dealloc_s, bump_dealloc_s, 4096, |(_, _, mut a)| { let layouts = [ layout!(16, 16), layout!(16, 128), layout!(16, 256), ]; let mut pointers: Vec<(usize, Layout)> = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).unwrap(); scribble(ptr, layout.size()); pointers.push((ptr as usize, layout.clone())); } for (ptr, layout) in pointers { scribble(ptr as *mut u8, layout.size()); a.dealloc(ptr as *mut u8, layout); } }); test_allocators!(@bin, bin_dealloc_1, 65536, |(_, _, mut a)| { let layouts = [ layout!(16, 16), layout!(16, 256), layout!(32, 4), layout!(32, 1024), layout!(4, 1024), layout!(4, 32), ]; for (i, layout) in layouts.iter().enumerate() { let mut ptrs = vec![]; for _ in 0..(25 + i * 2) { let ptr = a.alloc(layout.clone()).expect("allocation"); assert!(ptr as usize % layout.align() == 0, "{:x} is not aligned to {}", ptr as usize, layout.align()); scribble(ptr, layout.size()); ptrs.push((ptr, layout.clone())); } for (ptr, layout) in ptrs { a.dealloc(ptr, layout); } } for _ in 0..500 { for layout in &layouts { let ptr = a.alloc(layout.clone()).expect("allocation"); scribble(ptr, layout.size()); assert!(ptr as usize % layout.align() == 0, "{:x} is not aligned to {}", ptr as usize, layout.align()); a.dealloc(ptr, layout.clone()); } } }); test_allocators!(@bin, bin_dealloc_2, 8192, |(_, _, mut a)| { let layouts = [ layout!(3072, 16), layout!(512, 32), ]; for _ in 0..1000 { let mut ptrs = vec![]; for layout in &layouts { let ptr = a.alloc(layout.clone()).expect("allocation"); scribble(ptr, layout.size()); ptrs.push(ptr as usize); } for (layout, ptr) in layouts.iter().zip(ptrs.into_iter()) { scribble(ptr as *mut u8, layout.size()); a.dealloc(ptr as *mut u8, layout.clone()); } } }); } mod linked_list { use allocator::linked_list::LinkedList; #[test] fn example_1() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); } assert_eq!(list.peek(), Some(address_2)); assert_eq!(list.pop(), Some(address_2)); assert_eq!(list.pop(), Some(address_1)); assert_eq!(list.pop(), None); } #[test] fn example_2() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let address_3 = (&mut (3 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); list.push(address_3); } for node in list.iter_mut() { if node.value() == address_2 { node.pop(); } } assert_eq!(list.pop(), Some(address_3)); assert_eq!(list.pop(), Some(address_1)); assert_eq!(list.pop(), None); } #[test] fn example_3() { let address_1 = (&mut (1 as usize)) as *mut usize; let address_2 = (&mut (2 as usize)) as *mut usize; let address_3 = (&mut (3 as usize)) as *mut usize; let mut list = LinkedList::new(); unsafe { list.push(address_1); list.push(address_2); list.push(address_3); } for node in list.iter_mut() { if node.value() == address_2 { node.pop(); } } { let mut iter = list.iter(); assert_eq!(iter.next(), Some(address_3)); assert_eq!(iter.next(), Some(address_1)); assert_eq!(iter.next(), None); } for node in list.iter_mut() { if node.value() == address_1 { node.pop(); } } { let mut iter = list.iter(); assert_eq!(iter.next(), Some(address_3)); 
assert_eq!(iter.next(), None); } for node in list.iter_mut() { if node.value() == address_3 { node.pop(); } } let mut iter = list.iter(); assert_eq!(iter.next(), None); } }
ssert_eq!(align_down(0, 2), 0); assert_eq!(align_down(0, 8), 0); assert_eq!(align_down(0, 1 << 5), 0); assert_eq!(align_down(1 << 10, 1 << 10), 1 << 10); assert_eq!(align_down(1 << 20, 1 << 10), 1 << 20); assert_eq!(align_down(1 << 23, 1 << 4), 1 << 23); assert_eq!(align_down(1, 1 << 4), 0); assert_eq!(align_down(10, 1 << 4), 0); assert_eq!(align_down(0xFFFF, 1 << 2), 0xFFFC); assert_eq!(align_down(0xFFFF, 1 << 3), 0xFFF8); assert_eq!(align_down(0xFFFF, 1 << 4), 0xFFF0); assert_eq!(align_down(0xFFFF, 1 << 5), 0xFFE0); assert_eq!(align_down(0xAFFFF, 1 << 8), 0xAFF00); assert_eq!(align_down(0xAFFFF, 1 <<
function_block-random_span
[ { "content": "pub fn next_test_ip4() -> SocketAddr {\n\n let port = PORT.fetch_add(1, Ordering::SeqCst) as u16 + base_port();\n\n SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), port))\n\n}\n\n\n", "file_path": "std/src/net/test.rs", "rank": 0, "score": 132764.1280292329 }, { "content": "pub fn next_test_ip6() -> SocketAddr {\n\n let port = PORT.fetch_add(1, Ordering::SeqCst) as u16 + base_port();\n\n SocketAddr::V6(SocketAddrV6::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1),\n\n port, 0, 0))\n\n}\n\n\n", "file_path": "std/src/net/test.rs", "rank": 1, "score": 132764.1280292329 }, { "content": "#[test]\n\nfn test_rounding() {\n\n assert_eq!(round_up_to_next(0, 4), 0);\n\n assert_eq!(round_up_to_next(1, 4), 4);\n\n assert_eq!(round_up_to_next(2, 4), 4);\n\n assert_eq!(round_up_to_next(3, 4), 4);\n\n assert_eq!(round_up_to_next(4, 4), 4);\n\n assert_eq!(round_up_to_next(5, 4), 8);\n\n}\n\n\n\n// Returns a tuple of (pairs_offset, end_of_pairs_offset),\n\n// from the start of a mallocated array.\n", "file_path": "std/src/collections/hash/table.rs", "rank": 2, "score": 130062.87928077488 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn park() {\n\n let thread = current();\n\n\n\n // If we were previously notified then we consume this notification and\n\n // return quickly.\n\n if thread.inner.state.compare_exchange(NOTIFIED, EMPTY, SeqCst, SeqCst).is_ok() {\n\n return\n\n }\n\n\n\n // Otherwise we need to coordinate going to sleep\n\n let mut m = thread.inner.lock.lock().unwrap();\n\n match thread.inner.state.compare_exchange(EMPTY, PARKED, SeqCst, SeqCst) {\n\n Ok(_) => {}\n\n Err(NOTIFIED) => return, // notified after we locked\n\n Err(_) => panic!(\"inconsistent park state\"),\n\n }\n\n loop {\n\n m = thread.inner.cvar.wait(m).unwrap();\n\n match thread.inner.state.compare_exchange(NOTIFIED, EMPTY, SeqCst, SeqCst) {\n\n Ok(_) => return, // got a notification\n", "file_path": "std/src/thread/mod.rs", "rank": 3, "score": 129720.14408425972 }, { "content": "fn _assert_sync_and_send() {\n\n fn _assert_both<T: Send + Sync>() {}\n\n _assert_both::<JoinHandle<()>>();\n\n _assert_both::<Thread>();\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// Tests\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n#[cfg(all(test, not(target_os = \"emscripten\")))]\n\nmod tests {\n\n use any::Any;\n\n use sync::mpsc::{channel, Sender};\n\n use result;\n\n use super::{Builder};\n\n use thread;\n\n use time::Duration;\n\n use u32;\n\n\n", "file_path": "std/src/thread/mod.rs", "rank": 4, "score": 129166.71512188637 }, { "content": "// The bots run multiple builds at the same time, and these builds\n\n// all want to use ports. 
This function figures out which workspace\n\n// it is running in and assigns a port range based on it.\n\nfn base_port() -> u16 {\n\n let cwd = env::current_dir().unwrap();\n\n let dirs = [\"32-opt\", \"32-nopt\",\n\n \"musl-64-opt\", \"cross-opt\",\n\n \"64-opt\", \"64-nopt\", \"64-opt-vg\", \"64-debug-opt\",\n\n \"all-opt\", \"snap3\", \"dist\"];\n\n dirs.iter().enumerate().find(|&(_, dir)| {\n\n cwd.to_str().unwrap().contains(dir)\n\n }).map(|p| p.0).unwrap_or(0) as u16 * 1000 + 19600\n\n}\n", "file_path": "std/src/net/test.rs", "rank": 5, "score": 128192.83465846532 }, { "content": "#[test]\n\nfn test_offset_calculation() {\n\n assert_eq!(calculate_allocation(128, 8, 16, 8), (8, 144, false));\n\n assert_eq!(calculate_allocation(3, 1, 2, 1), (1, 5, false));\n\n assert_eq!(calculate_allocation(6, 2, 12, 4), (4, 20, false));\n\n assert_eq!(calculate_offsets(128, 15, 4), (128, 143, false));\n\n assert_eq!(calculate_offsets(3, 2, 4), (4, 6, false));\n\n assert_eq!(calculate_offsets(6, 12, 4), (8, 20, false));\n\n}\n\n\n\nimpl<K, V> RawTable<K, V> {\n\n /// Does not initialize the buckets. The caller should ensure they,\n\n /// at the very least, set every hash to EMPTY_BUCKET.\n\n unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {\n\n if capacity == 0 {\n\n return RawTable {\n\n size: 0,\n\n capacity_mask: capacity.wrapping_sub(1),\n\n hashes: TaggedHashUintPtr::new(EMPTY as *mut HashUint),\n\n marker: marker::PhantomData,\n\n };\n", "file_path": "std/src/collections/hash/table.rs", "rank": 6, "score": 127734.76873698432 }, { "content": "#[cfg(not(test))]\n\npub fn init() {}\n\n\n", "file_path": "std/src/sys/redox/mod.rs", "rank": 7, "score": 127303.52822307189 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn yield_now() {\n\n imp::Thread::yield_now()\n\n}\n\n\n\n/// Determines whether the current thread is unwinding because of panic.\n\n///\n\n/// A common use of this feature is to poison shared resources when writing\n\n/// unsafe code, by checking `panicking` when the `drop` is called.\n\n///\n\n/// This is usually not needed when writing safe code, as [`Mutex`es][Mutex]\n\n/// already poison themselves when a thread panics while holding the lock.\n\n///\n\n/// This can also be used in multithreaded applications, in order to send a\n\n/// message to other threads warning that a thread has panicked (e.g. 
for\n\n/// monitoring purposes).\n\n///\n\n/// # Examples\n\n///\n\n/// ```should_panic\n\n/// use std::thread;\n", "file_path": "std/src/thread/mod.rs", "rank": 8, "score": 127297.5250988529 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn current() -> Thread {\n\n thread_info::current_thread().expect(\"use of std::thread::current() is not \\\n\n possible after the thread's local \\\n\n data has been destroyed\")\n\n}\n\n\n\n/// Cooperatively gives up a timeslice to the OS scheduler.\n\n///\n\n/// This is used when the programmer knows that the thread will have nothing\n\n/// to do for some time, and thus avoid wasting computing time.\n\n///\n\n/// For example when polling on a resource, it is common to check that it is\n\n/// available, and if not to yield in order to avoid busy waiting.\n\n///\n\n/// Thus the pattern of `yield`ing after a failed poll is rather common when\n\n/// implementing low-level shared resources or synchronization primitives.\n\n///\n\n/// However programmers will usually prefer to use, [`channel`]s, [`Condvar`]s,\n\n/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid\n\n/// thinking about thread scheduling.\n", "file_path": "std/src/thread/mod.rs", "rank": 9, "score": 125630.90225551353 }, { "content": "#[inline]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn panicking() -> bool {\n\n panicking::panicking()\n\n}\n\n\n\n/// Puts the current thread to sleep for the specified amount of time.\n\n///\n\n/// The thread may sleep longer than the duration specified due to scheduling\n\n/// specifics or platform-dependent functionality.\n\n///\n\n/// # Platform behavior\n\n///\n\n/// On Unix platforms this function will not return early due to a\n\n/// signal being received or a spurious wakeup.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use std::thread;\n\n///\n\n/// // Let's sleep for 2 seconds:\n\n/// thread::sleep_ms(2000);\n\n/// ```\n", "file_path": "std/src/thread/mod.rs", "rank": 10, "score": 125630.90225551353 }, { "content": "#[stable(feature = \"thread_sleep\", since = \"1.4.0\")]\n\npub fn sleep(dur: Duration) {\n\n imp::Thread::sleep(dur)\n\n}\n\n\n\n// constants for park/unpark\n\nconst EMPTY: usize = 0;\n\nconst PARKED: usize = 1;\n\nconst NOTIFIED: usize = 2;\n\n\n\n/// Blocks unless or until the current thread's token is made available.\n\n///\n\n/// A call to `park` does not guarantee that the thread will remain parked\n\n/// forever, and callers should be prepared for this possibility.\n\n///\n\n/// # park and unpark\n\n///\n\n/// Every thread is equipped with some basic low-level blocking support, via the\n\n/// [`thread::park`][`park`] function and [`thread::Thread::unpark`][`unpark`]\n\n/// method. [`park`] blocks the current thread, which can then be resumed from\n\n/// another thread by calling the [`unpark`] method on the blocked thread's\n", "file_path": "std/src/thread/mod.rs", "rank": 11, "score": 121828.76924436998 }, { "content": "fn hton<I: NetInt>(i: I) -> I { i.to_be() }\n", "file_path": "std/src/net/mod.rs", "rank": 12, "score": 120448.82133652363 }, { "content": "/// Returns the (start address, end address) of the available memory on this\n\n/// system if it can be determined. 
If it cannot, `None` is returned.\n\n///\n\n/// This function is expected to return `Some` under all normal cirumstances.\n\nfn memory_map() -> Option<(usize, usize)> {\n\n let binary_end = unsafe { (&_end as *const u8) as u32 };\n\n\n\n unimplemented!(\"memory map fetch\")\n\n}\n", "file_path": "kernel/src/allocator/mod.rs", "rank": 13, "score": 119629.60011053878 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[rustc_deprecated(since = \"1.6.0\", reason = \"replaced by `std::thread::sleep`\")]\n\npub fn sleep_ms(ms: u32) {\n\n sleep(Duration::from_millis(ms as u64))\n\n}\n\n\n\n/// Puts the current thread to sleep for the specified amount of time.\n\n///\n\n/// The thread may sleep longer than the duration specified due to scheduling\n\n/// specifics or platform-dependent functionality.\n\n///\n\n/// # Platform behavior\n\n///\n\n/// On Unix platforms this function will not return early due to a\n\n/// signal being received or a spurious wakeup. Platforms which do not support\n\n/// nanosecond precision for sleeping will have `dur` rounded up to the nearest\n\n/// granularity of time they can sleep for.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use std::{thread, time};\n\n///\n\n/// let ten_millis = time::Duration::from_millis(10);\n\n/// let now = time::Instant::now();\n\n///\n\n/// thread::sleep(ten_millis);\n\n///\n\n/// assert!(now.elapsed() >= ten_millis);\n\n/// ```\n", "file_path": "std/src/thread/mod.rs", "rank": 14, "score": 119629.60011053878 }, { "content": "#[stable(feature = \"park_timeout\", since = \"1.4.0\")]\n\npub fn park_timeout(dur: Duration) {\n\n let thread = current();\n\n\n\n // Like `park` above we have a fast path for an already-notified thread, and\n\n // afterwards we start coordinating for a sleep.\n\n // return quickly.\n\n if thread.inner.state.compare_exchange(NOTIFIED, EMPTY, SeqCst, SeqCst).is_ok() {\n\n return\n\n }\n\n let m = thread.inner.lock.lock().unwrap();\n\n match thread.inner.state.compare_exchange(EMPTY, PARKED, SeqCst, SeqCst) {\n\n Ok(_) => {}\n\n Err(NOTIFIED) => return, // notified after we locked\n\n Err(_) => panic!(\"inconsistent park_timeout state\"),\n\n }\n\n\n\n // Wait with a timeout, and if we spuriously wake up or otherwise wake up\n\n // from a notification we just want to unconditionally set the state back to\n\n // empty, either consuming a notification or un-flagging ourselves as\n\n // parked.\n", "file_path": "std/src/thread/mod.rs", "rank": 15, "score": 119629.60011053878 }, { "content": "fn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }\n\n\n", "file_path": "std/src/net/mod.rs", "rank": 16, "score": 118493.48867269594 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[rustc_deprecated(since = \"1.6.0\", reason = \"replaced by `std::thread::park_timeout`\")]\n\npub fn park_timeout_ms(ms: u32) {\n\n park_timeout(Duration::from_millis(ms as u64))\n\n}\n\n\n\n/// Blocks unless or until the current thread's token is made available or\n\n/// the specified duration has been reached (may wake spuriously).\n\n///\n\n/// The semantics of this function are equivalent to [`park`][park] except\n\n/// that the thread will be blocked for roughly no longer than `dur`. 
This\n\n/// method should not be used for precise timing due to anomalies such as\n\n/// preemption or platform differences that may not cause the maximum\n\n/// amount of time waited to be precisely `dur` long.\n\n///\n\n/// See the [park documentation][park] for more details.\n\n///\n\n/// # Platform behavior\n\n///\n\n/// Platforms which do not support nanosecond precision for sleeping will have\n\n/// `dur` rounded up to the nearest granularity of time they can sleep for.\n\n///\n", "file_path": "std/src/thread/mod.rs", "rank": 17, "score": 117530.73646728888 }, { "content": "pub fn strlen(string: *const c_char) -> usize {\n\n let mut size = 0;\n\n while unsafe { *(string.offset(size as isize)) } != 0 {\n\n size += 1;\n\n }\n\n\n\n size\n\n}\n\n\n\npub mod os {\n\n /// Gets a detailed string description for the given error number.\n\n pub fn error_string(_errno: i32) -> String {\n\n \"unknown error\".to_string()\n\n }\n\n\n\n /// Returns the platform-specific value of errno\n\n pub fn errno() -> i32 {\n\n -1\n\n }\n\n}\n", "file_path": "std/src/sys/ros/mod.rs", "rank": 18, "score": 112953.05074918942 }, { "content": "pub fn sa6(a: Ipv6Addr, p: u16) -> SocketAddr {\n\n SocketAddr::V6(SocketAddrV6::new(a, p, 0, 0))\n\n}\n\n\n", "file_path": "std/src/net/test.rs", "rank": 19, "score": 112649.4068141204 }, { "content": "pub fn sa4(a: Ipv4Addr, p: u16) -> SocketAddr {\n\n SocketAddr::V4(SocketAddrV4::new(a, p))\n\n}\n\n\n", "file_path": "std/src/net/test.rs", "rank": 20, "score": 112649.4068141204 }, { "content": "#[cfg(test)]\n\npub fn test_num<T>(ten: T, two: T) where\n\n T: PartialEq\n\n + Add<Output=T> + Sub<Output=T>\n\n + Mul<Output=T> + Div<Output=T>\n\n + Rem<Output=T> + fmt::Debug\n\n + Copy\n\n{\n\n assert_eq!(ten.add(two), ten + two);\n\n assert_eq!(ten.sub(two), ten - two);\n\n assert_eq!(ten.mul(two), ten * two);\n\n assert_eq!(ten.div(two), ten / two);\n\n assert_eq!(ten.rem(two), ten % two);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use u8;\n\n use u16;\n\n use u32;\n\n use u64;\n", "file_path": "std/src/num.rs", "rank": 21, "score": 111636.06099004914 }, { "content": "pub fn decode_error_kind(errno: i32) -> ErrorKind {\n\n match errno {\n\n syscall::ECONNREFUSED => ErrorKind::ConnectionRefused,\n\n syscall::ECONNRESET => ErrorKind::ConnectionReset,\n\n syscall::EPERM | syscall::EACCES => ErrorKind::PermissionDenied,\n\n syscall::EPIPE => ErrorKind::BrokenPipe,\n\n syscall::ENOTCONN => ErrorKind::NotConnected,\n\n syscall::ECONNABORTED => ErrorKind::ConnectionAborted,\n\n syscall::EADDRNOTAVAIL => ErrorKind::AddrNotAvailable,\n\n syscall::EADDRINUSE => ErrorKind::AddrInUse,\n\n syscall::ENOENT => ErrorKind::NotFound,\n\n syscall::EINTR => ErrorKind::Interrupted,\n\n syscall::EINVAL => ErrorKind::InvalidInput,\n\n syscall::ETIMEDOUT => ErrorKind::TimedOut,\n\n syscall::EEXIST => ErrorKind::AlreadyExists,\n\n\n\n // These two constants can have the same value on some systems,\n\n // but different values on others, so we can't use a match\n\n // clause\n\n x if x == syscall::EAGAIN || x == syscall::EWOULDBLOCK =>\n\n ErrorKind::WouldBlock,\n\n\n\n _ => ErrorKind::Other,\n\n }\n\n}\n\n\n", "file_path": "std/src/sys/redox/mod.rs", "rank": 22, "score": 110422.6589922662 }, { "content": "fn path_to_peer_addr(path_str: &str) -> SocketAddr {\n\n let mut parts = path_str.split('/').next().unwrap_or(\"\").split(':').skip(1);\n\n let host = Ipv4Addr::from_str(parts.next().unwrap_or(\"\")).unwrap_or(Ipv4Addr::new(0, 0, 0, 0));\n\n let port = 
parts.next().unwrap_or(\"\").parse::<u16>().unwrap_or(0);\n\n SocketAddr::V4(SocketAddrV4::new(host, port))\n\n}\n\n\n", "file_path": "std/src/sys/redox/net/mod.rs", "rank": 23, "score": 110012.56038361475 }, { "content": "fn path_to_local_addr(path_str: &str) -> SocketAddr {\n\n let mut parts = path_str.split('/').nth(1).unwrap_or(\"\").split(':');\n\n let host = Ipv4Addr::from_str(parts.next().unwrap_or(\"\")).unwrap_or(Ipv4Addr::new(0, 0, 0, 0));\n\n let port = parts.next().unwrap_or(\"\").parse::<u16>().unwrap_or(0);\n\n SocketAddr::V4(SocketAddrV4::new(host, port))\n\n}\n", "file_path": "std/src/sys/redox/net/mod.rs", "rank": 24, "score": 110012.56038361475 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn channel<T>() -> (Sender<T>, Receiver<T>) {\n\n let a = Arc::new(oneshot::Packet::new());\n\n (Sender::new(Flavor::Oneshot(a.clone())), Receiver::new(Flavor::Oneshot(a)))\n\n}\n\n\n\n/// Creates a new synchronous, bounded channel.\n\n/// All data sent on the [`SyncSender`] will become available on the [`Receiver`]\n\n/// in the same order as it was sent. Like asynchronous [`channel`]s, the\n\n/// [`Receiver`] will block until a message becomes available. `sync_channel`\n\n/// differs greatly in the semantics of the sender, however.\n\n///\n\n/// This channel has an internal buffer on which messages will be queued.\n\n/// `bound` specifies the buffer size. When the internal buffer becomes full,\n\n/// future sends will *block* waiting for the buffer to open up. Note that a\n\n/// buffer size of 0 is valid, in which case this becomes \"rendezvous channel\"\n\n/// where each [`send`] will not return until a [`recv`] is paired with it.\n\n///\n\n/// The [`SyncSender`] can be cloned to [`send`] to the same channel multiple\n\n/// times, but only one [`Receiver`] is supported.\n\n///\n", "file_path": "std/src/sync/mpsc/mod.rs", "rank": 25, "score": 109920.88405752151 }, { "content": "fn read_one_byte(reader: &mut Read) -> Option<Result<u8>> {\n\n let mut buf = [0];\n\n loop {\n\n return match reader.read(&mut buf) {\n\n Ok(0) => None,\n\n Ok(..) 
=> Some(Ok(buf[0])),\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => Some(Err(e)),\n\n };\n\n }\n\n}\n\n\n\n/// An iterator over `u8` values of a reader.\n\n///\n\n/// This struct is generally created by calling [`bytes`] on a reader.\n\n/// Please see the documentation of [`bytes`] for more details.\n\n///\n\n/// [`bytes`]: trait.Read.html#method.bytes\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[derive(Debug)]\n", "file_path": "std/src/io/mod.rs", "rank": 26, "score": 108084.95891342114 }, { "content": "pub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n\n net_imp::lookup_host(host).map(LookupHost)\n\n}\n", "file_path": "std/src/net/mod.rs", "rank": 27, "score": 108084.95891342114 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn spawn<F, T>(f: F) -> JoinHandle<T> where\n\n F: FnOnce() -> T, F: Send + 'static, T: Send + 'static\n\n{\n\n Builder::new().spawn(f).unwrap()\n\n}\n\n\n\n/// Gets a handle to the thread that invokes it.\n\n///\n\n/// # Examples\n\n///\n\n/// Getting a handle to the current thread with `thread::current()`:\n\n///\n\n/// ```\n\n/// use std::thread;\n\n///\n\n/// let handler = thread::Builder::new()\n\n/// .name(\"named thread\".into())\n\n/// .spawn(|| {\n\n/// let handle = thread::current();\n\n/// assert_eq!(handle.name(), Some(\"named thread\"));\n\n/// })\n\n/// .unwrap();\n\n///\n\n/// handler.join().unwrap();\n\n/// ```\n", "file_path": "std/src/thread/mod.rs", "rank": 28, "score": 107965.55139369384 }, { "content": "pub fn lookup_host(host: &str) -> Result<LookupHost> {\n\n let mut ip_string = String::new();\n\n File::open(\"/etc/net/ip\")?.read_to_string(&mut ip_string)?;\n\n let ip: Vec<u8> = ip_string.trim().split(\".\").map(|part| part.parse::<u8>()\n\n .unwrap_or(0)).collect();\n\n\n\n let mut dns_string = String::new();\n\n File::open(\"/etc/net/dns\")?.read_to_string(&mut dns_string)?;\n\n let dns: Vec<u8> = dns_string.trim().split(\".\").map(|part| part.parse::<u8>()\n\n .unwrap_or(0)).collect();\n\n\n\n if ip.len() == 4 && dns.len() == 4 {\n\n let time = time::SystemTime::now().duration_since(time::UNIX_EPOCH).unwrap();\n\n let tid = (time.subsec_nanos() >> 16) as u16;\n\n\n\n let packet = Dns {\n\n transaction_id: tid,\n\n flags: 0x0100,\n\n queries: vec![DnsQuery {\n\n name: host.to_string(),\n", "file_path": "std/src/sys/redox/net/mod.rs", "rank": 29, "score": 107440.14230129571 }, { "content": "pub fn decode_error_kind(_errno: i32) -> ::io::ErrorKind {\n\n ::io::ErrorKind::Other\n\n}\n\n\n", "file_path": "std/src/sys/ros/mod.rs", "rank": 30, "score": 107440.14230129571 }, { "content": "// A few methods below (read_to_string, read_line) will append data into a\n\n// `String` buffer, but we need to be pretty careful when doing this. The\n\n// implementation will just call `.as_mut_vec()` and then delegate to a\n\n// byte-oriented reading method, but we must ensure that when returning we never\n\n// leave `buf` in a state such that it contains invalid UTF-8 in its bounds.\n\n//\n\n// To this end, we use an RAII guard (to protect against panics) which updates\n\n// the length of the string when it is dropped. This guard initially truncates\n\n// the string to the prior length and only after we've validated that the\n\n// new contents are valid UTF-8 do we allow it to set a longer length.\n\n//\n\n// The unsafety in this function is twofold:\n\n//\n\n// 1. We're looking at the raw bytes of `buf`, so we take on the burden of UTF-8\n\n// checks.\n\n// 2. 
We're passing a raw buffer to the function `f`, and it is expected that\n\n// the function only *appends* bytes to the buffer. We'll get undefined\n\n// behavior if existing bytes are overwritten to have non-UTF-8 data.\n\nfn append_to_string<F>(buf: &mut String, f: F) -> Result<usize>\n\n where F: FnOnce(&mut Vec<u8>) -> Result<usize>\n\n{\n\n unsafe {\n\n let mut g = Guard { len: buf.len(), buf: buf.as_mut_vec() };\n\n let ret = f(g.buf);\n\n if str::from_utf8(&g.buf[g.len..]).is_err() {\n\n ret.and_then(|_| {\n\n Err(Error::new(ErrorKind::InvalidData,\n\n \"stream did not contain valid UTF-8\"))\n\n })\n\n } else {\n\n g.len = g.buf.len();\n\n ret\n\n }\n\n }\n\n}\n\n\n", "file_path": "std/src/io/mod.rs", "rank": 31, "score": 106133.02097158847 }, { "content": "pub fn tsa<A: ToSocketAddrs>(a: A) -> Result<Vec<SocketAddr>, String> {\n\n match a.to_socket_addrs() {\n\n Ok(a) => Ok(a.collect()),\n\n Err(e) => Err(e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "std/src/net/test.rs", "rank": 32, "score": 106043.29985361134 }, { "content": "pub fn cvt(result: Result<usize, syscall::Error>) -> io::Result<usize> {\n\n result.map_err(|err| io::Error::from_raw_os_error(err.errno))\n\n}\n\n\n\n/// On Redox, use an illegal instruction to abort\n\npub unsafe fn abort_internal() -> ! {\n\n ::core::intrinsics::abort();\n\n}\n", "file_path": "std/src/sys/redox/mod.rs", "rank": 33, "score": 101820.83257593955 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn sync_channel<T>(bound: usize) -> (SyncSender<T>, Receiver<T>) {\n\n let a = Arc::new(sync::Packet::new(bound));\n\n (SyncSender::new(a.clone()), Receiver::new(Flavor::Sync(a)))\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// Sender\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\nimpl<T> Sender<T> {\n\n fn new(inner: Flavor<T>) -> Sender<T> {\n\n Sender {\n\n inner: UnsafeCell::new(inner),\n\n }\n\n }\n\n\n\n /// Attempts to send a value on this channel, returning it back if it could\n\n /// not be sent.\n\n ///\n\n /// A successful send occurs when it is determined that the other end of\n", "file_path": "std/src/sync/mpsc/mod.rs", "rank": 34, "score": 101063.80377723638 }, { "content": "fn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>\n\n where F: FnMut(&SocketAddr) -> io::Result<T>\n\n{\n\n let mut last_err = None;\n\n for addr in addr.to_socket_addrs()? {\n\n match f(&addr) {\n\n Ok(l) => return Ok(l),\n\n Err(e) => last_err = Some(e),\n\n }\n\n }\n\n Err(last_err.unwrap_or_else(|| {\n\n Error::new(ErrorKind::InvalidInput,\n\n \"could not resolve to any addresses\")\n\n }))\n\n}\n\n\n\n/// An iterator over `SocketAddr` values returned from a host lookup operation.\n\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n\n iterator and returning socket \\\n\n addresses\",\n", "file_path": "std/src/net/mod.rs", "rank": 35, "score": 99597.21030952713 }, { "content": "/// Align `addr` downwards to the nearest multiple of `align`.\n\n///\n\n/// The returned usize is always <= `addr.`\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `align` is not a power of 2.\n\npub fn align_down(addr: usize, align: usize) -> usize {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "kernel/src/allocator/util.rs", "rank": 36, "score": 97863.15291989747 }, { "content": "// This uses an adaptive system to extend the vector when it fills. 
We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) slower than this if the reader has a very small\n\n// amount of data to return.\n\n//\n\n// Because we're extending the buffer with uninitialized data for trusted\n\n// readers, we need to make sure to truncate that if any of this panics.\n\nfn read_to_end<R: Read + ?Sized>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize> {\n\n let start_len = buf.len();\n\n let mut g = Guard { len: buf.len(), buf: buf };\n\n let ret;\n\n loop {\n\n if g.len == g.buf.len() {\n\n unsafe {\n\n g.buf.reserve(32);\n\n let capacity = g.buf.capacity();\n\n g.buf.set_len(capacity);\n\n r.initializer().initialize(&mut g.buf[g.len..]);\n\n }\n\n }\n\n\n\n match r.read(&mut g.buf[g.len..]) {\n\n Ok(0) => {\n\n ret = Ok(g.len - start_len);\n\n break;\n\n }\n\n Ok(n) => g.len += n,\n", "file_path": "std/src/io/mod.rs", "rank": 37, "score": 96426.13804213752 }, { "content": "fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>)\n\n -> Result<usize> {\n\n let mut read = 0;\n\n loop {\n\n let (done, used) = {\n\n let available = match r.fill_buf() {\n\n Ok(n) => n,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => return Err(e)\n\n };\n\n match memchr::memchr(delim, available) {\n\n Some(i) => {\n\n buf.extend_from_slice(&available[..i + 1]);\n\n (true, i + 1)\n\n }\n\n None => {\n\n buf.extend_from_slice(available);\n\n (false, available.len())\n\n }\n\n }\n", "file_path": "std/src/io/mod.rs", "rank": 38, "score": 96421.96250817977 }, { "content": "#[cfg(not(test))]\n\nfn lang_start_internal(main: &(Fn() -> i32 + Sync + ::panic::RefUnwindSafe),\n\n argc: isize, argv: *const *const u8) -> isize {\n\n use panic;\n\n use sys;\n\n use sys_common;\n\n use sys_common::thread_info;\n\n use thread::Thread;\n\n\n\n sys::init();\n\n\n\n unsafe {\n\n let main_guard = sys::thread::guard::init();\n\n sys::stack_overflow::init();\n\n\n\n // Next, set up the current Thread with the guard information we just\n\n // created. 
Note that this isn't necessary in general for new threads,\n\n // but we just do this to name the main thread and to give it correct\n\n // info about the stack bounds.\n\n let thread = Thread::new(Some(\"main\".to_owned()));\n\n thread_info::set(main_guard, thread);\n", "file_path": "std/src/rt.rs", "rank": 39, "score": 86736.64163579943 }, { "content": "#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n\npub fn take_hook() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {\n\n if thread::panicking() {\n\n panic!(\"cannot modify the panic hook from a panicking thread\");\n\n }\n\n\n\n unsafe {\n\n HOOK_LOCK.write();\n\n let hook = HOOK;\n\n HOOK = Hook::Default;\n\n HOOK_LOCK.write_unlock();\n\n\n\n match hook {\n\n Hook::Default => Box::new(default_hook),\n\n Hook::Custom(ptr) => Box::from_raw(ptr),\n\n }\n\n }\n\n}\n\n\n\n/// A struct providing information about a panic.\n\n///\n", "file_path": "std/src/panicking.rs", "rank": 40, "score": 86572.9828468355 }, { "content": "/// Test uart driver\n\nfn echo() {\n\n let mut mu = pi::uart::MiniUart::new();\n\n mu.write_str(\"hello world\").expect(\"write str err\");\n\n mu.set_read_timeout(10000);\n\n loop {\n\n mu.write_str(\"$ \").expect(\"write str err\");;\n\n let mut buf = [0u8; 1];\n\n match mu.read(&mut buf) {\n\n Ok(n) => {\n\n mu.write(&buf[0..n]).expect(\"write err\");\n\n mu.write_str(\"\\n\").expect(\"write str err\");;\n\n },\n\n Err(_) => mu.write_str(\"you took to long\").unwrap(),\n\n };\n\n }\n\n}\n\n\n\n#[no_mangle]\n\n#[cfg(not(test))]\n\npub extern \"C\" fn kmain() {\n\n ALLOCATOR.initialize();\n\n blinky(16, 1000);\n\n shell::shell(\"$ \");\n\n // echo();\n\n}\n", "file_path": "kernel/src/kmain.rs", "rank": 41, "score": 84959.27716723419 }, { "content": "fn main() {\n\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n\n if cfg!(feature = \"backtrace\") &&\n\n !target.contains(\"cloudabi\") &&\n\n !target.contains(\"emscripten\") &&\n\n !target.contains(\"fuchsia\") &&\n\n !target.contains(\"msvc\") &&\n\n !target.contains(\"wasm32\")\n\n {\n\n let _ = build_libbacktrace(&host, &target);\n\n }\n\n\n\n if target.contains(\"linux\") {\n\n if target.contains(\"android\") {\n\n println!(\"cargo:rustc-link-lib=dl\");\n\n println!(\"cargo:rustc-link-lib=log\");\n\n println!(\"cargo:rustc-link-lib=gcc\");\n\n } else if !target.contains(\"musl\") {\n\n println!(\"cargo:rustc-link-lib=dl\");\n", "file_path": "std/src/build.rs", "rank": 42, "score": 84953.32048706981 }, { "content": "#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n\npub fn set_hook(hook: Box<Fn(&PanicInfo) + 'static + Sync + Send>) {\n\n if thread::panicking() {\n\n panic!(\"cannot modify the panic hook from a panicking thread\");\n\n }\n\n\n\n unsafe {\n\n HOOK_LOCK.write();\n\n let old_hook = HOOK;\n\n HOOK = Hook::Custom(Box::into_raw(hook));\n\n HOOK_LOCK.write_unlock();\n\n\n\n if let Hook::Custom(ptr) = old_hook {\n\n Box::from_raw(ptr);\n\n }\n\n }\n\n}\n\n\n\n/// Unregisters the current panic hook, returning it.\n\n///\n\n/// If no custom hook is registered, the default hook will be returned.\n", "file_path": "std/src/panicking.rs", "rank": 43, "score": 84932.63850828129 }, { "content": "pub fn main() {\n\n println!(\"cargo:rerun-if-changed=ext/layout.ld\");\n\n println!(\"cargo:rerun-if-changed=ext/init.S\");\n\n}\n", "file_path": "bootloader/build.rs", "rank": 44, "score": 83084.13046403635 }, { "content": "pub fn main() {\n\n if 
::std::env::var(\"TARGET\").unwrap() == \"aarch64-none-elf\" {\n\n println!(\"cargo:rustc-link-search=native=ext\");\n\n println!(\"cargo:rustc-link-lib=static=sd\");\n\n println!(\"cargo:rerun-if-changed=ext/libsd.a\");\n\n }\n\n\n\n println!(\"cargo:rerun-if-changed=ext/layout.ld\");\n\n println!(\"cargo:rerun-if-changed=ext/init.S\");\n\n}\n", "file_path": "kernel/build.rs", "rank": 45, "score": 83084.13046403635 }, { "content": "#[stable(feature = \"process_abort\", since = \"1.17.0\")]\n\npub fn abort() -> ! {\n\n unsafe { ::sys::abort_internal() };\n\n}\n\n\n\n/// Returns the OS-assigned process identifier associated with this process.\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```no_run\n\n/// #![feature(getpid)]\n\n/// use std::process;\n\n///\n\n/// println!(\"My pid is {}\", process::id());\n\n/// ```\n\n///\n\n///\n", "file_path": "std/src/process.rs", "rank": 46, "score": 81737.22299057036 }, { "content": "pub fn push(f: Box<FnBox()>) -> bool {\n\n let mut ret = true;\n\n unsafe {\n\n LOCK.lock();\n\n if init() {\n\n (*QUEUE).push(f);\n\n } else {\n\n ret = false;\n\n }\n\n LOCK.unlock();\n\n }\n\n ret\n\n}\n", "file_path": "std/src/sys_common/at_exit_imp.rs", "rank": 47, "score": 81329.10587070673 }, { "content": "#[allow(dead_code)]\n\nfn assert_covariance() {\n\n fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> {\n\n v\n\n }\n\n fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {\n\n v\n\n }\n\n fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {\n\n v\n\n }\n\n fn difference<'a, 'new>(v: Difference<'a, &'static str, RandomState>)\n\n -> Difference<'a, &'new str, RandomState> {\n\n v\n\n }\n\n fn symmetric_difference<'a, 'new>(v: SymmetricDifference<'a, &'static str, RandomState>)\n\n -> SymmetricDifference<'a, &'new str, RandomState> {\n\n v\n\n }\n\n fn intersection<'a, 'new>(v: Intersection<'a, &'static str, RandomState>)\n\n -> Intersection<'a, &'new str, RandomState> {\n", "file_path": "std/src/collections/hash/set.rs", "rank": 48, "score": 81110.43302966474 }, { "content": "#[allow(dead_code)]\n\nfn assert_covariance() {\n\n fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {\n\n v\n\n }\n\n fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {\n\n v\n\n }\n\n fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {\n\n v\n\n }\n\n fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {\n\n v\n\n }\n\n fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {\n\n v\n\n }\n\n fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {\n\n v\n\n }\n\n fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {\n", "file_path": "std/src/collections/hash/map.rs", "rank": 49, "score": 81110.43302966474 }, { "content": "fn _assert_error_is_sync_send() {\n\n fn _is_sync_send<T: Sync+Send>() {}\n\n _is_sync_send::<Error>();\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{Error, ErrorKind, Repr, Custom};\n\n use error;\n\n use fmt;\n\n use sys::os::error_string;\n\n use sys::decode_error_kind;\n\n\n\n #[test]\n\n fn test_debug_error() {\n\n let code = 6;\n\n let msg = error_string(code);\n\n let kind = decode_error_kind(code);\n\n let err = Error {\n\n repr: Repr::Custom(box Custom {\n", "file_path": "std/src/io/error.rs", "rank": 50, "score": 79951.90591117987 }, { "content": "#[test]\n\nfn can_alias_safehash_as_hash() {\n\n 
assert_eq!(size_of::<SafeHash>(), size_of::<HashUint>())\n\n}\n\n\n\n// RawBucket methods are unsafe as it's possible to\n\n// make a RawBucket point to invalid memory using safe code.\n\nimpl<K, V> RawBucket<K, V> {\n\n unsafe fn hash(&self) -> *mut HashUint {\n\n self.hash_start.offset(self.idx as isize)\n\n }\n\n unsafe fn pair(&self) -> *mut (K, V) {\n\n self.pair_start.offset(self.idx as isize) as *mut (K, V)\n\n }\n\n unsafe fn hash_pair(&self) -> (*mut HashUint, *mut (K, V)) {\n\n (self.hash(), self.pair())\n\n }\n\n}\n\n\n\n// Buckets hold references to the table.\n\nimpl<K, V, M> FullBucket<K, V, M> {\n", "file_path": "std/src/collections/hash/table.rs", "rank": 51, "score": 78853.50342537378 }, { "content": "/// Determines whether the current thread is unwinding because of panic.\n\npub fn panicking() -> bool {\n\n update_panic_count(0) != 0\n\n}\n\n\n\n/// Entry point of panic from the libcore crate.\n\n#[cfg(not(test))]\n\n#[lang = \"panic_fmt\"]\n\n#[unwind]\n\npub extern fn rust_begin_panic(msg: fmt::Arguments,\n\n file: &'static str,\n\n line: u32,\n\n col: u32) -> ! {\n\n begin_panic_fmt(&msg, &(file, line, col))\n\n}\n\n\n\n/// The entry point for panicking with a formatted message.\n\n///\n\n/// This is designed to reduce the amount of code required at the call\n\n/// site as much as possible (so that `panic!()` has as low an impact\n\n/// on (e.g.) the inlining of other functions as possible), by moving\n\n/// the actual formatting into this shared place.\n\n#[unstable(feature = \"libstd_sys_internals\",\n\n reason = \"used by the panic! macro\",\n\n issue = \"0\")]\n", "file_path": "std/src/panicking.rs", "rank": 52, "score": 78791.29251553753 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn vars() -> Vars {\n\n Vars { inner: vars_os() }\n\n}\n\n\n\n/// Returns an iterator of (variable, value) pairs of OS strings, for all the\n\n/// environment variables of the current process.\n\n///\n\n/// The returned iterator contains a snapshot of the process's environment\n\n/// variables at the time of this invocation. 
Modifications to environment\n\n/// variables afterwards will not be reflected in the returned iterator.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::env;\n\n///\n\n/// // We will iterate through the references to the element returned by\n\n/// // env::vars_os();\n\n/// for (key, value) in env::vars_os() {\n\n/// println!(\"{:?}: {:?}\", key, value);\n\n/// }\n\n/// ```\n", "file_path": "std/src/env.rs", "rank": 53, "score": 78791.29251553753 }, { "content": "#[unstable(feature = \"getpid\", issue = \"44971\", reason = \"recently added\")]\n\npub fn id() -> u32 {\n\n ::sys::os::getpid()\n\n}\n\n\n\n#[cfg(all(test, not(any(target_os = \"cloudabi\", target_os = \"emscripten\"))))]\n\nmod tests {\n\n use io::prelude::*;\n\n\n\n use io::ErrorKind;\n\n use str;\n\n use super::{Command, Output, Stdio};\n\n\n\n // FIXME(#10380) these tests should not all be ignored on android.\n\n\n\n #[test]\n\n #[cfg_attr(target_os = \"android\", ignore)]\n\n fn smoke() {\n\n let p = if cfg!(target_os = \"windows\") {\n\n Command::new(\"cmd\").args(&[\"/C\", \"exit 0\"]).spawn()\n\n } else {\n", "file_path": "std/src/process.rs", "rank": 54, "score": 78791.29251553753 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn args() -> Args {\n\n Args { inner: args_os() }\n\n}\n\n\n\n/// Returns the arguments which this program was started with (normally passed\n\n/// via the command line).\n\n///\n\n/// The first element is traditionally the path of the executable, but it can be\n\n/// set to arbitrary text, and it may not even exist, so this property should\n\n/// not be relied upon for security purposes.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::env;\n\n///\n\n/// // Prints each argument on a separate line\n\n/// for argument in env::args_os() {\n\n/// println!(\"{:?}\", argument);\n\n/// }\n\n/// ```\n", "file_path": "std/src/env.rs", "rank": 55, "score": 78791.29251553753 }, { "content": "pub fn cleanup() {\n\n for i in 0..ITERS {\n\n unsafe {\n\n LOCK.lock();\n\n let queue = QUEUE;\n\n QUEUE = if i == ITERS - 1 {1} else {0} as *mut _;\n\n LOCK.unlock();\n\n\n\n // make sure we're not recursively cleaning up\n\n assert!(queue as usize != 1);\n\n\n\n // If we never called init, not need to cleanup!\n\n if queue as usize != 0 {\n\n let queue: Box<Queue> = Box::from_raw(queue);\n\n for to_run in *queue {\n\n to_run();\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "std/src/sys_common/at_exit_imp.rs", "rank": 56, "score": 78082.7158881464 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn stdout() -> Stdout {\n\n static INSTANCE: Lazy<ReentrantMutex<RefCell<LineWriter<Maybe<StdoutRaw>>>>>\n\n = Lazy::new(stdout_init);\n\n return Stdout {\n\n inner: INSTANCE.get().expect(\"cannot access stdout during shutdown\"),\n\n };\n\n\n\n fn stdout_init() -> Arc<ReentrantMutex<RefCell<LineWriter<Maybe<StdoutRaw>>>>> {\n\n let stdout = match stdout_raw() {\n\n Ok(stdout) => Maybe::Real(stdout),\n\n _ => Maybe::Fake,\n\n };\n\n Arc::new(ReentrantMutex::new(RefCell::new(LineWriter::new(stdout))))\n\n }\n\n}\n\n\n\nimpl Stdout {\n\n /// Locks this handle to the standard output stream, returning a writable\n\n /// guard.\n\n ///\n", "file_path": "std/src/io/stdio.rs", "rank": 57, "score": 77574.6201632919 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn stdin() -> Stdin {\n\n static INSTANCE: Lazy<Mutex<BufReader<Maybe<StdinRaw>>>> = Lazy::new(stdin_init);\n\n return Stdin {\n\n inner: INSTANCE.get().expect(\"cannot access 
stdin during shutdown\"),\n\n };\n\n\n\n fn stdin_init() -> Arc<Mutex<BufReader<Maybe<StdinRaw>>>> {\n\n let stdin = match stdin_raw() {\n\n Ok(stdin) => Maybe::Real(stdin),\n\n _ => Maybe::Fake\n\n };\n\n\n\n Arc::new(Mutex::new(BufReader::with_capacity(stdio::STDIN_BUF_SIZE, stdin)))\n\n }\n\n}\n\n\n\nimpl Stdin {\n\n /// Locks this handle to the standard input stream, returning a readable\n\n /// guard.\n\n ///\n", "file_path": "std/src/io/stdio.rs", "rank": 58, "score": 77574.6201632919 }, { "content": "/// Returns the current time in microseconds.\n\npub fn current_time() -> u64 {\n\n Timer::new().read()\n\n}\n\n\n", "file_path": "pi/src/timer.rs", "rank": 59, "score": 77574.6201632919 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn stderr() -> Stderr {\n\n static INSTANCE: Lazy<ReentrantMutex<RefCell<Maybe<StderrRaw>>>> = Lazy::new(stderr_init);\n\n return Stderr {\n\n inner: INSTANCE.get().expect(\"cannot access stderr during shutdown\"),\n\n };\n\n\n\n fn stderr_init() -> Arc<ReentrantMutex<RefCell<Maybe<StderrRaw>>>> {\n\n let stderr = match stderr_raw() {\n\n Ok(stderr) => Maybe::Real(stderr),\n\n _ => Maybe::Fake,\n\n };\n\n Arc::new(ReentrantMutex::new(RefCell::new(stderr)))\n\n }\n\n}\n\n\n\nimpl Stderr {\n\n /// Locks this handle to the standard error stream, returning a writable\n\n /// guard.\n\n ///\n\n /// The lock is released when the returned lock goes out of scope. The\n", "file_path": "std/src/io/stdio.rs", "rank": 60, "score": 77574.6201632919 }, { "content": "fn _remove_var(k: &OsStr) {\n\n os_imp::unsetenv(k).unwrap_or_else(|e| {\n\n panic!(\"failed to remove environment variable `{:?}`: {}\", k, e)\n\n })\n\n}\n\n\n\n/// An iterator that splits an environment variable into paths according to\n\n/// platform-specific conventions.\n\n///\n\n/// This structure is created by the [`std::env::split_paths`] function. See its\n\n/// documentation for more.\n\n///\n\n/// [`std::env::split_paths`]: fn.split_paths.html\n\n#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub struct SplitPaths<'a> { inner: os_imp::SplitPaths<'a> }\n\n\n\n/// Parses input according to platform conventions for the `PATH`\n\n/// environment variable.\n\n///\n\n/// Returns an iterator over the paths contained in `unparsed`.\n", "file_path": "std/src/env.rs", "rank": 61, "score": 77574.6201632919 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn vars_os() -> VarsOs {\n\n VarsOs { inner: os_imp::env() }\n\n}\n\n\n\n#[stable(feature = \"env\", since = \"1.0.0\")]\n\nimpl Iterator for Vars {\n\n type Item = (String, String);\n\n fn next(&mut self) -> Option<(String, String)> {\n\n self.inner.next().map(|(a, b)| {\n\n (a.into_string().unwrap(), b.into_string().unwrap())\n\n })\n\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }\n\n}\n\n\n\n#[stable(feature = \"std_debug\", since = \"1.16.0\")]\n\nimpl fmt::Debug for Vars {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.pad(\"Vars { .. }\")\n\n }\n", "file_path": "std/src/env.rs", "rank": 62, "score": 76416.09304480703 }, { "content": "pub fn getppid() -> u32 {\n\n syscall::getppid().unwrap() as u32\n\n}\n", "file_path": "std/src/sys/redox/os.rs", "rank": 63, "score": 76416.09304480703 }, { "content": "fn default_hook(info: &PanicInfo) {\n\n #[cfg(feature = \"backtrace\")]\n\n use sys_common::backtrace;\n\n\n\n // If this is a double panic, make sure that we print a backtrace\n\n // for this panic. 
Otherwise only print it if logging is enabled.\n\n #[cfg(feature = \"backtrace\")]\n\n let log_backtrace = {\n\n let panics = update_panic_count(0);\n\n\n\n if panics >= 2 {\n\n Some(backtrace::PrintFormat::Full)\n\n } else {\n\n backtrace::log_enabled()\n\n }\n\n };\n\n\n\n let file = info.location.file;\n\n let line = info.location.line;\n\n let col = info.location.col;\n", "file_path": "std/src/panicking.rs", "rank": 64, "score": 76416.09304480703 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn args_os() -> ArgsOs {\n\n ArgsOs { inner: sys::args::args() }\n\n}\n\n\n\n#[stable(feature = \"env\", since = \"1.0.0\")]\n\nimpl Iterator for Args {\n\n type Item = String;\n\n fn next(&mut self) -> Option<String> {\n\n self.inner.next().map(|s| s.into_string().unwrap())\n\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }\n\n}\n\n\n\n#[stable(feature = \"env\", since = \"1.0.0\")]\n\nimpl ExactSizeIterator for Args {\n\n fn len(&self) -> usize { self.inner.len() }\n\n fn is_empty(&self) -> bool { self.inner.is_empty() }\n\n}\n\n\n\n#[stable(feature = \"env_iterators\", since = \"1.12.0\")]\n", "file_path": "std/src/env.rs", "rank": 65, "score": 76416.09304480703 }, { "content": "/// Returns a vector of (variable, value) byte-vector pairs for all the\n\n/// environment variables of the current process.\n\npub fn env() -> Env {\n\n let mut variables: Vec<(OsString, OsString)> = Vec::new();\n\n if let Ok(mut file) = ::fs::File::open(\"env:\") {\n\n let mut string = String::new();\n\n if file.read_to_string(&mut string).is_ok() {\n\n for line in string.lines() {\n\n let mut parts = line.splitn(2, '=');\n\n if let Some(name) = parts.next() {\n\n let value = parts.next().unwrap_or(\"\");\n\n variables.push((OsString::from(name.to_string()),\n\n OsString::from(value.to_string())));\n\n }\n\n }\n\n }\n\n }\n\n Env { iter: variables.into_iter(), _dont_send_or_sync_me: PhantomData }\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 66, "score": 76416.09304480703 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn temp_dir() -> PathBuf {\n\n os_imp::temp_dir()\n\n}\n\n\n\n/// Returns the full filesystem path of the current running executable.\n\n///\n\n/// # Platform-specific behavior\n\n///\n\n/// If the executable was invoked through a symbolic link, some platforms will\n\n/// return the path of the symbolic link and other platforms will return the\n\n/// path of the symbolic link’s target.\n\n///\n\n/// # Errors\n\n///\n\n/// Acquiring the path of the current executable is a platform-specific operation\n\n/// that can fail for a good number of reasons. 
Some errors can include, but not\n\n/// be limited to, filesystem operations failing or general syscall failures.\n\n///\n\n/// # Security\n\n///\n", "file_path": "std/src/env.rs", "rank": 67, "score": 76416.09304480703 }, { "content": "pub fn getpid() -> u32 {\n\n syscall::getpid().unwrap() as u32\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 68, "score": 76416.09304480703 }, { "content": "/// Returns the command line arguments\n\npub fn args() -> Args {\n\n imp::args()\n\n}\n\n\n\npub struct Args {\n\n iter: vec::IntoIter<OsString>,\n\n _dont_send_or_sync_me: PhantomData<*mut ()>,\n\n}\n\n\n\nimpl Args {\n\n pub fn inner_debug(&self) -> &[OsString] {\n\n self.iter.as_slice()\n\n }\n\n}\n\n\n\nimpl Iterator for Args {\n\n type Item = OsString;\n\n fn next(&mut self) -> Option<OsString> { self.iter.next() }\n\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n\n}\n", "file_path": "std/src/sys/redox/args.rs", "rank": 69, "score": 76416.09304480703 }, { "content": "/// Returns the platform-specific value of errno\n\npub fn errno() -> i32 {\n\n unsafe {\n\n (*errno_location())\n\n }\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 70, "score": 76416.09304480703 }, { "content": "/// Starts a shell using `prefix` as the prefix for each line. This function\n\n/// never returns: it is perpetually in a shell loop.\n\npub fn shell(prefix: &str) -> ! {\n\n // \\r, \\n are ENTER\n\n // ASCII 8 and 127 are backspace and delete which erase a single char\n\n // if invalid ASCII -> ring BELL\n\n // receive a stream of incoming bytes\n\n // as each byte is received, check its value\n\n // if it if a valid value, put it on the stack\n\n // if it is an invalid value right BELL and do not change the stack\n\n // if it is a backspace/del, remove last item on the stack\n\n // send final stack array to the parse fun as a string slice\n\n // get back a Command with args arg\n\n // look at first arg, if echo then call echo and pass in remaining parameters\n\n // if not echo, then print\"unknown command\"\n\n\n\n // unimplemented!();\n\n\n\n const BEL: u8 = 0x7; // Bell\n\n const BS: u8 = 0x8; // Backspace\n\n const DEL: u8 = 0x7F; // Delete\n\n const LF: u8 = 0x0A; // Line Feed\n", "file_path": "kernel/src/shell.rs", "rank": 71, "score": 76084.71185796657 }, { "content": "/// Branches to the address `addr` unconditionally.\n\nfn jump_to(addr: *mut u8) -> ! {\n\n unsafe {\n\n asm!(\"br $0\" : : \"r\"(addr as usize));\n\n loop { asm!(\"nop\" :::: \"volatile\") }\n\n }\n\n}\n\n\n", "file_path": "bootloader/src/kmain.rs", "rank": 72, "score": 76079.32825622192 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn exit(code: i32) -> ! {\n\n ::sys_common::cleanup();\n\n ::sys::os::exit(code)\n\n}\n\n\n\n/// Terminates the process in an abnormal fashion.\n\n///\n\n/// The function will never return and will immediately terminate the current\n\n/// process in a platform specific \"abnormal\" manner.\n\n///\n\n/// Note that because this function never returns, and that it terminates the\n\n/// process, no destructors on the current stack or any other thread's stack\n\n/// will be run.\n\n///\n\n/// This is in contrast to the default behaviour of [`panic!`] which unwinds\n\n/// the current thread's stack and calls all destructors.\n\n/// When `panic=\"abort\"` is set, either as an argument to `rustc` or in a\n\n/// crate's Cargo.toml, [`panic!`] and `abort` are similar. 
However,\n\n/// [`panic!`] will still call the [panic hook] while `abort` will not.\n\n///\n", "file_path": "std/src/process.rs", "rank": 73, "score": 76079.32825622192 }, { "content": "// Detect scheme on Redox\n\nfn has_redox_scheme(s: &[u8]) -> bool {\n\n cfg!(target_os = \"redox\") && s.split(|b| *b == b'/').next().unwrap_or(b\"\").contains(&b':')\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// Cross-platform, iterator-independent parsing\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "std/src/path.rs", "rank": 74, "score": 76079.32825622192 }, { "content": "/// Returns a number of frames to remove at the beginning and at the end of the\n\n/// backtrace, according to the backtrace format.\n\nfn filter_frames(frames: &[Frame],\n\n format: PrintFormat,\n\n context: &BacktraceContext) -> (usize, usize)\n\n{\n\n if format == PrintFormat::Full {\n\n return (0, 0);\n\n }\n\n\n\n let skipped_before = 0;\n\n\n\n let skipped_after = frames.len() - frames.iter().position(|frame| {\n\n let mut is_marker = false;\n\n let _ = resolve_symname(*frame, |symname| {\n\n if let Some(mangled_symbol_name) = symname {\n\n // Use grep to find the concerned functions\n\n if mangled_symbol_name.contains(\"__rust_begin_short_backtrace\") {\n\n is_marker = true;\n\n }\n\n }\n\n Ok(())\n", "file_path": "std/src/sys_common/backtrace.rs", "rank": 75, "score": 75311.64026079027 }, { "content": "pub fn min_stack() -> usize {\n\n static MIN: atomic::AtomicUsize = atomic::AtomicUsize::new(0);\n\n match MIN.load(Ordering::SeqCst) {\n\n 0 => {}\n\n n => return n - 1,\n\n }\n\n let amt = env::var(\"RUST_MIN_STACK\").ok().and_then(|s| s.parse().ok());\n\n let amt = amt.unwrap_or(imp::DEFAULT_MIN_STACK_SIZE);\n\n\n\n // 0 is our sentinel value, so ensure that we'll never see 0 after\n\n // initialization has run\n\n MIN.store(amt + 1, Ordering::SeqCst);\n\n amt\n\n}\n", "file_path": "std/src/sys_common/thread.rs", "rank": 76, "score": 75311.64026079027 }, { "content": "pub fn page_size() -> usize {\n\n 4096\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 77, "score": 75311.64026079027 }, { "content": "#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub fn is_separator(c: char) -> bool {\n\n c.is_ascii() && is_sep_byte(c as u8)\n\n}\n\n\n\n/// The primary separator of path components for the current platform.\n\n///\n\n/// For example, `/` on Unix and `\\` on Windows.\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub const MAIN_SEPARATOR: char = ::sys::path::MAIN_SEP;\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// Misc helpers\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "std/src/path.rs", "rank": 78, "score": 74730.23932314706 }, { "content": "#[stable(feature = \"catch_unwind\", since = \"1.9.0\")]\n\npub fn catch_unwind<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {\n\n unsafe {\n\n panicking::try(f)\n\n }\n\n}\n\n\n\n/// Triggers a panic without invoking the panic hook.\n\n///\n\n/// This is designed to be used in conjunction with `catch_unwind` to, for\n\n/// example, carry a panic across a layer of C code.\n\n///\n\n/// # Notes\n\n///\n\n/// Note that panics in Rust are not always implemented via unwinding, but they\n\n/// may be implemented by aborting the process. 
If this function is called when\n\n/// panics are implemented this way then this function will abort the process,\n\n/// not trigger an unwind.\n\n///\n\n/// # Examples\n\n///\n", "file_path": "std/src/panic.rs", "rank": 79, "score": 74296.78476722192 }, { "content": "#[inline]\n\nfn calculate_offsets(hashes_size: usize,\n\n pairs_size: usize,\n\n pairs_align: usize)\n\n -> (usize, usize, bool) {\n\n let pairs_offset = round_up_to_next(hashes_size, pairs_align);\n\n let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size);\n\n\n\n (pairs_offset, end_of_pairs, oflo)\n\n}\n\n\n", "file_path": "std/src/collections/hash/table.rs", "rank": 80, "score": 74257.56226451516 }, { "content": "pub fn temp_dir() -> PathBuf {\n\n ::env::var_os(\"TMPDIR\").map(PathBuf::from).unwrap_or_else(|| {\n\n PathBuf::from(\"/tmp\")\n\n })\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 81, "score": 74257.56226451516 }, { "content": "// Returns a tuple of (minimum required malloc alignment,\n\n// array_size), from the start of a mallocated array.\n\nfn calculate_allocation(hash_size: usize,\n\n hash_align: usize,\n\n pairs_size: usize,\n\n pairs_align: usize)\n\n -> (usize, usize, bool) {\n\n let (_, end_of_pairs, oflo) = calculate_offsets(hash_size, pairs_size, pairs_align);\n\n\n\n let align = cmp::max(hash_align, pairs_align);\n\n\n\n (align, end_of_pairs, oflo)\n\n}\n\n\n", "file_path": "std/src/collections/hash/table.rs", "rank": 82, "score": 74257.56226451516 }, { "content": "#[stable(feature = \"env\", since = \"1.0.0\")]\n\npub fn home_dir() -> Option<PathBuf> {\n\n os_imp::home_dir()\n\n}\n\n\n\n/// Returns the path of a temporary directory.\n\n///\n\n/// # Unix\n\n///\n\n/// Returns the value of the `TMPDIR` environment variable if it is\n\n/// set, otherwise for non-Android it returns `/tmp`. If Android, since there\n\n/// is no global temporary folder (it is usually allocated per-app), it returns\n\n/// `/data/local/tmp`.\n\n///\n\n/// # Windows\n\n///\n\n/// Returns the value of, in order, the `TMP`, `TEMP`,\n\n/// `USERPROFILE` environment variable if any are set and not the empty\n\n/// string. 
Otherwise, `temp_dir` returns the path of the Windows directory.\n\n/// This behavior is identical to that of [`GetTempPath`][msdn], which this\n\n/// function uses internally.\n", "file_path": "std/src/env.rs", "rank": 83, "score": 73816.3483537203 }, { "content": "/// Spins until `ms` milliseconds have passed.\n\npub fn spin_sleep_ms(ms: u64) {\n\n spin_sleep_us(ms * 1000)\n\n}\n", "file_path": "pi/src/timer.rs", "rank": 84, "score": 73816.3483537203 }, { "content": "#[bench]\n\nfn find_existing(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n let mut m = HashMap::new();\n\n\n\n for i in 1..1001 {\n\n m.insert(i, i);\n\n }\n\n\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n m.contains_key(&i);\n\n }\n\n });\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 85, "score": 73816.3483537203 }, { "content": "#[bench]\n\nfn new_drop(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n b.iter(|| {\n\n let m: HashMap<i32, i32> = HashMap::new();\n\n assert_eq!(m.len(), 0);\n\n })\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 86, "score": 73816.3483537203 }, { "content": "#[bench]\n\nfn grow_by_insertion(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n let mut m = HashMap::new();\n\n\n\n for i in 1..1001 {\n\n m.insert(i, i);\n\n }\n\n\n\n let mut k = 1001;\n\n\n\n b.iter(|| {\n\n m.insert(k, k);\n\n k += 1;\n\n });\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 87, "score": 73816.3483537203 }, { "content": "pub fn exit(code: i32) -> ! {\n\n let _ = syscall::exit(code as usize);\n\n unreachable!();\n\n}\n\n\n", "file_path": "std/src/sys/redox/os.rs", "rank": 88, "score": 73816.3483537203 }, { "content": "#[bench]\n\nfn find_nonexisting(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n let mut m = HashMap::new();\n\n\n\n for i in 1..1001 {\n\n m.insert(i, i);\n\n }\n\n\n\n b.iter(|| {\n\n for i in 1001..2001 {\n\n m.contains_key(&i);\n\n }\n\n });\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 89, "score": 73816.3483537203 }, { "content": "fn now(clock: clock_t) -> Timespec {\n\n let mut t = Timespec {\n\n t: syscall::TimeSpec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n }\n\n };\n\n cvt(syscall::clock_gettime(clock, &mut t.t)).unwrap();\n\n t\n\n}\n", "file_path": "std/src/sys/redox/time.rs", "rank": 90, "score": 73816.3483537203 }, { "content": "#[bench]\n\nfn hashmap_as_queue(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n let mut m = HashMap::new();\n\n\n\n for i in 1..1001 {\n\n m.insert(i, i);\n\n }\n\n\n\n let mut k = 1;\n\n\n\n b.iter(|| {\n\n m.remove(&k);\n\n m.insert(k + 1000, k + 1000);\n\n k += 1;\n\n });\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 91, "score": 73816.3483537203 }, { "content": "/// How large a buffer to pre-allocate before reading the entire file.\n\nfn initial_buffer_size(file: &File) -> usize {\n\n // Allocate one extra byte so the buffer doesn't need to grow before the\n\n // final `read` call at the end of the file. 
Don't worry about `usize`\n\n // overflow because reading will fail regardless in that case.\n\n file.metadata().map(|m| m.len() as usize + 1).unwrap_or(0)\n\n}\n\n\n\n/// Read the entire contents of a file into a bytes vector.\n\n///\n\n/// This is a convenience function for using [`File::open`] and [`read_to_end`]\n\n/// with fewer imports and without an intermediate variable.\n\n///\n\n/// [`File::open`]: struct.File.html#method.open\n\n/// [`read_to_end`]: ../io/trait.Read.html#method.read_to_end\n\n///\n\n/// # Errors\n\n///\n\n/// This function will return an error if `path` does not already exist.\n\n/// Other errors may also be returned according to [`OpenOptions::open`].\n\n///\n", "file_path": "std/src/fs.rs", "rank": 92, "score": 73816.3483537203 }, { "content": "#[allow(dead_code)]\n\nfn output_fileline(w: &mut Write,\n\n file: &[u8],\n\n line: u32,\n\n format: PrintFormat) -> io::Result<()> {\n\n // prior line: \" ##: {:2$} - func\"\n\n w.write_all(b\"\")?;\n\n match format {\n\n PrintFormat::Full => write!(w,\n\n \" {:1$}\",\n\n \"\",\n\n HEX_WIDTH)?,\n\n PrintFormat::Short => write!(w, \" \")?,\n\n }\n\n\n\n let file = str::from_utf8(file).unwrap_or(\"<unknown>\");\n\n let file_path = Path::new(file);\n\n let mut already_printed = false;\n\n if format == PrintFormat::Short && file_path.is_absolute() {\n\n if let Ok(cwd) = env::current_dir() {\n\n if let Ok(stripped) = file_path.strip_prefix(&cwd) {\n", "file_path": "std/src/sys_common/backtrace.rs", "rank": 93, "score": 73816.3483537203 }, { "content": "/// Spins until `us` microseconds have passed.\n\npub fn spin_sleep_us(us: u64) {\n\n let end = current_time() + us;\n\n while current_time() <= end { }\n\n}\n\n\n", "file_path": "pi/src/timer.rs", "rank": 94, "score": 73816.3483537203 }, { "content": "/// Test system timer and gpio driver\n\nfn blinky(pin: u8, interval: u64) {\n\n let mut gpiopin = pi::gpio::Gpio::new(pin).into_output();\n\n\n\n for _ in 0..10 {\n\n gpiopin.set();\n\n pi::timer::spin_sleep_ms(interval);\n\n gpiopin.clear();\n\n pi::timer::spin_sleep_ms(interval);\n\n }\n\n}\n\n\n", "file_path": "kernel/src/kmain.rs", "rank": 95, "score": 73577.57811979398 }, { "content": "#[doc(hidden)]\n\npub fn _print(args: fmt::Arguments) {\n\n #[cfg(not(test))]\n\n {\n\n use std::fmt::Write;\n\n let mut console = CONSOLE.lock();\n\n console.write_fmt(args).unwrap();\n\n }\n\n\n\n #[cfg(test)]\n\n { print!(\"{}\", args); }\n\n}\n\n\n\n/// Like `println!`, but for kernel-space.\n\npub macro kprintln {\n\n () => (kprint!(\"\\n\")),\n\n ($fmt:expr) => (kprint!(concat!($fmt, \"\\n\"))),\n\n ($fmt:expr, $($arg:tt)*) => (kprint!(concat!($fmt, \"\\n\"), $($arg)*))\n\n}\n\n\n\n/// Like `print!`, but for kernel-space.\n\npub macro kprint($($arg:tt)*) {\n\n _print(format_args!($($arg)*))\n\n}\n\n\n", "file_path": "kernel/src/console.rs", "rank": 96, "score": 73571.71220466218 }, { "content": "#[inline]\n\npub fn requires_synchronized_create() -> bool {\n\n false\n\n}\n", "file_path": "std/src/sys/redox/thread_local.rs", "rank": 97, "score": 73250.48946189808 }, { "content": "// See note at the top of this module to understand why these are used:\n\nfn os_str_as_u8_slice(s: &OsStr) -> &[u8] {\n\n unsafe { &*(s as *const OsStr as *const [u8]) }\n\n}\n\nunsafe fn u8_slice_as_os_str(s: &[u8]) -> &OsStr {\n\n &*(s as *const [u8] as *const OsStr)\n\n}\n\n\n", "file_path": "std/src/path.rs", "rank": 98, "score": 72767.89819661976 }, { "content": "#[bench]\n\nfn new_insert_drop(b: &mut Bencher) {\n\n use super::map::HashMap;\n\n\n\n 
b.iter(|| {\n\n let mut m = HashMap::new();\n\n m.insert(0, 0);\n\n assert_eq!(m.len(), 1);\n\n })\n\n}\n\n\n", "file_path": "std/src/collections/hash/bench.rs", "rank": 99, "score": 72762.27035744519 } ]
Rust
src/types/fetch_attributes.rs
FelixPodint/imap-codec
48019304520dcaade13fc94a5f359d0a450b7a79
use std::num::NonZeroU32; #[cfg(feature = "arbitrary")] use arbitrary::Arbitrary; #[cfg(feature = "serdex")] use serde::{Deserialize, Serialize}; use crate::types::{ body::BodyStructure, core::NString, datetime::MyDateTime, envelope::Envelope, flag::Flag, section::Section, }; #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Macro { All, Fast, Full, } impl Macro { pub fn expand(&self) -> Vec<FetchAttribute> { use FetchAttribute::*; match self { Self::All => vec![Flags, InternalDate, Rfc822Size, Envelope], Self::Fast => vec![Flags, InternalDate, Rfc822Size], Self::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body], } } } #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum MacroOrFetchAttributes { Macro(Macro), FetchAttributes(Vec<FetchAttribute>), } impl From<Macro> for MacroOrFetchAttributes { fn from(m: Macro) -> Self { MacroOrFetchAttributes::Macro(m) } } impl From<Vec<FetchAttribute>> for MacroOrFetchAttributes { fn from(attributes: Vec<FetchAttribute>) -> Self { MacroOrFetchAttributes::FetchAttributes(attributes) } } #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FetchAttribute { Body, BodyExt { section: Option<Section>, partial: Option<(u32, NonZeroU32)>, peek: bool, }, BodyStructure, Envelope, Flags, InternalDate, Rfc822, Rfc822Header, Rfc822Size, Rfc822Text, Uid, } #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FetchAttributeValue { Body(BodyStructure), BodyExt { section: Option<Section>, origin: Option<u32>, data: NString, }, BodyStructure(BodyStructure), Envelope(Envelope), Flags(Vec<Flag>), InternalDate(MyDateTime), Rfc822(NString), Rfc822Header(NString), Rfc822Size(u32), Rfc822Text(NString), Uid(NonZeroU32), }
use std::num::NonZeroU32; #[cfg(feature = "arbitrary")] use arbitrary::Arbitrary; #[cfg(feature = "serdex")] use serde::{Deserialize, Serialize}; use crate::types::{ body::BodyStructure, core::NString, datetime::MyDateTime, envelope::Envelope, flag::Flag, section::Section, }; #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Macro { All, Fast, Full, } impl Macro { pub fn expand(&self) -> Vec<FetchAttribute> { use FetchAttribut
} #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum MacroOrFetchAttributes { Macro(Macro), FetchAttributes(Vec<FetchAttribute>), } impl From<Macro> for MacroOrFetchAttributes { fn from(m: Macro) -> Self { MacroOrFetchAttributes::Macro(m) } } impl From<Vec<FetchAttribute>> for MacroOrFetchAttributes { fn from(attributes: Vec<FetchAttribute>) -> Self { MacroOrFetchAttributes::FetchAttributes(attributes) } } #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FetchAttribute { Body, BodyExt { section: Option<Section>, partial: Option<(u32, NonZeroU32)>, peek: bool, }, BodyStructure, Envelope, Flags, InternalDate, Rfc822, Rfc822Header, Rfc822Size, Rfc822Text, Uid, } #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[cfg_attr(feature = "serdex", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FetchAttributeValue { Body(BodyStructure), BodyExt { section: Option<Section>, origin: Option<u32>, data: NString, }, BodyStructure(BodyStructure), Envelope(Envelope), Flags(Vec<Flag>), InternalDate(MyDateTime), Rfc822(NString), Rfc822Header(NString), Rfc822Size(u32), Rfc822Text(NString), Uid(NonZeroU32), }
e::*; match self { Self::All => vec![Flags, InternalDate, Rfc822Size, Envelope], Self::Fast => vec![Flags, InternalDate, Rfc822Size], Self::Full => vec![Flags, InternalDate, Rfc822Size, Envelope, Body], } }
function_block-function_prefixed
[ { "content": "/// body = \"(\" (body-type-1part / body-type-mpart) \")\"\n\n///\n\n/// Note: This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed. (8 should suffice).\n\npub fn body(remaining_recursions: usize) -> impl Fn(&[u8]) -> IResult<&[u8], BodyStructure> {\n\n move |input: &[u8]| body_limited(input, remaining_recursions)\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 0, "score": 161266.52463108645 }, { "content": "/// list-wildcards = \"%\" / \"*\"\n\npub fn is_list_wildcards(i: u8) -> bool {\n\n i == b'%' || i == b'*'\n\n}\n\n\n", "file_path": "src/parse/mailbox.rs", "rank": 1, "score": 114466.12847857398 }, { "content": "/// list-char = ATOM-CHAR / list-wildcards / resp-specials\n\npub fn is_list_char(i: u8) -> bool {\n\n is_atom_char(i) || is_list_wildcards(i) || is_resp_specials(i)\n\n}\n\n\n", "file_path": "src/parse/mailbox.rs", "rank": 2, "score": 114466.12847857398 }, { "content": "/// Future expansion.\n\n///\n\n/// Client implementations MUST accept body-extension fields.\n\n/// Server implementations MUST NOT generate body-extension fields except as defined by\n\n/// future standard or standards-track revisions of this specification.\n\n///\n\n/// body-extension = nstring / number / \"(\" body-extension *(SP body-extension) \")\"\n\n///\n\n/// Note: This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed. (8 should suffice).\n\n///\n\n/// TODO: This recognizes extension data and returns &[u8].\n\nfn body_extension(remaining_recursions: usize) -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> {\n\n move |input: &[u8]| body_extension_limited(input, remaining_recursions)\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 3, "score": 113524.96205550681 }, { "content": "pub fn read_line(prompt: &str) -> String {\n\n print!(\"{}{}\", prompt, ColorClient.prefix());\n\n std::io::stdout().flush().unwrap();\n\n\n\n let mut line = String::new();\n\n std::io::stdin().read_line(&mut line).unwrap();\n\n\n\n print!(\"{}\", ColorClient.suffix());\n\n\n\n line\n\n}\n", "file_path": "examples/parse_command.rs", "rank": 4, "score": 112440.56580135255 }, { "content": "/// This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed. 
(8 should suffice).\n\nfn search_key(remaining_recursions: usize) -> impl Fn(&[u8]) -> IResult<&[u8], SearchKey> {\n\n move |input: &[u8]| search_key_limited(input, remaining_recursions)\n\n}\n\n\n", "file_path": "src/parse/command.rs", "rank": 5, "score": 112103.00435946426 }, { "content": "pub fn escape_quoted(unescaped: &str) -> Cow<str> {\n\n let mut escaped = Cow::Borrowed(unescaped);\n\n\n\n if escaped.contains('\\\\') {\n\n escaped = Cow::Owned(escaped.replace('\\\\', \"\\\\\\\\\"));\n\n }\n\n\n\n if escaped.contains('\\\"') {\n\n escaped = Cow::Owned(escaped.replace('\"', \"\\\\\\\"\"));\n\n }\n\n\n\n escaped\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 6, "score": 110168.24100554478 }, { "content": "pub fn unescape_quoted(escaped: &str) -> Cow<str> {\n\n let mut unescaped = Cow::Borrowed(escaped);\n\n\n\n if unescaped.contains(\"\\\\\\\\\") {\n\n unescaped = Cow::Owned(unescaped.replace(\"\\\\\\\\\", \"\\\\\"));\n\n }\n\n\n\n if unescaped.contains(\"\\\\\\\"\") {\n\n unescaped = Cow::Owned(unescaped.replace(\"\\\\\\\"\", \"\\\"\"));\n\n }\n\n\n\n unescaped\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_escape_quoted() {\n", "file_path": "src/utils.rs", "rank": 7, "score": 110168.24100554478 }, { "content": "/// This parser must be executed *instead* of the command parser\n\n/// when the server is in the IDLE state.\n\n///\n\n/// idle = \"IDLE\" CRLF \"DONE\"\n\n/// ^^^^^^\n\n/// |\n\n/// applied as separate parser (CRLF is not consumed through the command\n\n/// parser and must be consumed here)\n\n/// TODO: just interpret as command?\n\npub fn idle_done(input: &[u8]) -> IResult<&[u8], ()> {\n\n let mut parser = value((), tuple((tag_no_case(\"DONE\"), CRLF)));\n\n\n\n let (remaining, parsed_idle_done) = parser(input)?;\n\n\n\n Ok((remaining, parsed_idle_done))\n\n}\n\n\n\n/// # Command NonAuth\n\n\n", "file_path": "src/parse/command.rs", "rank": 8, "score": 108263.42862753515 }, { "content": "/// envelope = \"(\"\n\n/// env-date SP\n\n/// env-subject SP\n\n/// env-from SP\n\n/// env-sender SP\n\n/// env-reply-to SP\n\n/// env-to SP\n\n/// env-cc SP\n\n/// env-bcc SP\n\n/// env-in-reply-to SP\n\n/// env-message-id\n\n/// \")\"\n\npub fn envelope(input: &[u8]) -> IResult<&[u8], Envelope> {\n\n let mut parser = delimited(\n\n tag(b\"(\"),\n\n tuple((\n\n env_date,\n\n SP,\n\n env_subject,\n\n SP,\n\n env_from,\n\n SP,\n\n env_sender,\n\n SP,\n\n env_reply_to,\n\n SP,\n\n env_to,\n\n SP,\n\n env_cc,\n\n SP,\n\n env_bcc,\n\n SP,\n", "file_path": "src/parse/envelope.rs", "rank": 9, "score": 106194.36093100852 }, { "content": "/// command = tag SP (command-any /\n\n/// command-auth /\n\n/// command-nonauth /\n\n/// command-select) CRLF\n\npub fn command(input: &[u8]) -> IResult<&[u8], Command> {\n\n let mut parser = tuple((\n\n tag_imap,\n\n SP,\n\n alt((command_any, command_auth, command_nonauth, command_select)),\n\n CRLF,\n\n ));\n\n\n\n let (remaining, (tag, _, command_body, _)) = parser(input)?;\n\n\n\n Ok((remaining, Command::new(tag, command_body)))\n\n}\n\n\n\n/// # Command Any\n\n\n", "file_path": "src/parse/command.rs", "rank": 10, "score": 106194.36093100852 }, { "content": "/// flag = \"\\Answered\" / \"\\Flagged\" / \"\\Deleted\" / \"\\Seen\" / \"\\Draft\" /\n\n/// flag-keyword /\n\n/// flag-extension\n\n///\n\n/// Note: Does not include \"\\Recent\"\n\npub fn flag(input: &[u8]) -> IResult<&[u8], Flag> {\n\n alt((\n\n value(Flag::Answered, tag_no_case(b\"\\\\Answered\")),\n\n value(Flag::Flagged, tag_no_case(b\"\\\\Flagged\")),\n\n 
value(Flag::Deleted, tag_no_case(b\"\\\\Deleted\")),\n\n value(Flag::Seen, tag_no_case(b\"\\\\Seen\")),\n\n value(Flag::Draft, tag_no_case(b\"\\\\Draft\")),\n\n flag_keyword,\n\n map(flag_extension, |a| Flag::Extension(a.to_owned())),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parse/flag.rs", "rank": 11, "score": 106194.36093100852 }, { "content": "/// capability = (\"AUTH=\" auth-type) /\n\n/// \"COMPRESS=\" algorithm / ; RFC 4978\n\n/// atom\n\npub fn capability(input: &[u8]) -> IResult<&[u8], Capability> {\n\n alt((\n\n map(\n\n tuple((tag_no_case(b\"AUTH=\"), auth_type)),\n\n |(_, mechanism)| Capability::Auth(mechanism),\n\n ),\n\n map(\n\n tuple((tag_no_case(b\"COMPRESS=\"), algorithm)),\n\n |(_, algorithm)| Capability::Compress { algorithm },\n\n ),\n\n map(atom, |atom| {\n\n match atom.to_lowercase().as_ref() {\n\n \"imap4rev1\" => Capability::Imap4Rev1,\n\n \"logindisabled\" => Capability::LoginDisabled,\n\n \"starttls\" => Capability::StartTls,\n\n // RFC 2177 IMAP4 IDLE command\n\n \"idle\" => Capability::Idle,\n\n // RFC 2193 IMAP4 Mailbox Referrals\n\n \"mailbox-referrals\" => Capability::MailboxReferrals,\n\n // RFC 2221 IMAP4 Login Referrals\n", "file_path": "src/parse/response.rs", "rank": 12, "score": 106194.36093100852 }, { "content": "/// INBOX is case-insensitive. All case variants of INBOX (e.g., \"iNbOx\")\n\n/// MUST be interpreted as INBOX not as an astring.\n\n///\n\n/// An astring which consists of the case-insensitive sequence\n\n/// \"I\" \"N\" \"B\" \"O\" \"X\" is considered to be INBOX and not an astring.\n\n///\n\n/// Refer to section 5.1 for further semantic details of mailbox names.\n\n///\n\n/// mailbox = \"INBOX\" / astring\n\npub fn mailbox(input: &[u8]) -> IResult<&[u8], Mailbox> {\n\n map(astring, |astr| {\n\n match MailboxOther::try_from(astr.to_owned()) {\n\n Ok(other) => Mailbox::Other(other),\n\n Err(_) => Mailbox::Inbox,\n\n }\n\n })(input)\n\n}\n\n\n", "file_path": "src/parse/mailbox.rs", "rank": 13, "score": 106194.36093100852 }, { "content": "/// address = \"(\" addr-name SP\n\n/// addr-adl SP\n\n/// addr-mailbox SP\n\n/// addr-host \")\"\n\npub fn address(input: &[u8]) -> IResult<&[u8], Address> {\n\n let mut parser = delimited(\n\n tag(b\"(\"),\n\n tuple((addr_name, SP, addr_adl, SP, addr_mailbox, SP, addr_host)),\n\n tag(b\")\"),\n\n );\n\n\n\n let (remaining, (name, _, adl, _, mailbox, _, host)) = parser(input)?;\n\n\n\n Ok((\n\n remaining,\n\n Address::new(\n\n name.to_owned(),\n\n adl.to_owned(),\n\n mailbox.to_owned(),\n\n host.to_owned(),\n\n ),\n\n ))\n\n}\n\n\n\n#[inline]\n", "file_path": "src/parse/address.rs", "rank": 14, "score": 106194.36093100852 }, { "content": "/// response = *(continue-req / response-data) response-done\n\npub fn response(input: &[u8]) -> IResult<&[u8], Response> {\n\n // Divert from standard here for better usability.\n\n // response_data already contains the bye response, thus\n\n // response_done could also be response_tagged.\n\n //\n\n // However, I will keep it as it is for now.\n\n alt((\n\n map(continue_req, Response::Continuation),\n\n response_data,\n\n map(response_done, Response::Status),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parse/response.rs", "rank": 15, "score": 106194.36093100852 }, { "content": "/// greeting = \"*\" SP (resp-cond-auth / resp-cond-bye) CRLF\n\npub fn greeting(input: &[u8]) -> IResult<&[u8], Response> {\n\n let mut parser = tuple((\n\n tag(b\"*\"),\n\n SP,\n\n alt((\n\n map(\n\n resp_cond_auth,\n\n |(raw_status, (maybe_code, comment))| match 
raw_status.to_lowercase().as_ref() {\n\n \"ok\" => Status::Ok {\n\n tag: None,\n\n code: maybe_code,\n\n text: comment.to_owned(),\n\n },\n\n \"preauth\" => Status::PreAuth {\n\n code: maybe_code,\n\n text: comment.to_owned(),\n\n },\n\n _ => unreachable!(),\n\n },\n\n ),\n", "file_path": "src/parse/response.rs", "rank": 16, "score": 106194.36093100852 }, { "content": "fn split_trace(trace: &[u8]) -> impl Iterator<Item = (Who, &[u8])> {\n\n TraceLines { trace, offset: 0 }\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 17, "score": 105249.49792424245 }, { "content": "/// compress = \"COMPRESS\" SP algorithm\n\npub fn compress(input: &[u8]) -> IResult<&[u8], CommandBody> {\n\n map(preceded(tag_no_case(\"COMPRESS \"), algorithm), |algorithm| {\n\n CommandBody::Compress { algorithm }\n\n })(input)\n\n}\n\n\n", "file_path": "src/parse/command.rs", "rank": 18, "score": 104399.81390301499 }, { "content": "/// flag-perm = flag / \"\\*\"\n\npub fn flag_perm(input: &[u8]) -> IResult<&[u8], Flag> {\n\n alt((flag, value(Flag::Permanent, tag(b\"\\\\*\"))))(input)\n\n}\n\n\n\n#[inline]\n", "file_path": "src/parse/flag.rs", "rank": 19, "score": 104399.81390301499 }, { "content": "/// algorithm = \"DEFLATE\"\n\npub fn algorithm(input: &[u8]) -> IResult<&[u8], CompressionAlgorithm> {\n\n value(CompressionAlgorithm::Deflate, tag_no_case(\"DEFLATE\"))(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::convert::TryInto;\n\n\n\n use super::auth_type;\n\n use crate::types::AuthMechanism;\n\n\n\n #[test]\n\n fn test_auth_type() {\n\n let tests = [\n\n (b\"plain \".as_ref(), AuthMechanism::Plain),\n\n (b\"pLaiN \".as_ref(), AuthMechanism::Plain),\n\n (b\"lOgiN \".as_ref(), AuthMechanism::Login),\n\n (b\"login \".as_ref(), AuthMechanism::Login),\n\n (\n\n b\"loginX \".as_ref(),\n", "file_path": "src/parse/mod.rs", "rank": 20, "score": 104399.81390301499 }, { "content": "/// mailbox-data = \"FLAGS\" SP flag-list /\n\n/// \"LIST\" SP mailbox-list /\n\n/// \"LSUB\" SP mailbox-list /\n\n/// \"SEARCH\" *(SP nz-number) /\n\n/// \"STATUS\" SP mailbox SP \"(\" [status-att-list] \")\" /\n\n/// number SP \"EXISTS\" /\n\n/// number SP \"RECENT\"\n\npub fn mailbox_data(input: &[u8]) -> IResult<&[u8], Data> {\n\n alt((\n\n map(\n\n tuple((tag_no_case(b\"FLAGS\"), SP, flag_list)),\n\n |(_, _, flags)| Data::Flags(flags),\n\n ),\n\n map(\n\n tuple((tag_no_case(b\"LIST\"), SP, mailbox_list)),\n\n |(_, _, (items, delimiter, mailbox))| Data::List {\n\n items: items.unwrap_or_default(),\n\n mailbox,\n\n delimiter,\n\n },\n\n ),\n\n map(\n\n tuple((tag_no_case(b\"LSUB\"), SP, mailbox_list)),\n\n |(_, _, (items, delimiter, mailbox))| Data::Lsub {\n\n items: items.unwrap_or_default(),\n\n mailbox,\n\n delimiter,\n", "file_path": "src/parse/mailbox.rs", "rank": 21, "score": 104399.81390301499 }, { "content": "/// flag-fetch = flag / \"\\Recent\"\n\npub fn flag_fetch(input: &[u8]) -> IResult<&[u8], Flag> {\n\n alt((flag, value(Flag::Recent, tag_no_case(b\"\\\\Recent\"))))(input)\n\n}\n\n\n", "file_path": "src/parse/flag.rs", "rank": 22, "score": 104399.81390301499 }, { "content": "/// list-mailbox = 1*list-char / string\n\npub fn list_mailbox(input: &[u8]) -> IResult<&[u8], ListMailbox> {\n\n alt((\n\n map(take_while1(is_list_char), |bytes: &[u8]| {\n\n // Note: this is safe, because is_list_char enforces\n\n // that the string only contains ASCII characters\n\n ListMailbox::Token(\n\n ListCharString::try_from(unsafe { String::from_utf8_unchecked(bytes.to_vec()) })\n\n .unwrap(),\n\n ) // Safe to unwrap\n\n }),\n\n 
map(string, |istr| ListMailbox::String(istr.to_owned())),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parse/mailbox.rs", "rank": 23, "score": 102706.22702992859 }, { "content": "pub fn read_file(path: &str) -> IoResult<Vec<u8>> {\n\n let mut file = std::fs::File::open(path)?;\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut data)?;\n\n\n\n Ok(data)\n\n}\n\n\n", "file_path": "examples/parse_greeting.rs", "rank": 24, "score": 102706.22702992859 }, { "content": "/// date-time = DQUOTE date-day-fixed \"-\" date-month \"-\" date-year SP time SP zone DQUOTE\n\npub fn date_time(input: &[u8]) -> IResult<&[u8], MyDateTime> {\n\n let mut parser = delimited(\n\n DQUOTE,\n\n tuple((\n\n date_day_fixed,\n\n tag(b\"-\"),\n\n date_month,\n\n tag(b\"-\"),\n\n date_year,\n\n SP,\n\n time,\n\n SP,\n\n zone,\n\n )),\n\n DQUOTE,\n\n );\n\n\n\n let (remaining, (d, _, m, _, y, _, time, _, zone)) = parser(input)?;\n\n\n\n let date = NaiveDate::from_ymd_opt(y.into(), m.into(), d.into());\n", "file_path": "src/parse/datetime.rs", "rank": 25, "score": 102706.22702992859 }, { "content": "/// auth-type = atom\n\n///\n\n/// Note: Defined by [SASL]\n\npub fn auth_type(input: &[u8]) -> IResult<&[u8], AuthMechanism> {\n\n let (rem, mechanism) = atom(input)?;\n\n\n\n Ok((rem, mechanism.to_owned().into()))\n\n}\n\n\n", "file_path": "src/parse/mod.rs", "rank": 26, "score": 102706.22702992859 }, { "content": "/// Set of seq-number values, regardless of order.\n\n/// Servers MAY coalesce overlaps and/or execute the sequence in any order.\n\n///\n\n/// Example: a message sequence number set of\n\n/// 2,4:7,9,12:* for a mailbox with 15 messages is\n\n/// equivalent to 2,4,5,6,7,9,12,13,14,15\n\n///\n\n/// Example: a message sequence number set of *:4,5:7\n\n/// for a mailbox with 10 messages is equivalent to\n\n/// 10,9,8,7,6,5,4,5,6,7 and MAY be reordered and\n\n/// overlap coalesced to be 4,5,6,7,8,9,10.\n\n///\n\n/// ; errata id: 261\n\n/// sequence-set = (seq-number / seq-range) [\",\" sequence-set]\n\n///\n\n/// Simplified:\n\n///\n\n/// sequence-set = (seq-number / seq-range) *(\",\" (seq-number / seq-range))\n\n///\n\n/// TODO: Why the errata?\n\npub fn sequence_set(input: &[u8]) -> IResult<&[u8], SequenceSet> {\n\n map(\n\n separated_list1(\n\n tag(b\",\"),\n\n alt((\n\n // Ordering is important!\n\n map(seq_range, |(from, to)| Sequence::Range(from, to)),\n\n map(seq_number, Sequence::Single),\n\n )),\n\n ),\n\n SequenceSet,\n\n )(input)\n\n}\n\n\n", "file_path": "src/parse/sequence.rs", "rank": 27, "score": 102706.22702992859 }, { "content": "pub fn read_file(path: &str) -> IoResult<Vec<u8>> {\n\n let mut file = std::fs::File::open(path)?;\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut data)?;\n\n\n\n Ok(data)\n\n}\n\n\n", "file_path": "examples/parse_response.rs", "rank": 28, "score": 102706.22702992859 }, { "content": "/// section = \"[\" [section-spec] \"]\"\n\npub fn section(input: &[u8]) -> IResult<&[u8], Option<Section>> {\n\n delimited(tag(b\"[\"), opt(section_spec), tag(b\"]\"))(input)\n\n}\n\n\n", "file_path": "src/parse/section.rs", "rank": 29, "score": 102507.89909747444 }, { "content": "/// fetch-att = \"ENVELOPE\" /\n\n/// \"FLAGS\" /\n\n/// \"INTERNALDATE\" /\n\n/// \"RFC822\" [\".HEADER\" / \".SIZE\" / \".TEXT\"] /\n\n/// \"BODY\" [\"STRUCTURE\"] /\n\n/// \"UID\" /\n\n/// \"BODY\" section [\"<\" number \".\" nz-number \">\"] /\n\n/// \"BODY.PEEK\" section [\"<\" number \".\" nz-number \">\"]\n\npub fn fetch_att(input: &[u8]) -> IResult<&[u8], FetchAttribute> {\n\n alt((\n\n 
value(FetchAttribute::Envelope, tag_no_case(b\"ENVELOPE\")),\n\n value(FetchAttribute::Flags, tag_no_case(b\"FLAGS\")),\n\n value(FetchAttribute::InternalDate, tag_no_case(b\"INTERNALDATE\")),\n\n value(FetchAttribute::BodyStructure, tag_no_case(b\"BODYSTRUCTURE\")),\n\n map(\n\n tuple((\n\n tag_no_case(b\"BODY.PEEK\"),\n\n section,\n\n opt(delimited(\n\n tag(b\"<\"),\n\n tuple((number, tag(b\".\"), nz_number)),\n\n tag(b\">\"),\n\n )),\n\n )),\n\n |(_, section, byterange)| FetchAttribute::BodyExt {\n\n section,\n\n partial: byterange.map(|(start, _, end)| (start, end)),\n\n peek: true,\n", "file_path": "src/parse/fetch_attributes.rs", "rank": 30, "score": 101105.31347993358 }, { "content": "/// status-att = \"MESSAGES\" / \"RECENT\" / \"UIDNEXT\" / \"UIDVALIDITY\" / \"UNSEEN\"\n\npub fn status_att(input: &[u8]) -> IResult<&[u8], StatusAttribute> {\n\n alt((\n\n value(StatusAttribute::Messages, tag_no_case(b\"MESSAGES\")),\n\n value(StatusAttribute::Recent, tag_no_case(b\"RECENT\")),\n\n value(StatusAttribute::UidNext, tag_no_case(b\"UIDNEXT\")),\n\n value(StatusAttribute::UidValidity, tag_no_case(b\"UIDVALIDITY\")),\n\n value(StatusAttribute::Unseen, tag_no_case(b\"UNSEEN\")),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parse/status_attributes.rs", "rank": 31, "score": 101105.31347993358 }, { "content": "/// Use this parser instead of command when doing authentication.\n\n///\n\n/// ```text\n\n/// Parsed here (because this is not parsed through command,\n\n/// CRLF must be parsed additionally)\n\n/// |\n\n/// vvvvvvvvvvvvvv\n\n/// authenticate = \"AUTHENTICATE\" SP auth-type [SP (base64 / \"=\")] *(CRLF base64) // TODO: why the \"=\"?\n\n/// ^^^^^^^^^^^^^^^^^^^\n\n/// |\n\n/// Added by SASL-IR (RFC RFC 4959)\n\n/// ```\n\npub fn authenticate_data(input: &[u8]) -> IResult<&[u8], Vec<u8>> {\n\n terminated(base64, CRLF)(input) // FIXME: many0 deleted\n\n}\n\n\n\n/// # Command Select\n\n\n", "file_path": "src/parse/command.rs", "rank": 32, "score": 100817.48335352218 }, { "content": "/// date = date-text / DQUOTE date-text DQUOTE\n\npub fn date(input: &[u8]) -> IResult<&[u8], Option<MyNaiveDate>> {\n\n alt((date_text, delimited(DQUOTE, date_text, DQUOTE)))(input)\n\n}\n\n\n", "file_path": "src/parse/datetime.rs", "rank": 33, "score": 100814.31222438805 }, { "content": "/// flag-list = \"(\" [flag *(SP flag)] \")\"\n\npub fn flag_list(input: &[u8]) -> IResult<&[u8], Vec<Flag>> {\n\n delimited(tag(b\"(\"), separated_list0(SP, flag), tag(b\")\"))(input)\n\n}\n\n\n", "file_path": "src/parse/flag.rs", "rank": 34, "score": 100814.31222438805 }, { "content": "/// mbx-list-flags = *(mbx-list-oflag SP) mbx-list-sflag *(SP mbx-list-oflag) /\n\n/// mbx-list-oflag *(SP mbx-list-oflag)\n\n///\n\n/// Note: ABNF enforces that sflag is not used more than once.\n\n/// We parse any flag and check for multiple occurrences of sflag later.\n\npub fn mbx_list_flags(input: &[u8]) -> IResult<&[u8], Vec<FlagNameAttribute>> {\n\n let (remaining, flags) = separated_list1(SP, alt((mbx_list_sflag, mbx_list_oflag)))(input)?;\n\n\n\n let sflag_count = flags\n\n .iter()\n\n .filter(|&flag| FlagNameAttribute::is_selectability(flag))\n\n .count();\n\n\n\n if sflag_count > 1 {\n\n return Err(nom::Err::Error(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::Verify, // TODO(verify): use `Failure` or `Error`?\n\n )));\n\n }\n\n\n\n Ok((remaining, flags))\n\n}\n\n\n", "file_path": "src/parse/flag.rs", "rank": 35, "score": 96263.77293082 }, { "content": "/// ; errata id: 261\n\n/// status-att-list = status-att-val *(SP 
status-att-val)\n\npub fn status_att_list(input: &[u8]) -> IResult<&[u8], Vec<StatusAttributeValue>> {\n\n separated_list1(SP, status_att_val)(input)\n\n}\n\n\n", "file_path": "src/parse/status_attributes.rs", "rank": 36, "score": 94896.40144812324 }, { "content": "/// msg-att = \"(\"\n\n/// (msg-att-dynamic / msg-att-static) *(SP (msg-att-dynamic / msg-att-static))\n\n/// \")\"\n\npub fn msg_att(input: &[u8]) -> IResult<&[u8], NonEmptyVec<FetchAttributeValue>> {\n\n delimited(\n\n tag(b\"(\"),\n\n map(\n\n separated_list1(SP, alt((msg_att_dynamic, msg_att_static))),\n\n |attrs| NonEmptyVec::try_from(attrs).unwrap(),\n\n ),\n\n tag(b\")\"),\n\n )(input)\n\n}\n\n\n", "file_path": "src/parse/fetch_attributes.rs", "rank": 37, "score": 93599.35186146824 }, { "content": "#[bench]\n\nfn bench_command_serialize(b: &mut Bencher) {\n\n // Setup\n\n let cmd = Command::fetch(\n\n \"1:*,2,3,4,5,6,7,8,9\",\n\n MacroOrFetchAttributes::FetchAttributes(vec![\n\n FetchAttribute::Rfc822Size,\n\n FetchAttribute::BodyExt {\n\n section: Some(Section::Text(None)),\n\n peek: true,\n\n partial: Some((1, 100)),\n\n },\n\n FetchAttribute::BodyStructure,\n\n FetchAttribute::Body,\n\n FetchAttribute::Envelope,\n\n ]),\n\n true,\n\n )\n\n .unwrap();\n\n\n\n // Bench\n\n b.iter(|| {\n\n let mut out = Vec::with_capacity(512);\n\n cmd.encode(&mut out).unwrap();\n\n // Make sure that serialization step is not removed as dead code.\n\n // Not sure if needed...\n\n test::black_box(out);\n\n });\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 38, "score": 89135.99925254655 }, { "content": "#[bench]\n\nfn bench_response_serialize(b: &mut Bencher) {\n\n // Setup\n\n let tag = \"ABC1234567\".try_into().unwrap();\n\n\n\n let rsp = Response::Status(\n\n Status::ok(\n\n Some(tag),\n\n Some(Code::Other(\"XXXXX\".try_into().unwrap(), None)),\n\n \"xyz...\",\n\n )\n\n .unwrap(),\n\n );\n\n\n\n // Bench\n\n b.iter(|| {\n\n let mut out = Vec::with_capacity(512);\n\n rsp.encode(&mut out).unwrap();\n\n // Make sure that serialization step is not removed as dead code.\n\n // Not sure if needed...\n\n test::black_box(out);\n\n });\n\n}\n", "file_path": "benches/bench.rs", "rank": 39, "score": 89135.99925254655 }, { "content": "enum Who {\n\n Client,\n\n Server,\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 40, "score": 78994.95764563055 }, { "content": "pub trait Encode {\n\n fn encode(&self, writer: &mut impl Write) -> std::io::Result<()>;\n\n}\n\n\n\n// ----- Command -----\n\n\n\nimpl Encode for Command {\n\n fn encode(&self, writer: &mut impl Write) -> std::io::Result<()> {\n\n self.tag.encode(writer)?;\n\n writer.write_all(b\" \")?;\n\n self.body.encode(writer)?;\n\n writer.write_all(b\"\\r\\n\")\n\n }\n\n}\n\n\n\nimpl Encode for Tag {\n\n fn encode(&self, writer: &mut impl Write) -> std::io::Result<()> {\n\n writer.write_all(self.0.as_bytes())\n\n }\n\n}\n", "file_path": "src/codec/mod.rs", "rank": 41, "score": 71099.86469165116 }, { "content": "#[test]\n\nfn test_from_store() {\n\n let trace = br#\"C: A003 STORE 2:4 +FLAGS (\\Deleted)\n\nS: * 2 FETCH (FLAGS (\\Deleted \\Seen))\n\nS: * 3 FETCH (FLAGS (\\Deleted))\n\nS: * 4 FETCH (FLAGS (\\Deleted \\Flagged \\Seen))\n\nS: A003 OK STORE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 42, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_search() {\n\n // C: A284 SEARCH CHARSET UTF-8 TEXT {6}\n\n // C: XXXXXX\n\n let trace = br#\"C: A282 SEARCH FLAGGED SINCE 1-Feb-1994 NOT FROM \"Smith\"\n\nS: * 
SEARCH 2 84 882\n\nS: A282 OK SEARCH completed\n\nC: A283 SEARCH TEXT \"string not in mailbox\"\n\nS: * SEARCH\n\nS: A283 OK SEARCH completed\n\nS: * SEARCH 43\n\nS: A284 OK SEARCH completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 43, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_authenticate() {\n\n // S: * OK IMAP4rev1 Server\n\n // C: A001 AUTHENTICATE GSSAPI\n\n // S: +\n\n // C: YIIB+wYJKoZIhvcSAQICAQBuggHqMIIB5qADAgEFoQMCAQ6iBw\n\n // MFACAAAACjggEmYYIBIjCCAR6gAwIBBaESGxB1Lndhc2hpbmd0\n\n // b24uZWR1oi0wK6ADAgEDoSQwIhsEaW1hcBsac2hpdmFtcy5jYW\n\n // Mud2FzaGluZ3Rvbi5lZHWjgdMwgdCgAwIBAaEDAgEDooHDBIHA\n\n // cS1GSa5b+fXnPZNmXB9SjL8Ollj2SKyb+3S0iXMljen/jNkpJX\n\n // AleKTz6BQPzj8duz8EtoOuNfKgweViyn/9B9bccy1uuAE2HI0y\n\n // C/PHXNNU9ZrBziJ8Lm0tTNc98kUpjXnHZhsMcz5Mx2GR6dGknb\n\n // I0iaGcRerMUsWOuBmKKKRmVMMdR9T3EZdpqsBd7jZCNMWotjhi\n\n // vd5zovQlFqQ2Wjc2+y46vKP/iXxWIuQJuDiisyXF0Y8+5GTpAL\n\n // pHDc1/pIGmMIGjoAMCAQGigZsEgZg2on5mSuxoDHEA1w9bcW9n\n\n // FdFxDKpdrQhVGVRDIzcCMCTzvUboqb5KjY1NJKJsfjRQiBYBdE\n\n // NKfzK+g5DlV8nrw81uOcP8NOQCLR5XkoMHC0Dr/80ziQzbNqhx\n\n // O6652Npft0LQwJvenwDI13YxpwOdMXzkWZN/XrEqOWp6GCgXTB\n\n // vCyLWLlWnbaUkZdEYbKHBPjd8t/1x5Yg==\n\n // S: + YGgGCSqGSIb3EgECAgIAb1kwV6ADAgEFoQMCAQ+iSzBJoAMC\n\n // AQGiQgRAtHTEuOP2BXb9sBYFR4SJlDZxmg39IxmRBOhXRKdDA0\n\n // uHTCOT9Bq3OsUTXUlk0CsFLoa8j+gvGDlgHuqzWHPSQg==\n\n // C:\n\n // S: + YDMGCSqGSIb3EgECAgIBAAD/////6jcyG4GE3KkTzBeBiVHe\n\n // ceP2CWY0SR0fAQAgAAQEBAQ=\n\n // C: YDMGCSqGSIb3EgECAgIBAAD/////3LQBHXTpFfZgrejpLlLImP\n\n // wkhbfa2QteAQAgAG1yYwE=\n\n // S: A001 OK GSSAPI authentication successful\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 44, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_list() {\n\n let trace = br#\"C: A101 LIST \"\" \"\"\n\nS: * LIST (\\Noselect) \"/\" \"\"\n\nS: A101 OK LIST Completed\n\nC: A102 LIST #news.comp.mail.misc \"\"\n\nS: * LIST (\\Noselect) \".\" #news.\n\nS: A102 OK LIST Completed\n\nC: A103 LIST /usr/staff/jones \"\"\n\nS: * LIST (\\Noselect) \"/\" /\n\nS: A103 OK LIST Completed\n\nC: A202 LIST ~/Mail/ %\n\nS: * LIST (\\Noselect) \"/\" ~/Mail/foo\n\nS: * LIST () \"/\" ~/Mail/meetings\n\nS: A202 OK LIST completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 45, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_rename() {\n\n let trace = br#\"C: A682 LIST \"\" *\n\nS: * LIST () \"/\" blurdybloop\n\nS: * LIST (\\Noselect) \"/\" foo\n\nS: * LIST () \"/\" foo/bar\n\nS: A682 OK LIST completed\n\nC: A683 RENAME blurdybloop sarasoop\n\nS: A683 OK RENAME completed\n\nC: A684 RENAME foo zowie\n\nS: A684 OK RENAME Completed\n\nC: A685 LIST \"\" *\n\nS: * LIST () \"/\" sarasoop\n\nS: * LIST (\\Noselect) \"/\" zowie\n\nS: * LIST () \"/\" zowie/bar\n\nS: A685 OK LIST completed\n\nC: Z432 LIST \"\" *\n\nS: * LIST () \".\" INBOX\n\nS: * LIST () \".\" INBOX.bar\n\nS: Z432 OK LIST completed\n\nC: Z433 RENAME INBOX old-mail\n", "file_path": "tests/trace.rs", "rank": 46, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_status() {\n\n let trace = br#\"C: A042 STATUS blurdybloop (UIDNEXT MESSAGES)\n\nS: * STATUS blurdybloop (MESSAGES 231 UIDNEXT 44292)\n\nS: A042 OK STATUS completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 47, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_login() {\n\n let trace = b\"C: a001 LOGIN SMITH SESAME\n\nS: a001 
OK LOGIN completed\n\n\";\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 48, "score": 61386.89060432435 }, { "content": "fn welcome() {\n\n let welcome = r#\"\n\n# Parsing of IMAP commands.\n\n\n\nAs a user, you are in the role of an IMAP client. Thus, you type IMAP commands. However, the example code shows typical server code, i.e., how to parse received commands.\n\n\n\n\"C:\" denotes the client, \"S:\" denotes the server, and \"..\" denotes the continuation of an (incomplete) command, e.g., due to the use of IMAP literals.\n\n\n\nEnter command (or \"exit\").\n\n\"#;\n\n\n\n println!(\"{}\", welcome);\n\n}\n\n\n", "file_path": "examples/parse_command.rs", "rank": 49, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_append() {\n\n // C: A003 APPEND saved-messages (\\Seen) {310}\n\n // S: + Ready for literal data\n\n // C: Date: Mon, 7 Feb 1994 21:52:25 -0800 (PST)\n\n // C: From: Fred Foobar <foobar@Blurdybloop.COM>\n\n // C: Subject: afternoon meeting\n\n // C: To: mooch@owatagu.siam.edu\n\n // C: Message-Id: <B27397-0100000@Blurdybloop.COM>\n\n // C: MIME-Version: 1.0\n\n // C: Content-Type: TEXT/PLAIN; CHARSET=US-ASCII\n\n // C:\n\n // C: Hello Joe, do you think we can meet at 3:30 tomorrow?\n\n // C:\n\n // S: A003 OK APPEND completed\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 50, "score": 61386.89060432435 }, { "content": "fn main() {\n\n welcome();\n\n\n\n let mut buffer = Vec::new();\n\n\n\n loop {\n\n // Try to parse the first command in `buffer`.\n\n match command(&buffer) {\n\n // Parser succeeded.\n\n Ok((remaining, command)) => {\n\n // Do something with the command ...\n\n println!(\"{:#?}\", command);\n\n\n\n // ... and proceed with the remaining data.\n\n buffer = remaining.to_vec();\n\n }\n\n // Parser needs more data.\n\n Err(nom::Err::Incomplete(_needed)) => {\n\n // Read more data.\n\n read_more(&mut buffer);\n", "file_path": "examples/parse_command.rs", "rank": 51, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_unsubscribe() {\n\n let trace = br#\"C: A002 UNSUBSCRIBE #news.comp.mail.mime\n\nS: A002 OK UNSUBSCRIBE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 52, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_subscribe() {\n\n let trace = br#\"C: A002 SUBSCRIBE #news.comp.mail.mime\n\nS: A002 OK SUBSCRIBE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 53, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_check() {\n\n let trace = br#\"C: FXXZ CHECK\n\nS: FXXZ OK CHECK Completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 54, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_noop() {\n\n let trace = br#\"C: a002 NOOP\n\nS: a002 OK NOOP completed\n\nC: a047 NOOP\n\nS: * 22 EXPUNGE\n\nS: * 23 EXISTS\n\nS: * 3 RECENT\n\nS: * 14 FETCH (FLAGS (\\Seen \\Deleted))\n\nS: a047 OK NOOP completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 55, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_capability() {\n\n let trace = b\"C: abcd CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 STARTTLS AUTH=GSSAPI LOGINDISABLED\n\nS: abcd OK CAPABILITY completed\n\nC: efgh STARTTLS\n\nS: efgh OK STARTLS completed\n\nC: ijkl CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 AUTH=GSSAPI AUTH=PLAIN\n\nS: ijkl OK CAPABILITY 
completed\n\n\";\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 56, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_expunge() {\n\n let trace = br#\"C: A202 EXPUNGE\n\nS: * 3 EXPUNGE\n\nS: * 3 EXPUNGE\n\nS: * 5 EXPUNGE\n\nS: * 8 EXPUNGE\n\nS: A202 OK EXPUNGE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 57, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_starttls() {\n\n let trace = br#\"C: a001 CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 STARTTLS LOGINDISABLED\n\nS: a001 OK CAPABILITY completed\n\nC: a002 STARTTLS\n\nS: a002 OK Begin TLS negotiation now\n\nC: a003 CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 AUTH=PLAIN\n\nS: a003 OK CAPABILITY completed\n\nC: a004 LOGIN joe password\n\nS: a004 OK LOGIN completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 58, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_fetch() {\n\n // S: * 2 FETCH ....\n\n // S: * 3 FETCH ....\n\n // S: * 4 FETCH ....\n\n let trace = br#\"C: A654 FETCH 2:4 (FLAGS BODY[HEADER.FIELDS (DATE FROM)])\n\nS: A654 OK FETCH completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 59, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_create() {\n\n let trace = br#\"C: A003 CREATE owatagusiam/\n\nS: A003 OK CREATE completed\n\nC: A004 CREATE owatagusiam/blurdybloop\n\nS: A004 OK CREATE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 60, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_select() {\n\n let trace = br#\"C: A142 SELECT INBOX\n\nS: * 172 EXISTS\n\nS: * 1 RECENT\n\nS: * OK [UNSEEN 12] Message 12 is first unseen\n\nS: * OK [UIDVALIDITY 3857529045] UIDs valid\n\nS: * OK [UIDNEXT 4392] Predicted next UID\n\nS: * FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)\n\nS: * OK [PERMANENTFLAGS (\\Deleted \\Seen \\*)] Limited\n\nS: A142 OK [READ-WRITE] SELECT completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 61, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_delete() {\n\n let trace = br#\"C: A682 LIST \"\" *\n\nS: * LIST () \"/\" blurdybloop\n\nS: * LIST (\\Noselect) \"/\" foo\n\nS: * LIST () \"/\" foo/bar\n\nS: A682 OK LIST completed\n\nC: A683 DELETE blurdybloop\n\nS: A683 OK DELETE completed\n\nC: A684 DELETE foo\n\nS: A684 NO Name \"foo\" has inferior hierarchical names\n\nC: A685 DELETE foo/bar\n\nS: A685 OK DELETE Completed\n\nC: A686 LIST \"\" *\n\nS: * LIST (\\Noselect) \"/\" foo\n\nS: A686 OK LIST completed\n\nC: A687 DELETE foo\n\nS: A687 OK DELETE Completed\n\nC: A82 LIST \"\" *\n\nS: * LIST () \".\" blurdybloop\n\nS: * LIST () \".\" foo\n", "file_path": "tests/trace.rs", "rank": 62, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_lsub() {\n\n let trace = br#\"C: A002 LSUB \"news.\" \"comp.mail.*\"\n\nS: * LSUB () \".\" #news.comp.mail.mime\n\nS: * LSUB () \".\" #news.comp.mail.misc\n\nS: A002 OK LSUB completed\n\nC: A003 LSUB \"news.\" \"comp.%\"\n\nS: * LSUB (\\NoSelect) \".\" #news.comp.mail\n\nS: A003 OK LSUB completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 63, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_logout() {\n\n let trace = br#\"C: A023 LOGOUT\n\nS: * BYE IMAP4rev1 Server logging 
out\n\nS: A023 OK LOGOUT completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 64, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_uid() {\n\n let trace = br#\"C: A999 UID FETCH 4827313:4828442 FLAGS\n\nS: * 23 FETCH (FLAGS (\\Seen) UID 4827313)\n\nS: * 24 FETCH (FLAGS (\\Seen) UID 4827943)\n\nS: * 25 FETCH (FLAGS (\\Seen) UID 4828442)\n\nS: A999 OK UID FETCH completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n\n//#[test]\n\n//fn test_from_X() {\n\n// let trace = br#\"C: a441 CAPABILITY\n\n//S: * CAPABILITY IMAP4rev1 XPIG-LATIN\n\n//S: a441 OK CAPABILITY completed\n\n//C: A442 XPIG-LATIN\n\n//S: * XPIG-LATIN ow-nay eaking-spay ig-pay atin-lay\n\n//S: A442 OK XPIG-LATIN ompleted-cay\"#;\n\n//\n\n// test_lines_of_trace(trace);\n\n//}\n\n\n", "file_path": "tests/trace.rs", "rank": 65, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_close() {\n\n let trace = br#\"C: A341 CLOSE\n\nS: A341 OK CLOSE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 66, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_examine() {\n\n let trace = br#\"C: A932 EXAMINE blurdybloop\n\nS: * 17 EXISTS\n\nS: * 2 RECENT\n\nS: * OK [UNSEEN 8] Message 8 is first unseen\n\nS: * OK [UIDVALIDITY 3857529045] UIDs valid\n\nS: * OK [UIDNEXT 4392] Predicted next UID\n\nS: * FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)\n\nS: * OK [PERMANENTFLAGS ()] No permanent flags permitted\n\nS: A932 OK [READ-ONLY] EXAMINE completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 67, "score": 61386.89060432435 }, { "content": "#[test]\n\nfn test_from_copy() {\n\n let trace = br#\"C: A003 COPY 2:4 MEETING\n\nS: A003 OK COPY completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 68, "score": 61386.89060432435 }, { "content": "/// mailbox-list = \"(\" [mbx-list-flags] \")\" SP\n\n/// (DQUOTE QUOTED-CHAR DQUOTE / nil) SP\n\n/// mailbox\n\nfn mailbox_list(\n\n input: &[u8],\n\n) -> IResult<&[u8], (Option<Vec<FlagNameAttribute>>, Option<QuotedChar>, Mailbox)> {\n\n let mut parser = tuple((\n\n delimited(tag(b\"(\"), opt(mbx_list_flags), tag(b\")\")),\n\n SP,\n\n alt((\n\n map(delimited(DQUOTE, quoted_char, DQUOTE), Option::Some),\n\n value(None, nil),\n\n )),\n\n SP,\n\n mailbox,\n\n ));\n\n\n\n let (remaining, (mbx_list_flags, _, maybe_delimiter, _, mailbox)) = parser(input)?;\n\n\n\n Ok((remaining, (mbx_list_flags, maybe_delimiter, mailbox)))\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/parse/mailbox.rs", "rank": 69, "score": 60096.99859665254 }, { "content": "#[test]\n\nfn test_response_status_no() {\n\n let trace = br#\"C: A222 COPY 1:2 owatagusiam\n\nS: * NO Disk is 98% full, please delete unnecessary data\n\nS: A222 OK COPY completed\n\nC: A223 COPY 3:200 blurdybloop\n\nS: * NO Disk is 98% full, please delete unnecessary data\n\nS: * NO Disk is 99% full, please delete unnecessary data\n\nS: A223 NO COPY failed: disk is full\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 70, "score": 60096.99859665254 }, { "content": "#[test]\n\nfn test_transcript_from_rfc() {\n\n // S: * 12 FETCH (BODY[HEADER] {342}\n\n // S: Date: Wed, 17 Jul 1996 02:23:25 -0700 (PDT)\n\n // S: From: Terry Gray <gray@cac.washington.edu>\n\n // S: Subject: IMAP4rev1 WG mtg summary and minutes\n\n // S: To: imap@cac.washington.edu\n\n // S: cc: 
minutes@CNRI.Reston.VA.US, John Klensin <KLENSIN@MIT.EDU>\n\n // S: Message-Id: <B27397-0100000@cac.washington.edu>\n\n // S: MIME-Version: 1.0\n\n // S: Content-Type: TEXT/PLAIN; CHARSET=US-ASCII\n\n // S:\n\n // S: )\n\n\n\n let trace = br#\"S: * OK IMAP4rev1 Service Ready\n\nC: a001 login mrc secret\n\nS: a001 OK LOGIN completed\n\nC: a002 select inbox\n\nS: * 18 EXISTS\n\nS: * FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)\n\nS: * 2 RECENT\n", "file_path": "tests/trace.rs", "rank": 71, "score": 60096.99859665254 }, { "content": "#[test]\n\nfn test_transcript_from_rfc5161() {\n\n let trace = br#\"C: t1 CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 ID LITERAL+ ENABLE X-GOOD-IDEA\n\nS: t1 OK foo\n\nC: t2 ENABLE CONDSTORE X-GOOD-IDEA\n\nS: * ENABLED X-GOOD-IDEA\n\nS: t2 OK foo\n\nC: t3 CAPABILITY\n\nS: * CAPABILITY IMAP4rev1 ID LITERAL+ ENABLE X-GOOD-IDEA\n\nS: t3 OK foo again\n\nC: a1 ENABLE CONDSTORE\n\nS: * ENABLED CONDSTORE\n\nS: a1 OK Conditional Store enabled\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 72, "score": 60096.99859665254 }, { "content": "#[test]\n\nfn test_response_data_search() {\n\n let trace = br#\"S: * SEARCH 2 3 6\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 73, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_lsub() {\n\n let trace = br#\"S: * LSUB () \".\" #news.comp.mail.misc\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 74, "score": 58894.410078992805 }, { "content": "/// body-fld-dsp = \"(\" string SP body-fld-param \")\" / nil\n\nfn body_fld_dsp(\n\n input: &[u8],\n\n) -> IResult<&[u8], Option<(IStringRef, Vec<(IStringRef, IStringRef)>)>> {\n\n alt((\n\n delimited(\n\n tag(b\"(\"),\n\n map(\n\n tuple((string, SP, body_fld_param)),\n\n |(string, _, body_fld_param)| Some((string, body_fld_param)),\n\n ),\n\n tag(b\")\"),\n\n ),\n\n map(nil, |_| None),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 75, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_status() {\n\n let trace = br#\"S: * STATUS blurdybloop (MESSAGES 231 UIDNEXT 44292)\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 76, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_recent() {\n\n let trace = br#\"S: * 5 RECENT\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 77, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_continuation() {\n\n // C: A001 LOGIN {11}\n\n // C: FRED FOOBAR {7}\n\n // C: fat man\n\n // C: A044 BLURDYBLOOP {102856}\n\n\n\n let trace = br#\"S: + Ready for additional command text\n\nS: A001 OK LOGIN completed\n\nS: A044 BAD No such command as \"BLURDYBLOOP\"\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n", "file_path": "tests/trace.rs", "rank": 78, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_exists() {\n\n let trace = br#\"S: * 23 EXISTS\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 79, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_fetch() {\n\n let trace = br#\"S: * 23 FETCH (FLAGS (\\Seen) RFC822.SIZE 44827)\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 80, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn 
test_response_data_expunge() {\n\n let trace = br#\"S: * 44 EXPUNGE\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 81, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_list() {\n\n let trace = br#\"S: * LIST (\\Noselect) \"/\" ~/Mail/foo\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 82, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_status_preauth() {\n\n // This can only be parsed with `greeting`\n\n let line = b\"* PREAUTH IMAP4rev1 server logged in as Smith\\r\\n\";\n\n\n\n println!(\"S: {}\", String::from_utf8_lossy(line).trim());\n\n let (rem, parsed) = greeting(line).unwrap();\n\n println!(\"Parsed: {:?}\", parsed);\n\n assert!(rem.is_empty());\n\n let mut serialized = Vec::new();\n\n parsed.encode(&mut serialized).unwrap();\n\n println!(\n\n \"Serialized: {}\",\n\n String::from_utf8_lossy(&serialized).trim()\n\n );\n\n let (rem, parsed2) = greeting(&serialized).unwrap();\n\n assert!(rem.is_empty());\n\n assert_eq!(parsed, parsed2);\n\n println!()\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 83, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_capability() {\n\n let trace = br#\"S: * CAPABILITY IMAP4rev1 STARTTLS AUTH=GSSAPI XPIG-LATIN\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 84, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_status_bye() {\n\n let trace = br#\"S: * BYE Autologout; idle for too long\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 85, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_data_flags() {\n\n let trace = br#\"S: * FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 86, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_status_ok() {\n\n let trace = br#\"S: * OK IMAP4rev1 server ready\n\nC: A001 LOGIN fred blurdybloop\n\nS: * OK [ALERT] System shutdown in 10 minutes\n\nS: A001 OK LOGIN Completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 87, "score": 58894.410078992805 }, { "content": "#[test]\n\nfn test_response_status_bad() {\n\n let trace = br#\"S: * BAD Command line too long\n\nS: * BAD Empty command line\n\nC: A443 EXPUNGE\n\nS: * BAD Disk crash, attempting salvage to a new disk!\n\nS: * OK Salvage successful, no data lost\n\nS: A443 OK Expunge completed\n\n\"#;\n\n\n\n test_lines_of_trace(trace);\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 88, "score": 58894.410078992805 }, { "content": "fn body_limited<'a>(\n\n input: &'a [u8],\n\n remaining_recursions: usize,\n\n) -> IResult<&'a [u8], BodyStructure> {\n\n if remaining_recursions == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let body_type_1part = move |input: &'a [u8]| {\n\n body_type_1part_limited(input, remaining_recursions.saturating_sub(1))\n\n };\n\n let body_type_mpart = move |input: &'a [u8]| {\n\n body_type_mpart_limited(input, remaining_recursions.saturating_sub(1))\n\n };\n\n\n\n delimited(\n\n tag(b\"(\"),\n\n alt((body_type_1part, body_type_mpart)),\n\n tag(b\")\"),\n\n )(input)\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 89, "score": 58682.123891943294 }, { "content": "/// 
body-type-mpart = 1*body SP media-subtype [SP body-ext-mpart]\n\n///\n\n/// Note: This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed.\n\nfn body_type_mpart_limited(\n\n input: &[u8],\n\n remaining_recursion: usize,\n\n) -> IResult<&[u8], BodyStructure> {\n\n if remaining_recursion == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let mut parser = tuple((\n\n many1(body(remaining_recursion)),\n\n SP,\n\n media_subtype,\n\n opt(preceded(SP, body_ext_mpart)),\n\n ));\n\n\n\n let (remaining, (bodies, _, subtype, maybe_extension_data)) = parser(input)?;\n\n\n\n Ok((\n\n remaining,\n\n BodyStructure::Multi {\n\n bodies,\n\n subtype: subtype.to_owned(),\n\n extension_data: maybe_extension_data,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 90, "score": 57770.5517675444 }, { "content": "fn body_extension_limited<'a>(\n\n input: &'a [u8],\n\n remaining_recursion: usize,\n\n) -> IResult<&'a [u8], &[u8]> {\n\n if remaining_recursion == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let body_extension =\n\n move |input: &'a [u8]| body_extension_limited(input, remaining_recursion.saturating_sub(1));\n\n\n\n alt((\n\n recognize(nstring),\n\n recognize(number),\n\n recognize(delimited(\n\n tag(b\"(\"),\n\n separated_list1(SP, body_extension),\n\n tag(b\")\"),\n\n )),\n\n ))(input)\n\n}\n\n\n\n// ---\n\n\n", "file_path": "src/parse/body.rs", "rank": 91, "score": 57479.53537428356 }, { "content": "/// search-key = \"ALL\" /\n\n/// \"ANSWERED\" /\n\n/// \"BCC\" SP astring /\n\n/// \"BEFORE\" SP date /\n\n/// \"BODY\" SP astring /\n\n/// \"CC\" SP astring /\n\n/// \"DELETED\" /\n\n/// \"FLAGGED\" /\n\n/// \"FROM\" SP astring /\n\n/// \"KEYWORD\" SP flag-keyword /\n\n/// \"NEW\" /\n\n/// \"OLD\" /\n\n/// \"ON\" SP date /\n\n/// \"RECENT\" /\n\n/// \"SEEN\" /\n\n/// \"SINCE\" SP date /\n\n/// \"SUBJECT\" SP astring /\n\n/// \"TEXT\" SP astring /\n\n/// \"TO\" SP astring /\n\n/// \"UNANSWERED\" /\n\n/// \"UNDELETED\" /\n\n/// \"UNFLAGGED\" /\n\n/// \"UNKEYWORD\" SP flag-keyword /\n\n/// \"UNSEEN\" /\n\n/// ; Above this line were in [IMAP2]\n\n/// \"DRAFT\" /\n\n/// \"HEADER\" SP header-fld-name SP astring /\n\n/// \"LARGER\" SP number /\n\n/// \"NOT\" SP search-key /\n\n/// \"OR\" SP search-key SP search-key /\n\n/// \"SENTBEFORE\" SP date /\n\n/// \"SENTON\" SP date /\n\n/// \"SENTSINCE\" SP date /\n\n/// \"SMALLER\" SP number /\n\n/// \"UID\" SP sequence-set /\n\n/// \"UNDRAFT\" /\n\n/// sequence-set /\n\n/// \"(\" search-key *(SP search-key) \")\"\n\nfn search_key_limited<'a>(\n\n input: &'a [u8],\n\n remaining_recursion: usize,\n\n) -> IResult<&'a [u8], SearchKey> {\n\n if remaining_recursion == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let search_key =\n\n move |input: &'a [u8]| search_key_limited(input, remaining_recursion.saturating_sub(1));\n\n\n\n alt((\n\n alt((\n\n value(SearchKey::All, tag_no_case(b\"ALL\")),\n\n value(SearchKey::Answered, tag_no_case(b\"ANSWERED\")),\n\n map(tuple((tag_no_case(b\"BCC\"), SP, astring)), |(_, _, val)| {\n\n SearchKey::Bcc(val.to_owned())\n", "file_path": "src/parse/command.rs", "rank": 92, "score": 57479.53537428356 }, { "content": "/// body-type-1part = (body-type-basic /\n\n/// body-type-msg /\n\n/// 
body-type-text) [SP body-ext-1part]\n\n///\n\n/// Note: This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed.\n\nfn body_type_1part_limited<'a>(\n\n input: &'a [u8],\n\n remaining_recursions: usize,\n\n) -> IResult<&'a [u8], BodyStructure> {\n\n if remaining_recursions == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let body_type_msg =\n\n move |input: &'a [u8]| body_type_msg_limited(input, remaining_recursions.saturating_sub(1));\n\n\n\n let mut parser = tuple((\n\n alt((body_type_msg, body_type_text, body_type_basic)),\n\n opt(preceded(SP, body_ext_1part)),\n\n ));\n\n\n\n let (remaining, ((basic, specific), maybe_extension)) = parser(input)?;\n\n\n\n Ok((\n\n remaining,\n\n BodyStructure::Single {\n\n body: Body { basic, specific },\n\n extension: maybe_extension,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/parse/body.rs", "rank": 93, "score": 56355.67706283515 }, { "content": "/// body-type-msg = media-message SP\n\n/// body-fields SP\n\n/// envelope SP\n\n/// body SP\n\n/// body-fld-lines\n\n///\n\n/// Note: This parser is recursively defined. Thus, in order to not overflow the stack,\n\n/// it is needed to limit how may recursions are allowed. (8 should suffice).\n\nfn body_type_msg_limited<'a>(\n\n input: &'a [u8],\n\n remaining_recursions: usize,\n\n) -> IResult<&'a [u8], (BasicFields, SpecificFields)> {\n\n if remaining_recursions == 0 {\n\n return Err(nom::Err::Failure(nom::error::make_error(\n\n input,\n\n nom::error::ErrorKind::TooLarge,\n\n )));\n\n }\n\n\n\n let body = move |input: &'a [u8]| body_limited(input, remaining_recursions.saturating_sub(1));\n\n\n\n let mut parser = tuple((\n\n media_message,\n\n SP,\n\n body_fields,\n\n SP,\n\n envelope,\n\n SP,\n", "file_path": "src/parse/body.rs", "rank": 94, "score": 56355.67706283515 }, { "content": "fn test_lines_of_trace(trace: &[u8]) {\n\n for (who, line) in split_trace(trace) {\n\n match who {\n\n Who::Client => {\n\n println!(\"C: {}\", String::from_utf8_lossy(line).trim());\n\n let (rem, parsed) = command(line).unwrap();\n\n assert!(rem.is_empty());\n\n println!(\"Parsed {:?}\", parsed);\n\n let mut serialized = Vec::new();\n\n parsed.encode(&mut serialized).unwrap();\n\n println!(\n\n \"Serialized: {}\",\n\n String::from_utf8_lossy(&serialized).trim()\n\n );\n\n let (rem, parsed2) = command(&serialized).unwrap();\n\n assert!(rem.is_empty());\n\n assert_eq!(parsed, parsed2);\n\n println!()\n\n }\n\n Who::Server => {\n", "file_path": "tests/trace.rs", "rank": 95, "score": 55094.93554224975 }, { "content": "fn main() -> std::io::Result<()> {\n\n let mut args = std::env::args();\n\n\n\n if let Some(path) = args.nth(1) {\n\n let data = read_file(&path).unwrap();\n\n\n\n match response(&data) {\n\n Ok((remaining, response)) => {\n\n println!(\"{:#?}\", response);\n\n\n\n if !remaining.is_empty() {\n\n println!(\"Remaining data in buffer: {:?}\", remaining);\n\n }\n\n }\n\n Err(error) => {\n\n println!(\"Error parsing the response. Is it correct? 
({:?})\", error);\n\n }\n\n }\n\n\n\n return Ok(());\n", "file_path": "examples/parse_response.rs", "rank": 96, "score": 53964.44184397847 }, { "content": "fn main() -> std::io::Result<()> {\n\n let mut args = std::env::args();\n\n\n\n if let Some(path) = args.nth(1) {\n\n let data = read_file(&path).unwrap();\n\n\n\n match greeting(&data) {\n\n Ok((remaining, greeting)) => {\n\n println!(\"{:#?}\", greeting);\n\n\n\n if !remaining.is_empty() {\n\n println!(\"Remaining data in buffer: {:?}\", remaining);\n\n }\n\n }\n\n Err(error) => {\n\n println!(\"Error parsing the greeting. Is it correct? ({:?})\", error);\n\n }\n\n }\n\n\n\n return Ok(());\n", "file_path": "examples/parse_greeting.rs", "rank": 97, "score": 53964.44184397847 }, { "content": "/// atom-specials = \"(\" / \")\" / \"{\" / SP / CTL / list-wildcards / quoted-specials / resp-specials\n\nfn is_atom_specials(i: u8) -> bool {\n\n match i {\n\n b'(' | b')' | b'{' | b' ' => true,\n\n c if is_CTL(c) => true,\n\n c if is_list_wildcards(c) => true,\n\n c if is_quoted_specials(c) => true,\n\n c if is_resp_specials(c) => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[inline]\n\n/// resp-specials = \"]\"\n\npub(crate) fn is_resp_specials(i: u8) -> bool {\n\n i == b']'\n\n}\n\n\n\n/// atom = 1*ATOM-CHAR\n\npub(crate) fn atom(input: &[u8]) -> IResult<&[u8], AtomRef> {\n\n let parser = take_while1(is_atom_char);\n", "file_path": "src/parse/core.rs", "rank": 98, "score": 53964.44184397847 }, { "content": "/// base64-char = ALPHA / DIGIT / \"+\" / \"/\" ; Case-sensitive\n\nfn is_base64_char(i: u8) -> bool {\n\n is_ALPHA(i) || is_DIGIT(i) || i == b'+' || i == b'/'\n\n}\n\n\n\n// base64-terminal = (2base64-char \"==\") / (3base64-char \"=\")\n\n\n\n// ----- charset -----\n\n\n\n/// charset = atom / quoted\n\n/// errata id: 261\n\npub(crate) fn charset(input: &[u8]) -> IResult<&[u8], Charset> {\n\n alt((\n\n map(atom, |atom| Charset::Atom(atom.to_owned())),\n\n map(quoted, |cow| {\n\n Charset::Quoted(Quoted::try_from(cow.to_string()).unwrap())\n\n }),\n\n ))(input)\n\n}\n\n\n\n// ----- tag -----\n", "file_path": "src/parse/core.rs", "rank": 99, "score": 53964.44184397847 } ]
Rust
rsocket/src/frame/setup.rs
sofkyle/rsocket-rust
136fbc70c006c53ee3e3839aa16e97eb7cf845da
use super::{Body, Frame, PayloadSupport, Version, FLAG_METADATA, FLAG_RESUME}; use crate::utils::{RSocketResult, Writeable, DEFAULT_MIME_TYPE}; use bytes::{Buf, BufMut, Bytes, BytesMut}; use std::time::Duration; #[derive(Debug, PartialEq)] pub struct Setup { version: Version, keepalive: u32, lifetime: u32, token: Option<Bytes>, mime_metadata: String, mime_data: String, metadata: Option<Bytes>, data: Option<Bytes>, } impl Writeable for Setup { fn len(&self) -> usize { let mut n: usize = 12; n += match &self.token { Some(v) => 2 + v.len(), None => 0, }; n += 2 + self.mime_metadata.len() + self.mime_data.len(); n += PayloadSupport::len(&self.metadata, &self.data); n } fn write_to(&self, bf: &mut BytesMut) { self.version.write_to(bf); bf.put_u32(self.keepalive); bf.put_u32(self.lifetime); if let Some(b) = &self.token { bf.put_u16(b.len() as u16); bf.put(b.bytes()); } bf.put_u8(self.mime_metadata.len() as u8); bf.put(Bytes::from(self.mime_metadata.clone())); bf.put_u8(self.mime_data.len() as u8); bf.put(Bytes::from(self.mime_data.clone())); PayloadSupport::write(bf, self.get_metadata(), self.get_data()); } } impl Setup { pub fn decode(flag: u16, b: &mut BytesMut) -> RSocketResult<Setup> { let major = b.get_u16(); let minor = b.get_u16(); let keepalive = b.get_u32(); let lifetime = b.get_u32(); let token: Option<Bytes> = if flag & FLAG_RESUME != 0 { let l = b.get_u16(); Some(b.split_to(l as usize).to_bytes()) } else { None }; let mut len_mime: usize = b[0] as usize; b.advance(1); let mime_metadata = b.split_to(len_mime); len_mime = b[0] as usize; b.advance(1); let mime_data = b.split_to(len_mime); let (metadata, data) = PayloadSupport::read(flag, b); Ok(Setup { version: Version::new(major, minor), keepalive, lifetime, token, mime_metadata: String::from_utf8(mime_metadata.to_vec()).unwrap(), mime_data: String::from_utf8(mime_data.to_vec()).unwrap(), metadata, data, }) } pub fn builder(stream_id: u32, flag: u16) -> SetupBuilder { SetupBuilder::new(stream_id, flag) } pub fn get_version(&self) -> Version { self.version } pub fn get_keepalive(&self) -> Duration { Duration::from_millis(u64::from(self.keepalive)) } pub fn get_lifetime(&self) -> Duration { Duration::from_millis(u64::from(self.lifetime)) } pub fn get_token(&self) -> Option<Bytes> { self.token.clone() } pub fn get_mime_metadata(&self) -> &String { &self.mime_metadata } pub fn get_mime_data(&self) -> &String { &self.mime_data } pub fn get_metadata(&self) -> &Option<Bytes> { &self.metadata } pub fn get_data(&self) -> &Option<Bytes> { &self.data } pub fn split(self) -> (Option<Bytes>, Option<Bytes>) { (self.data, self.metadata) } } pub struct SetupBuilder { stream_id: u32, flag: u16, value: Setup, } impl SetupBuilder { fn new(stream_id: u32, flag: u16) -> SetupBuilder { SetupBuilder { stream_id, flag, value: Setup { version: Version::default(), keepalive: 30_000, lifetime: 90_000, token: None, mime_metadata: String::from(DEFAULT_MIME_TYPE), mime_data: String::from(DEFAULT_MIME_TYPE), metadata: None, data: None, }, } } pub fn build(self) -> Frame { Frame::new(self.stream_id, Body::Setup(self.value), self.flag) } pub fn set_data(mut self, bs: Bytes) -> Self { self.value.data = Some(bs); self } pub fn set_metadata(mut self, bs: Bytes) -> Self { self.flag |= FLAG_METADATA; self.value.metadata = Some(bs); self } pub fn set_version(mut self, major: u16, minor: u16) -> Self { self.value.version = Version::new(major, minor); self } pub fn set_keepalive(mut self, duration: Duration) -> Self { self.value.keepalive = duration.as_millis() as u32; self } 
pub fn set_lifetime(mut self, duration: Duration) -> Self { self.value.lifetime = duration.as_millis() as u32; self } pub fn set_token(mut self, token: Bytes) -> Self { self.value.token = Some(token); self.flag |= FLAG_RESUME; self } pub fn set_mime_metadata(mut self, mime: &str) -> Self { if mime.len() > 256 { panic!("maximum mime length is 256"); } self.value.mime_metadata = String::from(mime); self } pub fn set_mime_data(mut self, mime: &str) -> Self { if mime.len() > 256 { panic!("maximum mime length is 256"); } self.value.mime_data = String::from(mime); self } }
use super::{Body, Frame, PayloadSupport, Version, FLAG_METADATA, FLAG_RESUME}; use crate::utils::{RSocketResult, Writeable, DEFAULT_MIME_TYPE}; use bytes::{Buf, BufMut, Bytes, BytesMut}; use std::time::Duration; #[derive(Debug, PartialEq)] pub struct Setup { version: Version, keepalive: u32, lifetime: u32, token: Option<Bytes>, mime_metadata: String, mime_data: String, metadata: Option<Bytes>, data: Option<Bytes>, } impl Writeable for Setup { fn len(&self) -> usize { let mut n: usize = 12; n += match &self.token { Some(v) => 2 + v.len(), None => 0, }; n += 2 + self.mime_metadata.len() + self.mime_data.len(); n += PayloadSupport::len(&self.metadata, &self.data); n } fn write_to(&self, bf: &mut BytesMut) { self.version.write_to(bf); bf.put_u32(self.keepalive); bf.put_u32(self.lifetime); if let Some(b) = &self.token { bf.put_u16(b.len() as u16); bf.put(b.bytes()); } bf.put_u8(self.mime_metadata.len() as u8); bf.put(Bytes::from(self.mime_metadata.clone())); bf.put_u8(self.mime_data.len() as u8); bf.put(Bytes::from(self.mime_data.clone())); PayloadSupport::write(bf, self.get_metadata(), self.get_data()); } } impl Setup { pub fn decode(flag: u16, b: &mut BytesMut) -> RSocketResult<Setup> { let major = b.get_u16(); let minor = b.get_u16(); let keepalive = b.get_u32(); let lifetime = b.get_u32(); let token: Option<Bytes> = if flag & FLAG_RESUME != 0 { let l = b.get_u16(); Some(b.split_to(l as usize).to_bytes()) } else { None }; let mut len_mime: usize = b[0] as usize; b.advance(1); let mime_metadata = b.split_to(len_mime); len_mime = b[0] as usize; b.advance(1); let mime_data = b.split_to(len_mime); let (metadata, data) = PayloadSupport::read(flag, b);
} pub fn builder(stream_id: u32, flag: u16) -> SetupBuilder { SetupBuilder::new(stream_id, flag) } pub fn get_version(&self) -> Version { self.version } pub fn get_keepalive(&self) -> Duration { Duration::from_millis(u64::from(self.keepalive)) } pub fn get_lifetime(&self) -> Duration { Duration::from_millis(u64::from(self.lifetime)) } pub fn get_token(&self) -> Option<Bytes> { self.token.clone() } pub fn get_mime_metadata(&self) -> &String { &self.mime_metadata } pub fn get_mime_data(&self) -> &String { &self.mime_data } pub fn get_metadata(&self) -> &Option<Bytes> { &self.metadata } pub fn get_data(&self) -> &Option<Bytes> { &self.data } pub fn split(self) -> (Option<Bytes>, Option<Bytes>) { (self.data, self.metadata) } } pub struct SetupBuilder { stream_id: u32, flag: u16, value: Setup, } impl SetupBuilder { fn new(stream_id: u32, flag: u16) -> SetupBuilder { SetupBuilder { stream_id, flag, value: Setup { version: Version::default(), keepalive: 30_000, lifetime: 90_000, token: None, mime_metadata: String::from(DEFAULT_MIME_TYPE), mime_data: String::from(DEFAULT_MIME_TYPE), metadata: None, data: None, }, } } pub fn build(self) -> Frame { Frame::new(self.stream_id, Body::Setup(self.value), self.flag) } pub fn set_data(mut self, bs: Bytes) -> Self { self.value.data = Some(bs); self } pub fn set_metadata(mut self, bs: Bytes) -> Self { self.flag |= FLAG_METADATA; self.value.metadata = Some(bs); self } pub fn set_version(mut self, major: u16, minor: u16) -> Self { self.value.version = Version::new(major, minor); self } pub fn set_keepalive(mut self, duration: Duration) -> Self { self.value.keepalive = duration.as_millis() as u32; self } pub fn set_lifetime(mut self, duration: Duration) -> Self { self.value.lifetime = duration.as_millis() as u32; self } pub fn set_token(mut self, token: Bytes) -> Self { self.value.token = Some(token); self.flag |= FLAG_RESUME; self } pub fn set_mime_metadata(mut self, mime: &str) -> Self { if mime.len() > 256 { panic!("maximum mime length is 256"); } self.value.mime_metadata = String::from(mime); self } pub fn set_mime_data(mut self, mime: &str) -> Self { if mime.len() > 256 { panic!("maximum mime length is 256"); } self.value.mime_data = String::from(mime); self } }
Ok(Setup { version: Version::new(major, minor), keepalive, lifetime, token, mime_metadata: String::from_utf8(mime_metadata.to_vec()).unwrap(), mime_data: String::from_utf8(mime_data.to_vec()).unwrap(), metadata, data, })
call_expression
[ { "content": "#[inline]\n\nfn to_frame_type(body: &Body) -> u16 {\n\n match body {\n\n Body::Setup(_) => TYPE_SETUP,\n\n Body::Lease(_) => TYPE_LEASE,\n\n Body::Keepalive(_) => TYPE_KEEPALIVE,\n\n Body::RequestResponse(_) => TYPE_REQUEST_RESPONSE,\n\n Body::RequestFNF(_) => TYPE_REQUEST_FNF,\n\n Body::RequestStream(_) => TYPE_REQUEST_STREAM,\n\n Body::RequestChannel(_) => TYPE_REQUEST_CHANNEL,\n\n Body::RequestN(_) => TYPE_REQUEST_N,\n\n Body::Cancel() => TYPE_CANCEL,\n\n Body::Payload(_) => TYPE_PAYLOAD,\n\n Body::Error(_) => TYPE_ERROR,\n\n Body::MetadataPush(_) => TYPE_METADATA_PUSH,\n\n Body::Resume(_) => TYPE_RESUME,\n\n Body::ResumeOK(_) => TYPE_RESUME_OK,\n\n }\n\n}\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 0, "score": 113316.49496674543 }, { "content": "#[test]\n\nfn test_keepalive() {\n\n let f = Keepalive::builder(1234, FLAG_RESPOND)\n\n .set_last_received_position(123)\n\n .set_data(Bytes::from(\"foobar\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 1, "score": 100679.32348295943 }, { "content": "#[test]\n\nfn test_setup() {\n\n let f = Setup::builder(1234, 0)\n\n .set_mime_data(\"application/binary\")\n\n .set_mime_metadata(\"text/plain\")\n\n .set_token(Bytes::from(\"this_is_a_token\"))\n\n .set_data(Bytes::from(String::from(\"Hello World!\")))\n\n .set_metadata(Bytes::from(String::from(\"foobar\")))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 2, "score": 100397.91920861062 }, { "content": "#[test]\n\nfn test_metadata_push() {\n\n let f = MetadataPush::builder(1234, 0)\n\n .set_metadata(Bytes::from(\"Hello Rust!\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 3, "score": 97855.42945228599 }, { "content": "fn try_codec(f: Frame) {\n\n println!(\"******* codec: {:?}\", f);\n\n let mut bf = BytesMut::with_capacity(f.len() as usize);\n\n f.write_to(&mut bf);\n\n println!(\"####### encode: {}\", hex::encode(bf.to_vec()));\n\n let f2 = Frame::decode(&mut bf).unwrap();\n\n println!(\"####### decode: {:?}\", f2);\n\n assert_eq!(\n\n f, f2,\n\n \"frames doesn't match: expect={:?}, actual={:?}\",\n\n f, f2\n\n );\n\n}\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 4, "score": 88976.9179607121 }, { "content": "#[inline]\n\nfn translate(code: &u32) -> &str {\n\n match *code {\n\n ERR_APPLICATION => \"APPLICATION\",\n\n ERR_INVALID_SETUP => \"INVALID_SETUP\",\n\n ERR_UNSUPPORTED_SETUP => \"UNSUPPORTED_SETUP\",\n\n ERR_REJECT_SETUP => \"REJECT_SETUP\",\n\n ERR_REJECT_RESUME => \"REJECT_RESUME\",\n\n ERR_CONN_FAILED => \"CONN_FAILED\",\n\n ERR_CONN_CLOSED => \"CONN_CLOSED\",\n\n ERR_REJECTED => \"REJECTED\",\n\n ERR_CANCELED => \"CANCELED\",\n\n ERR_INVALID => \"INVALID\",\n\n _ => \"UNKNOWN\",\n\n }\n\n}\n", "file_path": "rsocket/src/errors.rs", "rank": 5, "score": 82361.2668137411 }, { "content": "pub trait Writeable {\n\n fn write_to(&self, bf: &mut BytesMut);\n\n fn len(&self) -> usize;\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n\npub struct U24;\n\n\n\nimpl U24 {\n\n pub fn max() -> usize {\n\n 0x00FF_FFFF\n\n }\n\n\n\n pub fn write(n: u32, bf: &mut BytesMut) {\n\n bf.put_u8((0xFF & (n >> 16)) as u8);\n\n bf.put_u8((0xFF & (n >> 8)) as u8);\n\n bf.put_u8((0xFF & n) as u8);\n\n }\n", "file_path": "rsocket/src/utils.rs", "rank": 6, "score": 80844.76820799406 }, { "content": "#[test]\n\nfn routing_metadata_codec() {\n\n let m = RoutingMetadata::builder()\n\n 
.push_str(\"/orders\")\n\n .push_str(\"/orders/77778888\")\n\n .push_str(\"/users\")\n\n .push_str(\"/users/1234\")\n\n .build();\n\n\n\n let mut bf = BytesMut::new();\n\n m.write_to(&mut bf);\n\n println!(\"encode routing metadata: {}\", hex::encode(bf.to_vec()));\n\n let m2 = RoutingMetadata::decode(&mut bf).unwrap();\n\n let tags = m2.get_tags();\n\n for tag in tags {\n\n println!(\"decode tag: {}\", tag);\n\n }\n\n assert_eq!(4, tags.len());\n\n assert_eq!(m.get_tags(), tags);\n\n}\n", "file_path": "rsocket-test/tests/test_routing_metadata.rs", "rank": 7, "score": 78294.46168603006 }, { "content": "#[inline]\n\nfn on_setup_noop(\n\n _setup: SetupPayload,\n\n _socket: Box<dyn RSocket>,\n\n) -> Result<Box<dyn RSocket>, Box<dyn Error>> {\n\n Ok(Box::new(EmptyRSocket))\n\n}\n", "file_path": "rsocket/src/x/server.rs", "rank": 8, "score": 76725.60418159094 }, { "content": "#[test]\n\nfn encode_and_decode_composite_metadata() {\n\n let bingo = |metadatas: Vec<&Metadata>| {\n\n assert_eq!(2, metadatas.len());\n\n assert_eq!(\"text/plain\", metadatas[0].get_mime());\n\n assert_eq!(b\"Hello World!\", metadatas[0].get_payload().as_ref());\n\n assert_eq!(\"application/not_well\", metadatas[1].get_mime());\n\n assert_eq!(b\"Not Well!\", metadatas[1].get_payload().as_ref());\n\n };\n\n\n\n let cm = CompositeMetadata::builder()\n\n .push(\"text/plain\", b\"Hello World!\")\n\n .push(\"application/not_well\", \"Not Well!\")\n\n .build();\n\n bingo(cm.iter().collect());\n\n\n\n let mut bf = BytesMut::new();\n\n cm.write_to(&mut bf);\n\n let cm2 = CompositeMetadata::decode(&mut bf).unwrap();\n\n bingo(cm2.iter().collect());\n\n}\n", "file_path": "rsocket-test/tests/test_composite_metadata.rs", "rank": 9, "score": 76715.103290838 }, { "content": "#[test]\n\nfn test_payload() {\n\n let f = Payload::builder(1234, FLAG_NEXT | FLAG_COMPLETE)\n\n .set_data(Bytes::from(\"Hello World!\"))\n\n .set_metadata(Bytes::from(\"foobar\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 10, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_request_n() {\n\n let f = RequestN::builder(1234, 0).set_n(77778888).build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 11, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_error() {\n\n let f = Error::builder(1234, 0)\n\n .set_data(Bytes::from(\"Hello World!\"))\n\n .set_code(4444)\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 12, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_resume() {\n\n let f = Resume::builder(0, FLAG_RESUME)\n\n .set_last_received_server_position(123)\n\n .set_first_available_client_position(22)\n\n .set_token(Bytes::from(\"this is a token\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 13, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn resume_ok() {\n\n let f = ResumeOK::builder(1234, 0).set_position(2333).build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 14, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_lease() {\n\n let f = Lease::builder(1234, 0)\n\n .set_metadata(Bytes::from(\"Hello Rust!\"))\n\n .set_number_of_requests(333)\n\n .set_ttl(1000)\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 15, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_cancel() {\n\n 
let f = Cancel::builder(1234, 0).build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 16, "score": 70566.29560966417 }, { "content": "#[test]\n\nfn test_request_channel() {\n\n let f = RequestChannel::builder(1234, 0)\n\n .set_initial_request_n(1)\n\n .set_data(Bytes::from(\"Hello World!\"))\n\n .set_metadata(Bytes::from(\"foobar\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 17, "score": 68739.53167424379 }, { "content": "#[test]\n\nfn test_request_fnf() {\n\n let f = RequestFNF::builder(1234, 0)\n\n .set_data(Bytes::from(\"Hello\"))\n\n .set_metadata(Bytes::from(\"World\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 18, "score": 68739.53167424379 }, { "content": "#[test]\n\nfn test_request_response() {\n\n let f = RequestResponse::builder(1234, 0)\n\n .set_data(Bytes::from(\"Hello World\"))\n\n .set_metadata(Bytes::from(\"Foobar\"))\n\n .build();\n\n try_codec(f);\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_frames.rs", "rank": 19, "score": 68739.53167424379 }, { "content": "#[inline]\n\nfn read_binary(value: JsValue, incoming: Tx<Frame>) {\n\n let reader = FileReader::new().unwrap_throw();\n\n\n\n let state = Rc::new(RefCell::new(None));\n\n\n\n let onload = {\n\n let state = state.clone();\n\n let reader = reader.clone();\n\n\n\n Closure::once(move |_: ProgressEvent| {\n\n *state.borrow_mut() = None;\n\n let data: ArrayBuffer = reader.result().unwrap_throw().unchecked_into();\n\n let raw: Vec<u8> = Uint8Array::new(&data).to_vec();\n\n // Use data...\n\n let mut bf = BytesMut::from(&raw[..]);\n\n let msg = Frame::decode(&mut bf).unwrap();\n\n incoming.unbounded_send(msg).unwrap();\n\n })\n\n };\n\n\n", "file_path": "rsocket-transport-wasm/src/client.rs", "rank": 20, "score": 66619.52570325794 }, { "content": "use crate::utils::Writeable;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct Version {\n\n major: u16,\n\n minor: u16,\n\n}\n\n\n\nimpl Default for Version {\n\n fn default() -> Version {\n\n Version { major: 1, minor: 0 }\n\n }\n\n}\n\n\n\nimpl Writeable for Version {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u16(self.major);\n\n bf.put_u16(self.minor);\n\n }\n", "file_path": "rsocket/src/frame/version.rs", "rank": 21, "score": 64421.396058711565 }, { "content": " fn len(&self) -> usize {\n\n 4\n\n }\n\n}\n\n\n\nimpl Version {\n\n pub fn new(major: u16, minor: u16) -> Version {\n\n Version { major, minor }\n\n }\n\n\n\n pub fn get_major(self) -> u16 {\n\n self.major\n\n }\n\n\n\n pub fn get_minor(self) -> u16 {\n\n self.minor\n\n }\n\n}\n", "file_path": "rsocket/src/frame/version.rs", "rank": 22, "score": 64408.99975758479 }, { "content": "use super::{Body, Frame};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Keepalive {\n\n last_received_position: u64,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct KeepaliveBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n keepalive: Keepalive,\n\n}\n\n\n\nimpl KeepaliveBuilder {\n\n fn new(stream_id: u32, flag: u16) -> KeepaliveBuilder {\n\n KeepaliveBuilder {\n\n stream_id,\n", "file_path": "rsocket/src/frame/keepalive.rs", "rank": 23, "score": 64322.32189807603 }, { "content": " }\n\n}\n\n\n\nimpl Keepalive {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<Keepalive> {\n\n let position = 
bf.get_u64();\n\n let mut d: Option<Bytes> = None;\n\n if !bf.is_empty() {\n\n d = Some(bf.to_bytes());\n\n }\n\n Ok(Keepalive {\n\n last_received_position: position,\n\n data: d,\n\n })\n\n }\n\n\n\n pub fn builder(stream_id: u32, flag: u16) -> KeepaliveBuilder {\n\n KeepaliveBuilder::new(stream_id, flag)\n\n }\n\n\n", "file_path": "rsocket/src/frame/keepalive.rs", "rank": 24, "score": 64321.41348858209 }, { "content": " pub fn get_last_received_position(&self) -> u64 {\n\n self.last_received_position\n\n }\n\n\n\n pub fn get_data(&self) -> &Option<Bytes> {\n\n &self.data\n\n }\n\n\n\n pub fn split(self) -> (Option<Bytes>, Option<Bytes>) {\n\n (self.data, None)\n\n }\n\n}\n\n\n\nimpl Writeable for Keepalive {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u64(self.last_received_position);\n\n match &self.data {\n\n Some(v) => bf.put(v.bytes()),\n\n None => (),\n\n }\n", "file_path": "rsocket/src/frame/keepalive.rs", "rank": 25, "score": 64318.47235928565 }, { "content": " flag,\n\n keepalive: Keepalive {\n\n last_received_position: 0,\n\n data: None,\n\n },\n\n }\n\n }\n\n\n\n pub fn set_data(mut self, data: Bytes) -> Self {\n\n self.keepalive.data = Some(data);\n\n self\n\n }\n\n\n\n pub fn set_last_received_position(mut self, position: u64) -> Self {\n\n self.keepalive.last_received_position = position;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::Keepalive(self.keepalive), self.flag)\n", "file_path": "rsocket/src/frame/keepalive.rs", "rank": 26, "score": 64313.045666915605 }, { "content": " }\n\n\n\n fn len(&self) -> usize {\n\n 8 + match &self.data {\n\n Some(v) => v.len(),\n\n None => 0,\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/keepalive.rs", "rank": 27, "score": 64300.5206488281 }, { "content": " pub fn builder(stream_id: u32, flag: u16) -> MetadataPushBuiler {\n\n MetadataPushBuiler::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_metadata(&self) -> &Option<Bytes> {\n\n &self.metadata\n\n }\n\n\n\n pub fn split(self) -> (Option<Bytes>, Option<Bytes>) {\n\n (None, self.metadata)\n\n }\n\n}\n\n\n\nimpl Writeable for MetadataPush {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n match &self.metadata {\n\n Some(v) => bf.put(v.bytes()),\n\n None => (),\n\n }\n\n }\n\n\n\n fn len(&self) -> usize {\n\n match &self.metadata {\n\n Some(v) => v.len(),\n\n None => 0,\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/metadata_push.rs", "rank": 38, "score": 62025.36345470279 }, { "content": "use super::{Body, Frame};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct MetadataPush {\n\n metadata: Option<Bytes>,\n\n}\n\n\n\npub struct MetadataPushBuiler {\n\n stream_id: u32,\n\n flag: u16,\n\n value: MetadataPush,\n\n}\n\n\n\nimpl MetadataPushBuiler {\n\n fn new(stream_id: u32, flag: u16) -> MetadataPushBuiler {\n\n MetadataPushBuiler {\n\n stream_id,\n\n flag,\n", "file_path": "rsocket/src/frame/metadata_push.rs", "rank": 39, "score": 62018.613149814824 }, { "content": " value: MetadataPush { metadata: None },\n\n }\n\n }\n\n\n\n pub fn set_metadata(mut self, metadata: Bytes) -> Self {\n\n self.value.metadata = Some(metadata);\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::MetadataPush(self.value), self.flag)\n\n }\n\n}\n\n\n\nimpl MetadataPush {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<MetadataPush> {\n\n let m = Bytes::from(bf.to_vec());\n\n Ok(MetadataPush { metadata: 
Some(m) })\n\n }\n\n\n", "file_path": "rsocket/src/frame/metadata_push.rs", "rank": 40, "score": 62016.85161297839 }, { "content": "#[derive(Clone)]\n\nstruct Responder {\n\n inner: Arc<RwLock<Box<dyn RSocket>>>,\n\n}\n\n\n", "file_path": "rsocket/src/transport/socket.rs", "rank": 41, "score": 51684.52581966804 }, { "content": "type FnStart = fn();\n\n\n\npub struct ServerBuilder<T, C>\n\nwhere\n\n T: Send + Sync + ServerTransport<Item = C>,\n\n C: Send + Sync + ClientTransport,\n\n{\n\n transport: Option<T>,\n\n on_setup: FnAcceptorWithSetup,\n\n start_handler: Option<FnStart>,\n\n}\n\n\n\nimpl<T, C> ServerBuilder<T, C>\n\nwhere\n\n T: Send + Sync + ServerTransport<Item = C> + 'static,\n\n C: Send + Sync + ClientTransport + 'static,\n\n{\n\n pub(crate) fn new() -> ServerBuilder<T, C> {\n\n ServerBuilder {\n\n transport: None,\n", "file_path": "rsocket/src/x/server.rs", "rank": 42, "score": 50695.38560862335 }, { "content": "pub trait Spawner {\n\n fn spawn<F>(&self, task: F)\n\n where\n\n F: Send + Future<Output = ()> + 'static;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DefaultSpawner;\n\n\n\nimpl Spawner for DefaultSpawner {\n\n fn spawn<F>(&self, task: F)\n\n where\n\n F: Send + Future<Output = ()> + 'static,\n\n {\n\n tokio::spawn(task);\n\n }\n\n}\n", "file_path": "rsocket/src/runtime/mod.rs", "rank": 43, "score": 47408.568261870394 }, { "content": "pub trait ClientTransport {\n\n fn attach(self, incoming: Tx<Frame>, sending: Rx<Frame>) -> SafeFuture<BoxResult<()>>;\n\n}\n\n\n", "file_path": "rsocket/src/transport/spi.rs", "rank": 44, "score": 46387.13268151024 }, { "content": "pub trait ServerTransport {\n\n type Item;\n\n fn start(\n\n self,\n\n starter: Option<fn()>,\n\n acceptor: impl Fn(Self::Item) + Send + Sync + 'static,\n\n ) -> SafeFuture<BoxResult<()>>\n\n where\n\n Self::Item: ClientTransport + Sized;\n\n}\n\n\n\npub type FnAcceptorWithSetup =\n\n fn(SetupPayload, Box<dyn RSocket>) -> Result<Box<dyn RSocket>, Box<dyn Error>>;\n\n\n\npub(crate) enum Acceptor {\n\n Simple(Arc<fn() -> Box<dyn RSocket>>),\n\n Generate(Arc<FnAcceptorWithSetup>),\n\n Empty(),\n\n}\n", "file_path": "rsocket/src/transport/spi.rs", "rank": 45, "score": 46387.13268151024 }, { "content": "fn init() {\n\n let _ = env_logger::builder()\n\n .format_timestamp_millis()\n\n .is_test(true)\n\n .try_init();\n\n}\n\n\n", "file_path": "rsocket-test/tests/test_clients.rs", "rank": 46, "score": 43723.4232471763 }, { "content": "#[test]\n\nfn test_tcp() {\n\n init();\n\n\n\n let addr = \"127.0.0.1:7878\";\n\n\n\n let server_runtime = Runtime::new().unwrap();\n\n\n\n // spawn a server\n\n server_runtime.spawn(async move {\n\n RSocketFactory::receive()\n\n .transport(TcpServerTransport::from(addr))\n\n .acceptor(|setup, _socket| {\n\n info!(\"accept setup: {:?}\", setup);\n\n Ok(Box::new(EchoRSocket))\n\n })\n\n .on_start(|| info!(\"+++++++ tcp echo server started! +++++++\"))\n\n .serve()\n\n .await\n\n });\n\n\n", "file_path": "rsocket-test/tests/test_clients.rs", "rank": 47, "score": 42716.80846612124 }, { "content": "#[test]\n\nfn test_websocket() {\n\n init();\n\n\n\n let addr = \"127.0.0.1:8080\";\n\n\n\n let server_runtime = Runtime::new().unwrap();\n\n\n\n // spawn a server\n\n server_runtime.spawn(async move {\n\n RSocketFactory::receive()\n\n .transport(WebsocketServerTransport::from(addr))\n\n .acceptor(|setup, _socket| {\n\n info!(\"accept setup: {:?}\", setup);\n\n Ok(Box::new(EchoRSocket))\n\n })\n\n .on_start(|| info!(\"+++++++ websocket echo server started! 
+++++++\"))\n\n .serve()\n\n .await\n\n });\n\n\n", "file_path": "rsocket-test/tests/test_clients.rs", "rank": 48, "score": 42716.80846612124 }, { "content": "#[test]\n\nfn test_wellknown() {\n\n let got = WellKnownMIME::from(\"application/json\");\n\n assert_eq!(WellKnownMIME::ApplicationJson, got);\n\n WellKnownMIME::foreach(|m| {\n\n let mut result = WellKnownMIME::from(m.raw());\n\n assert_eq!(m, &result);\n\n result = WellKnownMIME::from(format!(\"{}\", m));\n\n assert_eq!(m, &result);\n\n });\n\n}\n", "file_path": "rsocket-test/tests/test_mimes.rs", "rank": 49, "score": 42716.80846612124 }, { "content": "pub trait RSocket: Sync + Send {\n\n fn metadata_push(&self, req: Payload) -> Mono<()>;\n\n fn fire_and_forget(&self, req: Payload) -> Mono<()>;\n\n fn request_response(&self, req: Payload) -> Mono<Result<Payload, RSocketError>>;\n\n fn request_stream(&self, req: Payload) -> Flux<Result<Payload, RSocketError>>;\n\n fn request_channel(\n\n &self,\n\n reqs: Flux<Result<Payload, RSocketError>>,\n\n ) -> Flux<Result<Payload, RSocketError>>;\n\n}\n\n\n\npub struct EchoRSocket;\n\n\n\nimpl RSocket for EchoRSocket {\n\n fn metadata_push(&self, req: Payload) -> Mono<()> {\n\n info!(\"{:?}\", req);\n\n Box::pin(async {})\n\n }\n\n\n\n fn fire_and_forget(&self, req: Payload) -> Mono<()> {\n", "file_path": "rsocket/src/spi.rs", "rank": 50, "score": 42109.33640024303 }, { "content": "\n\nimpl SetupPayload {\n\n pub fn metadata(&self) -> &Option<Bytes> {\n\n &self.m\n\n }\n\n\n\n pub fn data(&self) -> &Option<Bytes> {\n\n &self.d\n\n }\n\n\n\n pub fn split(self) -> (Option<Bytes>, Option<Bytes>) {\n\n (self.d, self.m)\n\n }\n\n\n\n pub fn keepalive_interval(&self) -> Duration {\n\n self.keepalive.0\n\n }\n\n\n\n pub fn keepalive_lifetime(&self) -> Duration {\n\n self.keepalive.1\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 51, "score": 33113.62275446546 }, { "content": " pub fn builder() -> SetupPayloadBuilder {\n\n SetupPayloadBuilder::new()\n\n }\n\n}\n\n\n\nimpl SetupPayloadBuilder {\n\n fn new() -> SetupPayloadBuilder {\n\n SetupPayloadBuilder {\n\n inner: SetupPayload {\n\n m: None,\n\n d: None,\n\n keepalive: (Duration::from_secs(20), Duration::from_secs(90)),\n\n mime_m: Some(String::from(DEFAULT_MIME_TYPE)),\n\n mime_d: Some(String::from(DEFAULT_MIME_TYPE)),\n\n },\n\n }\n\n }\n\n\n\n pub fn set_metadata(mut self, metadata: Bytes) -> Self {\n\n self.inner.m = Some(metadata);\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 52, "score": 33113.518602798635 }, { "content": "use crate::frame::Setup;\n\nuse crate::utils::DEFAULT_MIME_TYPE;\n\nuse bytes::Bytes;\n\nuse std::time::Duration;\n\n\n\n#[derive(Debug)]\n\npub struct SetupPayload {\n\n m: Option<Bytes>,\n\n d: Option<Bytes>,\n\n keepalive: (Duration, Duration),\n\n mime_m: Option<String>,\n\n mime_d: Option<String>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SetupPayloadBuilder {\n\n inner: SetupPayload,\n\n}\n\n\n\nimpl SetupPayload {\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 53, "score": 33112.389422954235 }, { "content": " }\n\n\n\n pub fn metadata_mime_type(&self) -> &Option<String> {\n\n &self.mime_m\n\n }\n\n\n\n pub fn data_mime_type(&self) -> &Option<String> {\n\n &self.mime_d\n\n }\n\n}\n\n\n\nimpl From<Setup> for SetupPayload {\n\n fn from(input: Setup) -> SetupPayload {\n\n let mut bu = SetupPayload::builder();\n\n // TODO: fill other properties.\n\n bu = bu.set_data_mime_type(input.get_mime_data());\n\n bu = bu.set_metadata_mime_type(input.get_mime_metadata());\n\n // 
bu.set_data_mime_type(String::input.get_mime_data());\n\n let ka = (input.get_keepalive(), input.get_lifetime());\n\n let (d, m) = input.split();\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 54, "score": 33112.24555470488 }, { "content": " self\n\n }\n\n\n\n pub fn set_metadata_utf8(self, metadata: &str) -> Self {\n\n self.set_metadata(Bytes::from(String::from(metadata)))\n\n }\n\n\n\n pub fn set_data(mut self, data: Bytes) -> Self {\n\n self.inner.d = Some(data);\n\n self\n\n }\n\n\n\n pub fn set_data_utf8(self, data: &str) -> Self {\n\n self.set_data(Bytes::from(String::from(data)))\n\n }\n\n\n\n pub fn set_keepalive(\n\n mut self,\n\n tick_period: Duration,\n\n ack_timeout: Duration,\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 55, "score": 33111.31550866148 }, { "content": " missed_acks: u64,\n\n ) -> Self {\n\n let lifetime_mills = (ack_timeout.as_millis() as u64) * missed_acks;\n\n self.inner.keepalive = (tick_period, Duration::from_millis(lifetime_mills));\n\n self\n\n }\n\n\n\n pub fn set_data_mime_type(mut self, mime: &str) -> Self {\n\n self.inner.mime_d = Some(String::from(mime));\n\n self\n\n }\n\n pub fn set_metadata_mime_type(mut self, mime: &str) -> Self {\n\n self.inner.mime_m = Some(String::from(mime));\n\n self\n\n }\n\n\n\n pub fn build(self) -> SetupPayload {\n\n self.inner\n\n }\n\n}\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 56, "score": 33108.99853980372 }, { "content": " if let Some(b) = d {\n\n bu = bu.set_data(b);\n\n }\n\n if let Some(b) = m {\n\n bu = bu.set_metadata(b);\n\n }\n\n let mut pa = bu.build();\n\n pa.keepalive = ka;\n\n pa\n\n }\n\n}\n", "file_path": "rsocket/src/payload/setup.rs", "rank": 57, "score": 33098.78813580361 }, { "content": "use bytes::{Buf, BufMut, Bytes, BytesMut};\n\nuse rsocket_rust::extension::{CompositeMetadata, Metadata};\n\nuse rsocket_rust::utils::Writeable;\n\n\n\n#[test]\n", "file_path": "rsocket-test/tests/test_composite_metadata.rs", "rank": 58, "score": 31108.8388750378 }, { "content": "extern crate rsocket_rust;\n\n\n\nuse bytes::BytesMut;\n\nuse rsocket_rust::extension::RoutingMetadata;\n\nuse rsocket_rust::utils::Writeable;\n\n\n\n#[test]\n", "file_path": "rsocket-test/tests/test_routing_metadata.rs", "rank": 59, "score": 31107.18950180876 }, { "content": " match self.get_data() {\n\n Some(b) => String::from_utf8(b.to_vec()).unwrap(),\n\n None => String::from(\"\"),\n\n }\n\n }\n\n\n\n pub fn get_data(&self) -> &Option<Bytes> {\n\n &self.data\n\n }\n\n\n\n pub fn get_code(&self) -> u32 {\n\n self.code\n\n }\n\n}\n\n\n\nimpl Writeable for Error {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u32(self.code);\n\n match &self.data {\n\n Some(v) => bf.put(v.bytes()),\n", "file_path": "rsocket/src/frame/error.rs", "rank": 60, "score": 30925.433362747925 }, { "content": " Frame::new(self.stream_id, Body::Error(self.value), self.flag)\n\n }\n\n}\n\n\n\nimpl Error {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<Error> {\n\n let code = bf.get_u32();\n\n let d: Option<Bytes> = if !bf.is_empty() {\n\n Some(bf.to_bytes())\n\n } else {\n\n None\n\n };\n\n Ok(Error { code, data: d })\n\n }\n\n\n\n pub fn builder(stream_id: u32, flag: u16) -> ErrorBuilder {\n\n ErrorBuilder::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_data_utf8(&self) -> String {\n", "file_path": "rsocket/src/frame/error.rs", "rank": 61, "score": 30924.444244540304 }, { "content": "use super::FLAG_METADATA;\n\nuse crate::utils::U24;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\npub(crate) struct 
PayloadSupport {}\n\n\n\nimpl PayloadSupport {\n\n pub fn len(metadata: &Option<Bytes>, data: &Option<Bytes>) -> usize {\n\n let a = match metadata {\n\n Some(v) => 3 + v.len(),\n\n None => 0,\n\n };\n\n let b = match data {\n\n Some(v) => v.len(),\n\n None => 0,\n\n };\n\n a + b\n\n }\n\n\n\n // TODO: change to Result\n", "file_path": "rsocket/src/frame/utils.rs", "rank": 62, "score": 30924.010401125666 }, { "content": " pub fn read(flag: u16, bf: &mut BytesMut) -> (Option<Bytes>, Option<Bytes>) {\n\n let m: Option<Bytes> = if flag & FLAG_METADATA != 0 {\n\n let n = U24::read_advance(bf);\n\n Some(bf.split_to(n as usize).to_bytes())\n\n } else {\n\n None\n\n };\n\n let d: Option<Bytes> = if bf.is_empty() {\n\n None\n\n } else {\n\n Some(Bytes::from(bf.to_vec()))\n\n };\n\n (m, d)\n\n }\n\n\n\n pub fn write(bf: &mut BytesMut, metadata: &Option<Bytes>, data: &Option<Bytes>) {\n\n if let Some(v) = metadata {\n\n let n = v.len() as u32;\n\n U24::write(n, bf);\n\n bf.put(v.bytes());\n\n }\n\n if let Some(v) = data {\n\n bf.put(v.bytes())\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/utils.rs", "rank": 63, "score": 30923.838445197533 }, { "content": " pub fn builder(stream_id: u32, flag: u16) -> RequestNBuilder {\n\n RequestNBuilder::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_n(&self) -> u32 {\n\n self.n\n\n }\n\n}\n\n\n\nimpl Writeable for RequestN {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u32(self.get_n())\n\n }\n\n\n\n fn len(&self) -> usize {\n\n 4\n\n }\n\n}\n", "file_path": "rsocket/src/frame/request_n.rs", "rank": 64, "score": 30923.649311592253 }, { "content": "use super::{Body, Frame, PayloadSupport, FLAG_METADATA};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Payload {\n\n metadata: Option<Bytes>,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct PayloadBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: Payload,\n\n}\n\n\n\nimpl PayloadBuilder {\n\n fn new(stream_id: u32, flag: u16) -> PayloadBuilder {\n\n PayloadBuilder {\n\n stream_id,\n", "file_path": "rsocket/src/frame/payload.rs", "rank": 65, "score": 30923.24576132232 }, { "content": "\n\n pub fn set_number_of_requests(mut self, n: u32) -> Self {\n\n self.value.number_of_requests = n;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::Lease(self.value), self.flag)\n\n }\n\n}\n\n\n\nimpl Lease {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<Lease> {\n\n let ttl = bf.get_u32();\n\n let n = bf.get_u32();\n\n let m = if flag & FLAG_METADATA != 0 {\n\n Some(bf.to_bytes())\n\n } else {\n\n None\n\n };\n", "file_path": "rsocket/src/frame/lease.rs", "rank": 66, "score": 30923.06718018626 }, { "content": " self.ttl\n\n }\n\n}\n\n\n\nimpl Writeable for Lease {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u32(self.ttl);\n\n bf.put_u32(self.number_of_requests);\n\n match &self.metadata {\n\n Some(v) => bf.put(v.bytes()),\n\n None => (),\n\n }\n\n }\n\n\n\n fn len(&self) -> usize {\n\n 8 + match &self.metadata {\n\n Some(v) => v.len(),\n\n None => 0,\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/lease.rs", "rank": 67, "score": 30922.71555297238 }, { "content": "use super::{Body, Frame, REQUEST_MAX};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RequestN {\n\n n: u32,\n\n}\n\n\n\npub struct RequestNBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: 
RequestN,\n\n}\n\n\n\nimpl RequestNBuilder {\n\n fn new(stream_id: u32, flag: u16) -> RequestNBuilder {\n\n RequestNBuilder {\n\n stream_id,\n\n flag,\n", "file_path": "rsocket/src/frame/request_n.rs", "rank": 68, "score": 30922.279950609096 }, { "content": "use super::{Body, Frame, FLAG_METADATA};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Lease {\n\n ttl: u32,\n\n number_of_requests: u32,\n\n metadata: Option<Bytes>,\n\n}\n\n\n\npub struct LeaseBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: Lease,\n\n}\n\n\n\nimpl LeaseBuilder {\n\n fn new(stream_id: u32, flag: u16) -> LeaseBuilder {\n\n LeaseBuilder {\n", "file_path": "rsocket/src/frame/lease.rs", "rank": 69, "score": 30921.70819185024 }, { "content": "use super::{Body, Frame, Version};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Resume {\n\n version: Version,\n\n token: Option<Bytes>,\n\n last_received_server_position: u64,\n\n first_available_client_position: u64,\n\n}\n\n\n\npub struct ResumeBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n inner: Resume,\n\n}\n\n\n\nimpl Resume {\n\n fn new() -> Resume {\n", "file_path": "rsocket/src/frame/resume.rs", "rank": 70, "score": 30921.586135530295 }, { "content": " Resume(Resume),\n\n ResumeOK(ResumeOK),\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Frame {\n\n stream_id: u32,\n\n body: Body,\n\n flag: u16,\n\n}\n\n\n\nimpl Writeable for Frame {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n bf.put_u32(self.stream_id);\n\n bf.put_u16((to_frame_type(&self.body) << 10) | self.flag);\n\n match &self.body {\n\n Body::Setup(v) => v.write_to(bf),\n\n Body::RequestResponse(v) => v.write_to(bf),\n\n Body::RequestStream(v) => v.write_to(bf),\n\n Body::RequestChannel(v) => v.write_to(bf),\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 71, "score": 30921.55294871707 }, { "content": " Resume {\n\n version: Version::default(),\n\n token: None,\n\n last_received_server_position: 0,\n\n first_available_client_position: 0,\n\n }\n\n }\n\n\n\n pub fn decode(flag: u16, b: &mut BytesMut) -> RSocketResult<Resume> {\n\n let major = b.get_u16();\n\n let minor = b.get_u16();\n\n let token_size = b.get_u16();\n\n let token = if token_size > 0 {\n\n Some(b.split_to(token_size as usize).to_bytes())\n\n } else {\n\n None\n\n };\n\n let p1 = b.get_u64();\n\n let p2 = b.get_u64();\n\n Ok(Resume {\n", "file_path": "rsocket/src/frame/resume.rs", "rank": 72, "score": 30921.32512201738 }, { "content": " Frame::new(self.stream_id, Body::Payload(self.value), self.flag)\n\n }\n\n}\n\n\n\nimpl Payload {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<Payload> {\n\n let (m, d) = PayloadSupport::read(flag, bf);\n\n Ok(Payload {\n\n metadata: m,\n\n data: d,\n\n })\n\n }\n\n\n\n pub fn builder(stream_id: u32, flag: u16) -> PayloadBuilder {\n\n PayloadBuilder::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_metadata(&self) -> &Option<Bytes> {\n\n &self.metadata\n\n }\n", "file_path": "rsocket/src/frame/payload.rs", "rank": 73, "score": 30921.22930317415 }, { "content": "use super::{Body, Frame};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\nuse std::fmt;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Error {\n\n code: u32,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct ErrorBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: 
Error,\n\n}\n\n\n\nimpl ErrorBuilder {\n\n fn new(stream_id: u32, flag: u16) -> ErrorBuilder {\n\n ErrorBuilder {\n", "file_path": "rsocket/src/frame/error.rs", "rank": 74, "score": 30920.81021047401 }, { "content": " value: RequestN { n: REQUEST_MAX },\n\n }\n\n }\n\n\n\n pub fn set_n(mut self, n: u32) -> Self {\n\n self.value.n = n;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::RequestN(self.value), self.flag)\n\n }\n\n}\n\n\n\nimpl RequestN {\n\n pub fn decode(flag: u16, bf: &mut BytesMut) -> RSocketResult<RequestN> {\n\n let n = bf.get_u32();\n\n Ok(RequestN { n })\n\n }\n\n\n", "file_path": "rsocket/src/frame/request_n.rs", "rank": 75, "score": 30920.375536469317 }, { "content": "\n\n pub fn get_data(&self) -> &Option<Bytes> {\n\n &self.data\n\n }\n\n\n\n pub fn split(self) -> (Option<Bytes>, Option<Bytes>) {\n\n (self.data, self.metadata)\n\n }\n\n}\n\n\n\nimpl Writeable for Payload {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n PayloadSupport::write(bf, self.get_metadata(), self.get_data());\n\n }\n\n\n\n fn len(&self) -> usize {\n\n PayloadSupport::len(self.get_metadata(), self.get_data())\n\n }\n\n}\n", "file_path": "rsocket/src/frame/payload.rs", "rank": 76, "score": 30917.922297048455 }, { "content": " flag,\n\n value: Payload {\n\n metadata: None,\n\n data: None,\n\n },\n\n }\n\n }\n\n\n\n pub fn set_data(mut self, data: Bytes) -> Self {\n\n self.value.data = Some(data);\n\n self\n\n }\n\n\n\n pub fn set_metadata(mut self, metadata: Bytes) -> Self {\n\n self.value.metadata = Some(metadata);\n\n self.flag |= FLAG_METADATA;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n", "file_path": "rsocket/src/frame/payload.rs", "rank": 77, "score": 30917.495719633123 }, { "content": "\n\nimpl Writeable for Resume {\n\n fn write_to(&self, bf: &mut BytesMut) {\n\n self.version.write_to(bf);\n\n if let Some(b) = self.get_token() {\n\n bf.put_u16(b.len() as u16);\n\n bf.put(b.bytes());\n\n }\n\n bf.put_u64(self.get_last_received_server_position());\n\n bf.put_u64(self.get_first_available_client_position());\n\n }\n\n\n\n fn len(&self) -> usize {\n\n let mut size: usize = 22;\n\n if let Some(b) = self.get_token() {\n\n size += b.len();\n\n }\n\n size\n\n }\n\n}\n", "file_path": "rsocket/src/frame/resume.rs", "rank": 78, "score": 30916.93919009099 }, { "content": "\n\npub use cancel::Cancel;\n\npub use error::Error;\n\npub use keepalive::Keepalive;\n\npub use lease::Lease;\n\npub use metadata_push::MetadataPush;\n\npub use payload::Payload;\n\npub use request_channel::RequestChannel;\n\npub use request_fnf::RequestFNF;\n\npub use request_n::RequestN;\n\npub use request_response::RequestResponse;\n\npub use request_stream::RequestStream;\n\npub use resume::Resume;\n\npub use resume_ok::ResumeOK;\n\npub use setup::{Setup, SetupBuilder};\n\npub use utils::*;\n\npub use version::Version;\n\n\n\npub const FLAG_NEXT: u16 = 0x01 << 5;\n\npub const FLAG_COMPLETE: u16 = 0x01 << 6;\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 79, "score": 30915.9702958215 }, { "content": " version: Version::new(major, minor),\n\n token,\n\n last_received_server_position: p1,\n\n first_available_client_position: p2,\n\n })\n\n }\n\n\n\n pub fn builder(stream_id: u32, flag: u16) -> ResumeBuilder {\n\n ResumeBuilder::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_version(&self) -> Version {\n\n self.version\n\n }\n\n\n\n pub fn get_token(&self) -> &Option<Bytes> {\n\n &self.token\n\n }\n\n\n\n pub fn get_last_received_server_position(&self) -> u64 {\n", "file_path": 
"rsocket/src/frame/resume.rs", "rank": 80, "score": 30915.62219687552 }, { "content": " flag,\n\n }\n\n }\n\n\n\n pub fn decode(b: &mut BytesMut) -> RSocketResult<Frame> {\n\n // TODO: check size\n\n let sid = b.get_u32();\n\n let n = b.get_u16();\n\n let (flag, kind) = (n & 0x03FF, (n & 0xFC00) >> 10);\n\n let body = match kind {\n\n TYPE_SETUP => Setup::decode(flag, b).map(Body::Setup),\n\n TYPE_REQUEST_RESPONSE => RequestResponse::decode(flag, b).map(Body::RequestResponse),\n\n TYPE_REQUEST_STREAM => RequestStream::decode(flag, b).map(Body::RequestStream),\n\n TYPE_REQUEST_CHANNEL => RequestChannel::decode(flag, b).map(Body::RequestChannel),\n\n TYPE_REQUEST_FNF => RequestFNF::decode(flag, b).map(Body::RequestFNF),\n\n TYPE_REQUEST_N => RequestN::decode(flag, b).map(Body::RequestN),\n\n TYPE_METADATA_PUSH => MetadataPush::decode(flag, b).map(Body::MetadataPush),\n\n TYPE_KEEPALIVE => Keepalive::decode(flag, b).map(Body::Keepalive),\n\n TYPE_PAYLOAD => Payload::decode(flag, b).map(Body::Payload),\n\n TYPE_LEASE => Lease::decode(flag, b).map(Body::Lease),\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 81, "score": 30915.12798206673 }, { "content": " stream_id,\n\n flag,\n\n value: Error {\n\n code: 0,\n\n data: None,\n\n },\n\n }\n\n }\n\n\n\n pub fn set_code(mut self, code: u32) -> Self {\n\n self.value.code = code;\n\n self\n\n }\n\n\n\n pub fn set_data(mut self, data: Bytes) -> Self {\n\n self.value.data = Some(data);\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n", "file_path": "rsocket/src/frame/error.rs", "rank": 82, "score": 30913.406998243696 }, { "content": "use super::{Body, Frame};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Cancel {}\n\n\n\npub struct CancelBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n}\n\n\n\nimpl CancelBuilder {\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::Cancel(), self.flag)\n\n }\n\n}\n\n\n\nimpl Cancel {\n\n pub fn builder(stream_id: u32, flag: u16) -> CancelBuilder {\n\n CancelBuilder { stream_id, flag }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/cancel.rs", "rank": 83, "score": 30911.76993630775 }, { "content": " stream_id,\n\n flag,\n\n value: Lease {\n\n ttl: 0,\n\n number_of_requests: 0,\n\n metadata: None,\n\n },\n\n }\n\n }\n\n\n\n pub fn set_metadata(mut self, metadata: Bytes) -> Self {\n\n self.value.metadata = Some(metadata);\n\n self.flag |= FLAG_METADATA;\n\n self\n\n }\n\n\n\n pub fn set_ttl(mut self, ttl: u32) -> Self {\n\n self.value.ttl = ttl;\n\n self\n\n }\n", "file_path": "rsocket/src/frame/lease.rs", "rank": 84, "score": 30911.062388839742 }, { "content": " self.last_received_server_position\n\n }\n\n\n\n pub fn get_first_available_client_position(&self) -> u64 {\n\n self.first_available_client_position\n\n }\n\n}\n\n\n\nimpl ResumeBuilder {\n\n fn new(stream_id: u32, flag: u16) -> ResumeBuilder {\n\n ResumeBuilder {\n\n stream_id,\n\n flag,\n\n inner: Resume::new(),\n\n }\n\n }\n\n\n\n pub fn set_token(mut self, token: Bytes) -> Self {\n\n self.inner.token = Some(token);\n\n self\n", "file_path": "rsocket/src/frame/resume.rs", "rank": 85, "score": 30910.92307808257 }, { "content": "pub const TYPE_RESUME_OK: u16 = 0x0E;\n\n\n\npub const REQUEST_MAX: u32 = 0x7FFF_FFFF; // 2147483647\n\n\n\nconst LEN_HEADER: usize = 6;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Body {\n\n Setup(Setup),\n\n Lease(Lease),\n\n Keepalive(Keepalive),\n\n RequestFNF(RequestFNF),\n\n RequestResponse(RequestResponse),\n\n RequestStream(RequestStream),\n\n RequestChannel(RequestChannel),\n\n 
RequestN(RequestN),\n\n Cancel(),\n\n Payload(Payload),\n\n Error(Error),\n\n MetadataPush(MetadataPush),\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 86, "score": 30910.90165373796 }, { "content": "pub const FLAG_FOLLOW: u16 = 0x01 << 7;\n\npub const FLAG_METADATA: u16 = 0x01 << 8;\n\npub const FLAG_IGNORE: u16 = 0x01 << 9;\n\npub const FLAG_LEASE: u16 = FLAG_COMPLETE;\n\npub const FLAG_RESUME: u16 = FLAG_FOLLOW;\n\npub const FLAG_RESPOND: u16 = FLAG_FOLLOW;\n\n\n\npub const TYPE_SETUP: u16 = 0x01;\n\npub const TYPE_LEASE: u16 = 0x02;\n\npub const TYPE_KEEPALIVE: u16 = 0x03;\n\npub const TYPE_REQUEST_RESPONSE: u16 = 0x04;\n\npub const TYPE_REQUEST_FNF: u16 = 0x05;\n\npub const TYPE_REQUEST_STREAM: u16 = 0x06;\n\npub const TYPE_REQUEST_CHANNEL: u16 = 0x07;\n\npub const TYPE_REQUEST_N: u16 = 0x08;\n\npub const TYPE_CANCEL: u16 = 0x09;\n\npub const TYPE_PAYLOAD: u16 = 0x0A;\n\npub const TYPE_ERROR: u16 = 0x0B;\n\npub const TYPE_METADATA_PUSH: u16 = 0x0C;\n\npub const TYPE_RESUME: u16 = 0x0D;\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 87, "score": 30910.321027870235 }, { "content": " Body::RequestChannel(v) => v.len(),\n\n Body::RequestFNF(v) => v.len(),\n\n Body::RequestN(v) => v.len(),\n\n Body::MetadataPush(v) => v.len(),\n\n Body::Keepalive(v) => v.len(),\n\n Body::Payload(v) => v.len(),\n\n Body::Lease(v) => v.len(),\n\n Body::Cancel() => 0,\n\n Body::Error(v) => v.len(),\n\n Body::ResumeOK(v) => v.len(),\n\n Body::Resume(v) => v.len(),\n\n }\n\n }\n\n}\n\n\n\nimpl Frame {\n\n pub fn new(stream_id: u32, body: Body, flag: u16) -> Frame {\n\n Frame {\n\n stream_id,\n\n body,\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 88, "score": 30910.20105816147 }, { "content": "use crate::errors::RSocketError;\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\nmod cancel;\n\nmod error;\n\nmod keepalive;\n\nmod lease;\n\nmod metadata_push;\n\nmod payload;\n\nmod request_channel;\n\nmod request_fnf;\n\nmod request_n;\n\nmod request_response;\n\nmod request_stream;\n\nmod resume;\n\nmod resume_ok;\n\nmod setup;\n\nmod utils;\n\nmod version;\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 89, "score": 30909.244936200314 }, { "content": " Ok(Lease {\n\n ttl,\n\n number_of_requests: n,\n\n metadata: m,\n\n })\n\n }\n\n\n\n pub fn builder(stream_id: u32, flag: u16) -> LeaseBuilder {\n\n LeaseBuilder::new(stream_id, flag)\n\n }\n\n\n\n pub fn get_number_of_requests(&self) -> u32 {\n\n self.number_of_requests\n\n }\n\n\n\n pub fn get_metadata(&self) -> &Option<Bytes> {\n\n &self.metadata\n\n }\n\n\n\n pub fn get_ttl(&self) -> u32 {\n", "file_path": "rsocket/src/frame/lease.rs", "rank": 90, "score": 30908.03206494794 }, { "content": " Body::RequestFNF(v) => v.write_to(bf),\n\n Body::RequestN(v) => v.write_to(bf),\n\n Body::MetadataPush(v) => v.write_to(bf),\n\n Body::Keepalive(v) => v.write_to(bf),\n\n Body::Payload(v) => v.write_to(bf),\n\n Body::Lease(v) => v.write_to(bf),\n\n Body::Error(v) => v.write_to(bf),\n\n Body::Cancel() => (),\n\n Body::ResumeOK(v) => v.write_to(bf),\n\n Body::Resume(v) => v.write_to(bf),\n\n }\n\n }\n\n\n\n fn len(&self) -> usize {\n\n // header len\n\n LEN_HEADER\n\n + match &self.body {\n\n Body::Setup(v) => v.len(),\n\n Body::RequestResponse(v) => v.len(),\n\n Body::RequestStream(v) => v.len(),\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 91, "score": 30905.976394600835 }, { "content": " None => (),\n\n }\n\n }\n\n\n\n fn len(&self) -> usize {\n\n 4 + match &self.data {\n\n Some(v) => 
v.len(),\n\n None => 0,\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/error.rs", "rank": 92, "score": 30903.136668209983 }, { "content": " TYPE_CANCEL => Ok(Body::Cancel()),\n\n TYPE_ERROR => Error::decode(flag, b).map(Body::Error),\n\n TYPE_RESUME_OK => ResumeOK::decode(flag, b).map(Body::ResumeOK),\n\n TYPE_RESUME => Resume::decode(flag, b).map(Body::Resume),\n\n _ => Err(RSocketError::from(format!(\"illegal frame type: {}\", kind))),\n\n };\n\n body.map(|it| Frame::new(sid, it, flag))\n\n }\n\n\n\n pub fn get_body(self) -> Body {\n\n self.body\n\n }\n\n\n\n pub fn get_frame_type(&self) -> u16 {\n\n to_frame_type(&self.body)\n\n }\n\n\n\n pub fn get_flag(&self) -> u16 {\n\n self.flag\n\n }\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 93, "score": 30901.986763331694 }, { "content": " }\n\n\n\n pub fn set_last_received_server_position(mut self, position: u64) -> Self {\n\n self.inner.last_received_server_position = position;\n\n self\n\n }\n\n\n\n pub fn set_first_available_client_position(mut self, position: u64) -> Self {\n\n self.inner.first_available_client_position = position;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Frame {\n\n Frame {\n\n stream_id: self.stream_id,\n\n flag: self.flag,\n\n body: Body::Resume(self.inner),\n\n }\n\n }\n\n}\n", "file_path": "rsocket/src/frame/resume.rs", "rank": 94, "score": 30900.88018133091 }, { "content": "\n\n pub fn get_stream_id(&self) -> u32 {\n\n self.stream_id\n\n }\n\n\n\n pub fn has_next(&self) -> bool {\n\n self.flag & FLAG_NEXT != 0\n\n }\n\n\n\n pub fn has_complete(&self) -> bool {\n\n self.flag & FLAG_COMPLETE != 0\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "rsocket/src/frame/mod.rs", "rank": 95, "score": 30899.08998818012 }, { "content": "use super::{Body, Frame, PayloadSupport, FLAG_METADATA, REQUEST_MAX};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RequestChannel {\n\n initial_request_n: u32,\n\n metadata: Option<Bytes>,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct RequestChannelBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: RequestChannel,\n\n}\n\n\n\nimpl RequestChannelBuilder {\n\n pub fn new(stream_id: u32, flag: u16) -> RequestChannelBuilder {\n\n RequestChannelBuilder {\n", "file_path": "rsocket/src/frame/request_channel.rs", "rank": 96, "score": 29838.60701848796 }, { "content": "use super::{Body, Frame, PayloadSupport, FLAG_METADATA, REQUEST_MAX};\n\nuse crate::utils::{RSocketResult, Writeable, U24};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RequestStream {\n\n initial_request_n: u32,\n\n metadata: Option<Bytes>,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct RequestStreamBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: RequestStream,\n\n}\n\n\n\nimpl RequestStreamBuilder {\n\n pub fn build(self) -> Frame {\n\n Frame::new(self.stream_id, Body::RequestStream(self.value), self.flag)\n", "file_path": "rsocket/src/frame/request_stream.rs", "rank": 97, "score": 29838.576484146008 }, { "content": "use super::{Body, Frame, PayloadSupport, FLAG_METADATA};\n\nuse crate::utils::{RSocketResult, Writeable};\n\nuse bytes::{BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RequestFNF {\n\n metadata: Option<Bytes>,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct RequestFNFBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: RequestFNF,\n\n}\n\n\n\nimpl RequestFNFBuilder {\n\n fn new(stream_id: u32, flag: u16) -> 
RequestFNFBuilder {\n\n RequestFNFBuilder {\n\n stream_id,\n", "file_path": "rsocket/src/frame/request_fnf.rs", "rank": 98, "score": 29838.490630100965 }, { "content": "use super::{Body, Frame, PayloadSupport, FLAG_METADATA};\n\nuse crate::utils::{RSocketResult, Writeable, U24};\n\nuse bytes::{BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RequestResponse {\n\n metadata: Option<Bytes>,\n\n data: Option<Bytes>,\n\n}\n\n\n\npub struct RequestResponseBuilder {\n\n stream_id: u32,\n\n flag: u16,\n\n value: RequestResponse,\n\n}\n\n\n\nimpl RequestResponseBuilder {\n\n fn new(stream_id: u32, flag: u16) -> RequestResponseBuilder {\n\n RequestResponseBuilder {\n\n stream_id,\n", "file_path": "rsocket/src/frame/request_response.rs", "rank": 99, "score": 29838.320676483887 } ]
Rust
src/pka/compare.rs
jeandudey/cc13x2-rs
215918099301ec75e9dfad531f5cf46e13077a39
#[doc = "Reader of register COMPARE"] pub type R = crate::R<u32, super::COMPARE>; #[doc = "Writer for register COMPARE"] pub type W = crate::W<u32, super::COMPARE>; #[doc = "Register COMPARE `reset()`'s with value 0x01"] impl crate::ResetValue for super::COMPARE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x01 } } #[doc = "Reader of field `RESERVED3`"] pub type RESERVED3_R = crate::R<u32, u32>; #[doc = "Write proxy for field `RESERVED3`"] pub struct RESERVED3_W<'a> { w: &'a mut W, } impl<'a> RESERVED3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1fff_ffff << 3)) | (((value as u32) & 0x1fff_ffff) << 3); self.w } } #[doc = "Reader of field `A_GREATER_THAN_B`"] pub type A_GREATER_THAN_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_GREATER_THAN_B`"] pub struct A_GREATER_THAN_B_W<'a> { w: &'a mut W, } impl<'a> A_GREATER_THAN_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `A_LESS_THAN_B`"] pub type A_LESS_THAN_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_LESS_THAN_B`"] pub struct A_LESS_THAN_B_W<'a> { w: &'a mut W, } impl<'a> A_LESS_THAN_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `A_EQUALS_B`"] pub type A_EQUALS_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_EQUALS_B`"] pub struct A_EQUALS_B_W<'a> { w: &'a mut W, } impl<'a> A_EQUALS_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bits 3:31 - 31:3\\] Ignore on read"] #[inline(always)] pub fn reserved3(&self) -> RESERVED3_R { RESERVED3_R::new(((self.bits >> 3) & 0x1fff_ffff) as u32) } #[doc = "Bit 2 - 2:2\\] Vector_A is greater than Vector_B"] #[inline(always)] pub fn a_greater_than_b(&self) -> A_GREATER_THAN_B_R { A_GREATER_THAN_B_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 1 - 1:1\\] Vector_A is less than Vector_B"] #[inline(always)] pub fn a_less_than_b(&self) -> A_LESS_THAN_B_R { A_LESS_THAN_B_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - 0:0\\] Vector_A is equal to Vector_B"] #[inline(always)] pub fn a_equals_b(&self) -> A_EQUALS_B_R { A_EQUALS_B_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bits 3:31 - 31:3\\] Ignore on read"] #[inline(always)] pub fn reserved3(&mut self) -> RESERVED3_W { RESERVED3_W { w: self } } #[doc = "Bit 2 - 2:2\\] Vector_A is greater than Vector_B"] #[inline(always)] pub fn 
a_greater_than_b(&mut self) -> A_GREATER_THAN_B_W { A_GREATER_THAN_B_W { w: self } } #[doc = "Bit 1 - 1:1\\] Vector_A is less than Vector_B"] #[inline(always)] pub fn a_less_than_b(&mut self) -> A_LESS_THAN_B_W { A_LESS_THAN_B_W { w: self } } #[doc = "Bit 0 - 0:0\\] Vector_A is equal to Vector_B"] #[inline(always)] pub fn a_equals_b(&mut self) -> A_EQUALS_B_W { A_EQUALS_B_W { w: self } } }
#[doc = "Reader of register COMPARE"] pub type R = crate::R<u32, super::COMPARE>; #[doc = "Writer for register COMPARE"] pub type W = crate::W<u32, super::COMPARE>; #[doc = "Register COMPARE `reset()`'s with value 0x01"] impl crate::ResetValue for super::COMPARE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x01 } } #[doc = "Reader of field `RESERVED3`"] pub type RESERVED3_R = crate::R<u32, u32>; #[doc = "Write proxy for field `RESERVED3`"] pub struct RESERVED3_W<'a> { w: &'a mut W, } impl<'a> RESERVED3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1fff_ffff << 3)) | (((value as u32) & 0x1fff_ffff) << 3); self.w } } #[doc = "Reader of field `A_GREATER_THAN_B`"] pub type A_GREATER_THAN_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_GREATER_THAN_B`"] pub struct A_GREATER_THAN_B_W<'a> { w: &'a mut W, } impl<'a> A_GREATER_THAN_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `A_LESS_THAN_B`"] pub type A_LESS_THAN_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_LESS_THAN_B`"] pub struct A_LESS_THAN_B_W<'a> { w: &'a mut W, } impl<'a> A_LESS_THAN_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `A_EQUALS_B`"] pub type A_EQUALS_B_R = crate::R<bool, bool>; #[doc = "Write proxy for field `A_EQUALS_B`"] pub struct A_EQUALS_B_W<'a> { w: &'a mut W, } impl<'a> A_EQUALS_B_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a
ls_b(&mut self) -> A_EQUALS_B_W { A_EQUALS_B_W { w: self } } }
mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bits 3:31 - 31:3\\] Ignore on read"] #[inline(always)] pub fn reserved3(&self) -> RESERVED3_R { RESERVED3_R::new(((self.bits >> 3) & 0x1fff_ffff) as u32) } #[doc = "Bit 2 - 2:2\\] Vector_A is greater than Vector_B"] #[inline(always)] pub fn a_greater_than_b(&self) -> A_GREATER_THAN_B_R { A_GREATER_THAN_B_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 1 - 1:1\\] Vector_A is less than Vector_B"] #[inline(always)] pub fn a_less_than_b(&self) -> A_LESS_THAN_B_R { A_LESS_THAN_B_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - 0:0\\] Vector_A is equal to Vector_B"] #[inline(always)] pub fn a_equals_b(&self) -> A_EQUALS_B_R { A_EQUALS_B_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bits 3:31 - 31:3\\] Ignore on read"] #[inline(always)] pub fn reserved3(&mut self) -> RESERVED3_W { RESERVED3_W { w: self } } #[doc = "Bit 2 - 2:2\\] Vector_A is greater than Vector_B"] #[inline(always)] pub fn a_greater_than_b(&mut self) -> A_GREATER_THAN_B_W { A_GREATER_THAN_B_W { w: self } } #[doc = "Bit 1 - 1:1\\] Vector_A is less than Vector_B"] #[inline(always)] pub fn a_less_than_b(&mut self) -> A_LESS_THAN_B_W { A_LESS_THAN_B_W { w: self } } #[doc = "Bit 0 - 0:0\\] Vector_A is equal to Vector_B"] #[inline(always)] pub fn a_equa
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 171992.5304185512 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 65686.33176443687 }, { "content": "#[doc = \"Reader of register VALUE\"]\n\npub type R = crate::R<u32, super::VALUE>;\n\n#[doc = \"Writer for register VALUE\"]\n\npub type W = crate::W<u32, super::VALUE>;\n\n#[doc = \"Register VALUE `reset()`'s with value 0xffff_ffff\"]\n\nimpl crate::ResetValue for super::VALUE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xffff_ffff\n\n }\n\n}\n\n#[doc = \"Reader of field `WDTVALUE`\"]\n\npub type WDTVALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `WDTVALUE`\"]\n\npub struct WDTVALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WDTVALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/wdt/value.rs", "rank": 3, "score": 61053.37581025073 }, { "content": "#[doc = \"Reader of register RESERVED3\"]\n\npub type R = crate::R<u32, super::RESERVED3>;\n\n#[doc = \"Writer for register RESERVED3\"]\n\npub type W = crate::W<u32, super::RESERVED3>;\n\n#[doc = \"Register RESERVED3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESERVED3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/uart1/reserved3.rs", "rank": 4, "score": 61040.03229957482 }, { "content": "#[doc = \"Reader of register RESERVED3\"]\n\npub type R = crate::R<u32, super::RESERVED3>;\n\n#[doc = \"Writer for register RESERVED3\"]\n\npub type W = crate::W<u32, super::RESERVED3>;\n\n#[doc = \"Register RESERVED3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESERVED3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/uart0/reserved3.rs", "rank": 5, "score": 61040.03229957482 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - 31:0\\\\]\n\nThis register contains the current count value of the timer.\"]\n\n #[inline(always)]\n\n pub fn wdtvalue(&self) -> WDTVALUE_R 
{\n\n WDTVALUE_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - 31:0\\\\]\n\nThis register contains the current count value of the timer.\"]\n\n #[inline(always)]\n\n pub fn wdtvalue(&mut self) -> WDTVALUE_W {\n\n WDTVALUE_W { w: self }\n\n }\n\n}\n", "file_path": "src/wdt/value.rs", "rank": 9, "score": 61021.71777374255 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 14, "score": 60448.31842569391 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 15, "score": 60435.18781493757 }, { "content": "#[doc = \"Reader of register MPU_TYPE\"]\n\npub type R = crate::R<u32, super::MPU_TYPE>;\n\n#[doc = \"Writer for register MPU_TYPE\"]\n\npub type W = crate::W<u32, super::MPU_TYPE>;\n\n#[doc = \"Register MPU_TYPE `reset()`'s with value 0x0800\"]\n\nimpl crate::ResetValue for super::MPU_TYPE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0800\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED24`\"]\n\npub type RESERVED24_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED24`\"]\n\npub struct RESERVED24_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED24_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 16, "score": 56863.86964735494 }, { "content": "#[doc = \"Reader of register FCFG_BNK_TYPE\"]\n\npub type R = crate::R<u32, super::FCFG_BNK_TYPE>;\n\n#[doc = \"Writer for register FCFG_BNK_TYPE\"]\n\npub type W = crate::W<u32, super::FCFG_BNK_TYPE>;\n\n#[doc = \"Register FCFG_BNK_TYPE `reset()`'s with value 0x04\"]\n\nimpl crate::ResetValue for super::FCFG_BNK_TYPE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x04\n\n }\n\n}\n\n#[doc = \"Reader of field `B7_TYPE`\"]\n\npub type B7_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B7_TYPE`\"]\n\npub struct B7_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B7_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 17, "score": 56861.26716827224 }, { "content": "impl<'a> B4_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B3_TYPE`\"]\n\npub type B3_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B3_TYPE`\"]\n\npub struct B3_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B3_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 12)) | (((value as u32) & 0x0f) << 12);\n\n self.w\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 18, "score": 56856.77792211683 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> B1_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);\n\n 
self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B0_TYPE`\"]\n\npub type B0_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B0_TYPE`\"]\n\npub struct B0_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B0_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 19, "score": 56856.13182653674 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B6_TYPE`\"]\n\npub type B6_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B6_TYPE`\"]\n\npub struct B6_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B6_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);\n\n self.w\n\n }\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 20, "score": 56854.55243612419 }, { "content": "#[doc = \"Reader of field `B5_TYPE`\"]\n\npub type B5_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B5_TYPE`\"]\n\npub struct B5_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B5_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 20)) | (((value as u32) & 0x0f) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B4_TYPE`\"]\n\npub type B4_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B4_TYPE`\"]\n\npub struct B4_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 21, "score": 56853.65623695384 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `B2_TYPE`\"]\n\npub type B2_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B2_TYPE`\"]\n\npub struct B2_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B2_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B1_TYPE`\"]\n\npub type B1_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B1_TYPE`\"]\n\npub struct B1_TYPE_W<'a> {\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 22, "score": 56853.24413580829 }, { "content": "impl<'a> RESERVED1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x7f << 1)) | (((value as u32) & 0x7f) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEPARATE`\"]\n\npub type SEPARATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEPARATE`\"]\n\npub struct SEPARATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEPARATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 23, "score": 56851.65208144454 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `IREGION`\"]\n\npub type 
IREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IREGION`\"]\n\npub struct IREGION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IREGION_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 24, "score": 56847.74122948297 }, { "content": "#[doc = \"Reader of field `DREGION`\"]\n\npub type DREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DREGION`\"]\n\npub struct DREGION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DREGION_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED1`\"]\n\npub type RESERVED1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED1`\"]\n\npub struct RESERVED1_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 25, "score": 56847.21826506014 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 24:31 - 31:24\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved24(&self) -> RESERVED24_R {\n\n RESERVED24_R::new(((self.bits >> 24) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 26, "score": 56833.13703270037 }, { "content": " }\n\n #[doc = \"Bits 4:7 - 7:4\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b1_type(&self) -> B1_TYPE_R {\n\n B1_TYPE_R::new(((self.bits >> 4) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 0:3 - 3:0\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b0_type(&self) -> B0_TYPE_R {\n\n B0_TYPE_R::new((self.bits & 0x0f) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 28:31 - 31:28\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b7_type(&mut self) -> B7_TYPE_W {\n\n B7_TYPE_W { w: self }\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 27, "score": 56812.91862775838 }, { "content": " self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 28:31 - 31:28\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b7_type(&self) -> B7_TYPE_R {\n\n B7_TYPE_R::new(((self.bits >> 28) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 24:27 - 27:24\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b6_type(&self) -> B6_TYPE_R {\n\n B6_TYPE_R::new(((self.bits >> 24) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 20:23 - 23:20\\\\]\n\nInternal. 
Only to be used through TI provided API.\"]\n\n #[inline(always)]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 28, "score": 56811.49302249188 }, { "content": " pub fn separate(&self) -> SEPARATE_R {\n\n SEPARATE_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 24:31 - 31:24\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved24(&mut self) -> RESERVED24_W {\n\n RESERVED24_W { w: self }\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\]\n\nThe processor core uses only a unified MPU, this field always reads 0x0.\"]\n\n #[inline(always)]\n\n pub fn iregion(&mut self) -> IREGION_W {\n\n IREGION_W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\]\n\nNumber of supported MPU regions field. This field reads 0x08 indicating eight MPU regions.\"]\n\n #[inline(always)]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 29, "score": 56805.51673021407 }, { "content": " }\n\n #[doc = \"Bits 24:27 - 27:24\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b6_type(&mut self) -> B6_TYPE_W {\n\n B6_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 20:23 - 23:20\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b5_type(&mut self) -> B5_TYPE_W {\n\n B5_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 16:19 - 19:16\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b4_type(&mut self) -> B4_TYPE_W {\n\n B4_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 12:15 - 15:12\\\\]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 30, "score": 56804.399430923775 }, { "content": "Internal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b3_type(&mut self) -> B3_TYPE_W {\n\n B3_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 8:11 - 11:8\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b2_type(&mut self) -> B2_TYPE_W {\n\n B2_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 4:7 - 7:4\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b1_type(&mut self) -> B1_TYPE_W {\n\n B1_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 0:3 - 3:0\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b0_type(&mut self) -> B0_TYPE_W {\n\n B0_TYPE_W { w: self }\n\n }\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 31, "score": 56804.21312188887 }, { "content": " pub fn b5_type(&self) -> B5_TYPE_R {\n\n B5_TYPE_R::new(((self.bits >> 20) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 16:19 - 19:16\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b4_type(&self) -> B4_TYPE_R {\n\n B4_TYPE_R::new(((self.bits >> 16) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 12:15 - 15:12\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b3_type(&self) -> B3_TYPE_R {\n\n B3_TYPE_R::new(((self.bits >> 12) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - 11:8\\\\]\n\nInternal. 
Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b2_type(&self) -> B2_TYPE_R {\n\n B2_TYPE_R::new(((self.bits >> 8) & 0x0f) as u8)\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 32, "score": 56798.81269031206 }, { "content": " pub fn dregion(&mut self) -> DREGION_W {\n\n DREGION_W { w: self }\n\n }\n\n #[doc = \"Bits 1:7 - 7:1\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved1(&mut self) -> RESERVED1_W {\n\n RESERVED1_W { w: self }\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\]\n\nThe processor core uses only a unified MPU, thus this field is always 0.\"]\n\n #[inline(always)]\n\n pub fn separate(&mut self) -> SEPARATE_W {\n\n SEPARATE_W { w: self }\n\n }\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 33, "score": 56795.850033473675 }, { "content": "The processor core uses only a unified MPU, this field always reads 0x0.\"]\n\n #[inline(always)]\n\n pub fn iregion(&self) -> IREGION_R {\n\n IREGION_R::new(((self.bits >> 16) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\]\n\nNumber of supported MPU regions field. This field reads 0x08 indicating eight MPU regions.\"]\n\n #[inline(always)]\n\n pub fn dregion(&self) -> DREGION_R {\n\n DREGION_R::new(((self.bits >> 8) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 1:7 - 7:1\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved1(&self) -> RESERVED1_R {\n\n RESERVED1_R::new(((self.bits >> 1) & 0x7f) as u8)\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\]\n\nThe processor core uses only a unified MPU, thus this field is always 0.\"]\n\n #[inline(always)]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 34, "score": 56789.382690554514 }, { "content": "#[doc = \"Reader of register SMIS\"]\n\npub type R = crate::R<u32, super::SMIS>;\n\n#[doc = \"Writer for register SMIS\"]\n\npub type W = crate::W<u32, super::SMIS>;\n\n#[doc = \"Register SMIS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SMIS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/smis.rs", "rank": 35, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO2PSEL\"]\n\npub type R = crate::R<u32, super::IO2PSEL>;\n\n#[doc = \"Writer for register IO2PSEL\"]\n\npub type W = crate::W<u32, super::IO2PSEL>;\n\n#[doc = \"Register IO2PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO2PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io2psel.rs", "rank": 36, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO6PSEL\"]\n\npub type R = crate::R<u32, super::IO6PSEL>;\n\n#[doc = \"Writer for register IO6PSEL\"]\n\npub type W = crate::W<u32, super::IO6PSEL>;\n\n#[doc = \"Register IO6PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO6PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader 
of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io6psel.rs", "rank": 37, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register TIMER2CLKSTAT\"]\n\npub type R = crate::R<u32, super::TIMER2CLKSTAT>;\n\n#[doc = \"Writer for register TIMER2CLKSTAT\"]\n\npub type W = crate::W<u32, super::TIMER2CLKSTAT>;\n\n#[doc = \"Register TIMER2CLKSTAT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMER2CLKSTAT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/timer2clkstat.rs", "rank": 38, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register DMACTL\"]\n\npub type R = crate::R<u32, super::DMACTL>;\n\n#[doc = \"Writer for register DMACTL\"]\n\npub type W = crate::W<u32, super::DMACTL>;\n\n#[doc = \"Register DMACTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DMACTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/dmactl.rs", "rank": 39, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO2PSEL\"]\n\npub type R = crate::R<u32, super::IO2PSEL>;\n\n#[doc = \"Writer for register IO2PSEL\"]\n\npub type W = crate::W<u32, super::IO2PSEL>;\n\n#[doc = \"Register IO2PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO2PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io2psel.rs", "rank": 40, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register RFCMODESEL\"]\n\npub type R = crate::R<u32, super::RFCMODESEL>;\n\n#[doc = \"Writer for register RFCMODESEL\"]\n\npub type W = crate::W<u32, super::RFCMODESEL>;\n\n#[doc = \"Register RFCMODESEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RFCMODESEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/rfcmodesel.rs", "rank": 41, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register CFG\"]\n\npub type R = crate::R<u32, super::CFG>;\n\n#[doc = \"Writer for register CFG\"]\n\npub 
type W = crate::W<u32, super::CFG>;\n\n#[doc = \"Register CFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpt0/cfg.rs", "rank": 42, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO0PSEL\"]\n\npub type R = crate::R<u32, super::IO0PSEL>;\n\n#[doc = \"Writer for register IO0PSEL\"]\n\npub type W = crate::W<u32, super::IO0PSEL>;\n\n#[doc = \"Register IO0PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO0PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io0psel.rs", "rank": 43, "score": 102.8054813112049 }, { "content": "#[doc = \"Reader of register IO1PSEL\"]\n\npub type R = crate::R<u32, super::IO1PSEL>;\n\n#[doc = \"Writer for register IO1PSEL\"]\n\npub type W = crate::W<u32, super::IO1PSEL>;\n\n#[doc = \"Register IO1PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO1PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io1psel.rs", "rank": 44, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register ACCSHIFT\"]\n\npub type R = crate::R<u32, super::ACCSHIFT>;\n\n#[doc = \"Writer for register ACCSHIFT\"]\n\npub type W = crate::W<u32, super::ACCSHIFT>;\n\n#[doc = \"Register ACCSHIFT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACCSHIFT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/accshift.rs", "rank": 45, "score": 102.80548131120486 }, { "content": "#[doc = \"Reader of register FMAC\"]\n\npub type R = crate::R<u32, super::FMAC>;\n\n#[doc = \"Writer for register FMAC\"]\n\npub type W = crate::W<u32, super::FMAC>;\n\n#[doc = \"Register FMAC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FMAC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fmac.rs", "rank": 46, 
"score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO5PSEL\"]\n\npub type R = crate::R<u32, super::IO5PSEL>;\n\n#[doc = \"Writer for register IO5PSEL\"]\n\npub type W = crate::W<u32, super::IO5PSEL>;\n\n#[doc = \"Register IO5PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO5PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io5psel.rs", "rank": 47, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register PWRCTL\"]\n\npub type R = crate::R<u32, super::PWRCTL>;\n\n#[doc = \"Writer for register PWRCTL\"]\n\npub type W = crate::W<u32, super::PWRCTL>;\n\n#[doc = \"Register PWRCTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PWRCTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_pmctl/pwrctl.rs", "rank": 48, "score": 102.8054813112049 }, { "content": "#[doc = \"Reader of register IO4PSEL\"]\n\npub type R = crate::R<u32, super::IO4PSEL>;\n\n#[doc = \"Writer for register IO4PSEL\"]\n\npub type W = crate::W<u32, super::IO4PSEL>;\n\n#[doc = \"Register IO4PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO4PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io4psel.rs", "rank": 49, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO4PSEL\"]\n\npub type R = crate::R<u32, super::IO4PSEL>;\n\n#[doc = \"Writer for register IO4PSEL\"]\n\npub type W = crate::W<u32, super::IO4PSEL>;\n\n#[doc = \"Register IO4PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO4PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io4psel.rs", "rank": 50, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO3PSEL\"]\n\npub type R = crate::R<u32, super::IO3PSEL>;\n\n#[doc = \"Writer for register IO3PSEL\"]\n\npub type W = crate::W<u32, super::IO3PSEL>;\n\n#[doc = \"Register IO3PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO3PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy 
for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io3psel.rs", "rank": 51, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register PDCTL0\"]\n\npub type R = crate::R<u32, super::PDCTL0>;\n\n#[doc = \"Writer for register PDCTL0\"]\n\npub type W = crate::W<u32, super::PDCTL0>;\n\n#[doc = \"Register PDCTL0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PDCTL0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/pdctl0.rs", "rank": 52, "score": 102.80548131120486 }, { "content": "#[doc = \"Reader of register EVSYNCRATE\"]\n\npub type R = crate::R<u32, super::EVSYNCRATE>;\n\n#[doc = \"Writer for register EVSYNCRATE\"]\n\npub type W = crate::W<u32, super::EVSYNCRATE>;\n\n#[doc = \"Register EVSYNCRATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVSYNCRATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/evsyncrate.rs", "rank": 53, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register FWFLAG\"]\n\npub type R = crate::R<u32, super::FWFLAG>;\n\n#[doc = \"Writer for register FWFLAG\"]\n\npub type W = crate::W<u32, super::FWFLAG>;\n\n#[doc = \"Register FWFLAG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FWFLAG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fwflag.rs", "rank": 54, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO5PSEL\"]\n\npub type R = crate::R<u32, super::IO5PSEL>;\n\n#[doc = \"Writer for register IO5PSEL\"]\n\npub type W = crate::W<u32, super::IO5PSEL>;\n\n#[doc = \"Register IO5PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO5PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io5psel.rs", "rank": 55, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO7PSEL\"]\n\npub type R = crate::R<u32, super::IO7PSEL>;\n\n#[doc = \"Writer for register IO7PSEL\"]\n\npub type W = crate::W<u32, super::IO7PSEL>;\n\n#[doc = \"Register IO7PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::IO7PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io7psel.rs", "rank": 56, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO6PSEL\"]\n\npub type R = crate::R<u32, super::IO6PSEL>;\n\n#[doc = \"Writer for register IO6PSEL\"]\n\npub type W = crate::W<u32, super::IO6PSEL>;\n\n#[doc = \"Register IO6PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO6PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io6psel.rs", "rank": 57, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO3PSEL\"]\n\npub type R = crate::R<u32, super::IO3PSEL>;\n\n#[doc = \"Writer for register IO3PSEL\"]\n\npub type W = crate::W<u32, super::IO3PSEL>;\n\n#[doc = \"Register IO3PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO3PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io3psel.rs", "rank": 58, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register CTL\"]\n\npub type R = crate::R<u32, super::CTL>;\n\n#[doc = \"Writer for register CTL\"]\n\npub type W = crate::W<u32, super::CTL>;\n\n#[doc = \"Register CTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/wdt/ctl.rs", "rank": 59, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register SRIS\"]\n\npub type R = crate::R<u32, super::SRIS>;\n\n#[doc = \"Writer for register SRIS\"]\n\npub type W = crate::W<u32, super::SRIS>;\n\n#[doc = \"Register SRIS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SRIS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/sris.rs", "rank": 60, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO7PSEL\"]\n\npub type R = crate::R<u32, super::IO7PSEL>;\n\n#[doc = 
\"Writer for register IO7PSEL\"]\n\npub type W = crate::W<u32, super::IO7PSEL>;\n\n#[doc = \"Register IO7PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO7PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io7psel.rs", "rank": 61, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register SSTAT\"]\n\npub type R = crate::R<u32, super::SSTAT>;\n\n#[doc = \"Writer for register SSTAT\"]\n\npub type W = crate::W<u32, super::SSTAT>;\n\n#[doc = \"Register SSTAT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SSTAT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/sstat.rs", "rank": 62, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO5PSEL\"]\n\npub type R = crate::R<u32, super::IO5PSEL>;\n\n#[doc = \"Writer for register IO5PSEL\"]\n\npub type W = crate::W<u32, super::IO5PSEL>;\n\n#[doc = \"Register IO5PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO5PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io5psel.rs", "rank": 63, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO6PSEL\"]\n\npub type R = crate::R<u32, super::IO6PSEL>;\n\n#[doc = \"Writer for register IO6PSEL\"]\n\npub type W = crate::W<u32, super::IO6PSEL>;\n\n#[doc = \"Register IO6PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO6PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io6psel.rs", "rank": 64, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register STMPCTL\"]\n\npub type R = crate::R<u32, super::STMPCTL>;\n\n#[doc = \"Writer for register STMPCTL\"]\n\npub type W = crate::W<u32, super::STMPCTL>;\n\n#[doc = \"Register STMPCTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::STMPCTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the 
field\"]\n", "file_path": "src/i2s0/stmpctl.rs", "rank": 65, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO7PSEL\"]\n\npub type R = crate::R<u32, super::IO7PSEL>;\n\n#[doc = \"Writer for register IO7PSEL\"]\n\npub type W = crate::W<u32, super::IO7PSEL>;\n\n#[doc = \"Register IO7PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO7PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io7psel.rs", "rank": 66, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO4PSEL\"]\n\npub type R = crate::R<u32, super::IO4PSEL>;\n\n#[doc = \"Writer for register IO4PSEL\"]\n\npub type W = crate::W<u32, super::IO4PSEL>;\n\n#[doc = \"Register IO4PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO4PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io4psel.rs", "rank": 67, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO3PSEL\"]\n\npub type R = crate::R<u32, super::IO3PSEL>;\n\n#[doc = \"Writer for register IO3PSEL\"]\n\npub type W = crate::W<u32, super::IO3PSEL>;\n\n#[doc = \"Register IO3PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO3PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io3psel.rs", "rank": 68, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO2PSEL\"]\n\npub type R = crate::R<u32, super::IO2PSEL>;\n\n#[doc = \"Writer for register IO2PSEL\"]\n\npub type W = crate::W<u32, super::IO2PSEL>;\n\n#[doc = \"Register IO2PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO2PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io2psel.rs", "rank": 69, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO6PSEL\"]\n\npub type R = crate::R<u32, super::IO6PSEL>;\n\n#[doc = \"Writer for register IO6PSEL\"]\n\npub type W = crate::W<u32, super::IO6PSEL>;\n\n#[doc = \"Register IO6PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO6PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field 
`RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io6psel.rs", "rank": 70, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register SWEVSET\"]\n\npub type R = crate::R<u32, super::SWEVSET>;\n\n#[doc = \"Writer for register SWEVSET\"]\n\npub type W = crate::W<u32, super::SWEVSET>;\n\n#[doc = \"Register SWEVSET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SWEVSET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/swevset.rs", "rank": 71, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register AIFWCLKSRC\"]\n\npub type R = crate::R<u32, super::AIFWCLKSRC>;\n\n#[doc = \"Writer for register AIFWCLKSRC\"]\n\npub type W = crate::W<u32, super::AIFWCLKSRC>;\n\n#[doc = \"Register AIFWCLKSRC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::AIFWCLKSRC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2s0/aifwclksrc.rs", "rank": 72, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO1PSEL\"]\n\npub type R = crate::R<u32, super::IO1PSEL>;\n\n#[doc = \"Writer for register IO1PSEL\"]\n\npub type W = crate::W<u32, super::IO1PSEL>;\n\n#[doc = \"Register IO1PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO1PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io1psel.rs", "rank": 73, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO0PSEL\"]\n\npub type R = crate::R<u32, super::IO0PSEL>;\n\n#[doc = \"Writer for register IO0PSEL\"]\n\npub type W = crate::W<u32, super::IO0PSEL>;\n\n#[doc = \"Register IO0PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO0PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio2/io0psel.rs", "rank": 74, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register SIMR\"]\n\npub type R = crate::R<u32, super::SIMR>;\n\n#[doc = \"Writer for register SIMR\"]\n\npub type W = crate::W<u32, 
super::SIMR>;\n\n#[doc = \"Register SIMR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SIMR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/simr.rs", "rank": 75, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO1PSEL\"]\n\npub type R = crate::R<u32, super::IO1PSEL>;\n\n#[doc = \"Writer for register IO1PSEL\"]\n\npub type W = crate::W<u32, super::IO1PSEL>;\n\n#[doc = \"Register IO1PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO1PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io1psel.rs", "rank": 76, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register SYSBUSCLKDIV\"]\n\npub type R = crate::R<u32, super::SYSBUSCLKDIV>;\n\n#[doc = \"Writer for register SYSBUSCLKDIV\"]\n\npub type W = crate::W<u32, super::SYSBUSCLKDIV>;\n\n#[doc = \"Register SYSBUSCLKDIV `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SYSBUSCLKDIV {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/sysbusclkdiv.rs", "rank": 77, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register CFG\"]\n\npub type R = crate::R<u32, super::CFG>;\n\n#[doc = \"Writer for register CFG\"]\n\npub type W = crate::W<u32, super::CFG>;\n\n#[doc = \"Register CFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpt3/cfg.rs", "rank": 78, "score": 102.8054813112049 }, { "content": "#[doc = \"Reader of register IO2PSEL\"]\n\npub type R = crate::R<u32, super::IO2PSEL>;\n\n#[doc = \"Writer for register IO2PSEL\"]\n\npub type W = crate::W<u32, super::IO2PSEL>;\n\n#[doc = \"Register IO2PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO2PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io2psel.rs", "rank": 79, 
"score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register FWLOCK\"]\n\npub type R = crate::R<u32, super::FWLOCK>;\n\n#[doc = \"Writer for register FWLOCK\"]\n\npub type W = crate::W<u32, super::FWLOCK>;\n\n#[doc = \"Register FWLOCK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FWLOCK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fwlock.rs", "rank": 80, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register TIMER2CLKCTL\"]\n\npub type R = crate::R<u32, super::TIMER2CLKCTL>;\n\n#[doc = \"Writer for register TIMER2CLKCTL\"]\n\npub type W = crate::W<u32, super::TIMER2CLKCTL>;\n\n#[doc = \"Register TIMER2CLKCTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMER2CLKCTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/timer2clkctl.rs", "rank": 81, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register IO4PSEL\"]\n\npub type R = crate::R<u32, super::IO4PSEL>;\n\n#[doc = \"Writer for register IO4PSEL\"]\n\npub type W = crate::W<u32, super::IO4PSEL>;\n\n#[doc = \"Register IO4PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO4PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io4psel.rs", "rank": 82, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO7PSEL\"]\n\npub type R = crate::R<u32, super::IO7PSEL>;\n\n#[doc = \"Writer for register IO7PSEL\"]\n\npub type W = crate::W<u32, super::IO7PSEL>;\n\n#[doc = \"Register IO7PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO7PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io7psel.rs", "rank": 83, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register SWPWRPROF\"]\n\npub type R = crate::R<u32, super::SWPWRPROF>;\n\n#[doc = \"Writer for register SWPWRPROF\"]\n\npub type W = crate::W<u32, super::SWPWRPROF>;\n\n#[doc = \"Register SWPWRPROF `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SWPWRPROF {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = 
crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/swpwrprof.rs", "rank": 84, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO3PSEL\"]\n\npub type R = crate::R<u32, super::IO3PSEL>;\n\n#[doc = \"Writer for register IO3PSEL\"]\n\npub type W = crate::W<u32, super::IO3PSEL>;\n\n#[doc = \"Register IO3PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO3PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io3psel.rs", "rank": 85, "score": 102.80548131120486 }, { "content": "#[doc = \"Reader of register IO0PSEL\"]\n\npub type R = crate::R<u32, super::IO0PSEL>;\n\n#[doc = \"Writer for register IO0PSEL\"]\n\npub type W = crate::W<u32, super::IO0PSEL>;\n\n#[doc = \"Register IO0PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO0PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio1/io0psel.rs", "rank": 86, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register CFG\"]\n\npub type R = crate::R<u32, super::CFG>;\n\n#[doc = \"Writer for register CFG\"]\n\npub type W = crate::W<u32, super::CFG>;\n\n#[doc = \"Register CFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpt1/cfg.rs", "rank": 87, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register PDSTAT0\"]\n\npub type R = crate::R<u32, super::PDSTAT0>;\n\n#[doc = \"Writer for register PDSTAT0\"]\n\npub type W = crate::W<u32, super::PDSTAT0>;\n\n#[doc = \"Register PDSTAT0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PDSTAT0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/pdstat0.rs", "rank": 88, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO0PSEL\"]\n\npub type R = crate::R<u32, super::IO0PSEL>;\n\n#[doc = \"Writer for register IO0PSEL\"]\n\npub type W = crate::W<u32, super::IO0PSEL>;\n\n#[doc = \"Register IO0PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::IO0PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io0psel.rs", "rank": 89, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO1PSEL\"]\n\npub type R = crate::R<u32, super::IO1PSEL>;\n\n#[doc = \"Writer for register IO1PSEL\"]\n\npub type W = crate::W<u32, super::IO1PSEL>;\n\n#[doc = \"Register IO1PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO1PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio0/io1psel.rs", "rank": 90, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register CFG\"]\n\npub type R = crate::R<u32, super::CFG>;\n\n#[doc = \"Writer for register CFG\"]\n\npub type W = crate::W<u32, super::CFG>;\n\n#[doc = \"Register CFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpt2/cfg.rs", "rank": 91, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register FBMODE\"]\n\npub type R = crate::R<u32, super::FBMODE>;\n\n#[doc = \"Writer for register FBMODE\"]\n\npub type W = crate::W<u32, super::FBMODE>;\n\n#[doc = \"Register FBMODE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FBMODE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fbmode.rs", "rank": 92, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register IO5PSEL\"]\n\npub type R = crate::R<u32, super::IO5PSEL>;\n\n#[doc = \"Writer for register IO5PSEL\"]\n\npub type W = crate::W<u32, super::IO5PSEL>;\n\n#[doc = \"Register IO5PSEL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IO5PSEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_aiodio3/io5psel.rs", "rank": 93, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register AUTOTAKE\"]\n\npub type R = crate::R<u32, 
super::AUTOTAKE>;\n\n#[doc = \"Writer for register AUTOTAKE\"]\n\npub type W = crate::W<u32, super::AUTOTAKE>;\n\n#[doc = \"Register AUTOTAKE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::AUTOTAKE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_smph/autotake.rs", "rank": 94, "score": 102.80548131120487 }, { "content": "#[doc = \"Reader of register SICR\"]\n\npub type R = crate::R<u32, super::SICR>;\n\n#[doc = \"Writer for register SICR\"]\n\npub type W = crate::W<u32, super::SICR>;\n\n#[doc = \"Register SICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/sicr.rs", "rank": 95, "score": 102.80548131120489 }, { "content": "#[doc = \"Reader of register ICTR\"]\n\npub type R = crate::R<u32, super::ICTR>;\n\n#[doc = \"Writer for register ICTR\"]\n\npub type W = crate::W<u32, super::ICTR>;\n\n#[doc = \"Register ICTR `reset()`'s with value 0x01\"]\n\nimpl crate::ResetValue for super::ICTR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x01\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_scs/ictr.rs", "rank": 96, "score": 102.15789257423107 }, { "content": "#[doc = \"Reader of register IOSTRMED\"]\n\npub type R = crate::R<u32, super::IOSTRMED>;\n\n#[doc = \"Writer for register IOSTRMED\"]\n\npub type W = crate::W<u32, super::IOSTRMED>;\n\n#[doc = \"Register IOSTRMED `reset()`'s with value 0x06\"]\n\nimpl crate::ResetValue for super::IOSTRMED {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x06\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_ioc/iostrmed.rs", "rank": 97, "score": 102.15789257423108 }, { "content": "#[doc = \"Reader of register IOSTRMIN\"]\n\npub type R = crate::R<u32, super::IOSTRMIN>;\n\n#[doc = \"Writer for register IOSTRMIN\"]\n\npub type W = crate::W<u32, super::IOSTRMIN>;\n\n#[doc = \"Register IOSTRMIN `reset()`'s with value 0x03\"]\n\nimpl crate::ResetValue for super::IOSTRMIN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x03\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = 
r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_ioc/iostrmin.rs", "rank": 98, "score": 102.15789257423108 }, { "content": "#[doc = \"Reader of register IOSTRMAX\"]\n\npub type R = crate::R<u32, super::IOSTRMAX>;\n\n#[doc = \"Writer for register IOSTRMAX\"]\n\npub type W = crate::W<u32, super::IOSTRMAX>;\n\n#[doc = \"Register IOSTRMAX `reset()`'s with value 0x05\"]\n\nimpl crate::ResetValue for super::IOSTRMAX {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x05\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED3`\"]\n\npub type RESERVED3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED3`\"]\n\npub struct RESERVED3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_ioc/iostrmax.rs", "rank": 99, "score": 102.15789257423107 } ]
Rust
src/p2p/session.rs
placefortea/teatree
7a56755a428fe63ee753df67a7c71d8f668e17ec
use bytes::{BufMut, BytesMut}; use futures::stream::SplitSink; use futures::Sink; use std::collections::HashMap; use std::net::SocketAddr; use tokio::codec::BytesCodec; use tokio::net::UdpFramed; use crate::actor::prelude::*; use crate::primitives::functions::{try_resend_times, DEFAULT_TIMES}; use crate::traits::actor::P2PBridgeActor; use super::codec::{P2PBody, P2PHead, HEAD_LENGTH}; use super::content::P2PContent; use super::p2p::P2PActor; #[derive(Clone)] pub struct P2PMessage(pub P2PHead, pub P2PContent, pub SocketAddr); impl Message for P2PMessage { type Result = (); } #[derive(Clone)] pub struct CodecMessage(pub BytesMut, pub SocketAddr); impl Message for CodecMessage { type Result = (); } #[derive(Clone)] pub(crate) struct P2PAddrMessage<A: P2PBridgeActor>(pub Addr<P2PActor<A>>); impl<A: P2PBridgeActor> Message for P2PAddrMessage<A> { type Result = (); } pub struct P2PSessionActor<A: P2PBridgeActor> { pub sinks: Vec<SplitSink<UdpFramed<BytesCodec>>>, pub p2p_addr: Option<Addr<P2PActor<A>>>, pub waitings: Vec<(P2PHead, P2PBody, SocketAddr)>, pub receivings: HashMap<[u8; 8], Vec<u8>>, } impl<A: P2PBridgeActor> P2PSessionActor<A> { fn send_udp( &mut self, mut bytes: Vec<u8>, mut prev_sign: [u8; 8], self_sign: [u8; 8], socket: SocketAddr, ctx: &mut Context<Self>, ) { let mut send_bytes = vec![]; let (mut now, next, next_sign) = if bytes.len() > 65400 { let now = bytes.drain(0..65400).as_slice().into(); (now, bytes, rand::random()) } else { (bytes, vec![], self_sign.clone()) }; send_bytes.extend_from_slice(&mut prev_sign); send_bytes.extend_from_slice(&mut self_sign.clone()); send_bytes.extend_from_slice(&mut next_sign.clone()); send_bytes.append(&mut now); let mut dst = BytesMut::new(); dst.reserve(send_bytes.len()); dst.put(send_bytes); self.sinks.pop().and_then(|sink| { let _ = sink .send((dst.into(), socket.clone())) .into_actor(self) .then(move |res, act, ctx| { match res { Ok(sink) => { act.sinks.push(sink); if !next.is_empty() { act.send_udp(next, self_sign, next_sign, socket, ctx); } } Err(_) => panic!("DEBUG: NETWORK HAVE ERROR"), } actor_ok(()) }) .wait(ctx); Some(()) }); } } impl<A: P2PBridgeActor> Actor for P2PSessionActor<A> { type Context = Context<Self>; } impl<A: P2PBridgeActor> Handler<P2PAddrMessage<A>> for P2PSessionActor<A> { type Result = (); fn handle(&mut self, msg: P2PAddrMessage<A>, _ctx: &mut Context<Self>) { self.p2p_addr = Some(msg.0); } } impl<A: P2PBridgeActor> StreamHandler<CodecMessage, std::io::Error> for P2PSessionActor<A> { fn handle(&mut self, msg: CodecMessage, _ctx: &mut Context<Self>) { let (mut src, socket) = (msg.0, msg.1); if src.len() < 16 { return; } let (head_sign, new_data) = src.split_at_mut(24); let (prev, me_next) = head_sign.split_at_mut(8); let (me, next) = me_next.split_at_mut(8); let mut prev_sign = [0u8; 8]; prev_sign.copy_from_slice(prev); let mut sign = [0u8; 8]; sign.copy_from_slice(me); let mut next_sign = [0u8; 8]; next_sign.copy_from_slice(next); let mut data = vec![]; if let Some(mut prev_data) = self.receivings.remove(&prev_sign) { prev_sign = sign; data.append(&mut prev_data); } data.extend_from_slice(new_data); if let Some(mut next_data) = self.receivings.remove(&next_sign) { data.append(&mut next_data); } let head = { if data.len() < HEAD_LENGTH || prev_sign != sign { self.receivings.insert(sign, data); return; } P2PHead::decode(data.as_ref()) }; let size = head.len as usize; if data.len() >= size + HEAD_LENGTH { let (_, data) = data.split_at_mut(HEAD_LENGTH); let (buf, _) = data.split_at_mut(size); let content = 
bincode::deserialize(buf).unwrap_or(P2PContent::None); if self.p2p_addr.is_some() { let _ = try_resend_times( self.p2p_addr.clone().unwrap(), P2PMessage(head, content, socket), DEFAULT_TIMES, ) .map_err(|_| { println!("Send Message to p2p fail"); }); } } else { self.receivings.insert(sign, data); } } } impl<A: P2PBridgeActor> Handler<P2PMessage> for P2PSessionActor<A> { type Result = (); fn handle(&mut self, msg: P2PMessage, ctx: &mut Context<Self>) { self.waitings.push((msg.0, P2PBody(msg.1), msg.2)); if self.sinks.is_empty() { return; } while !self.waitings.is_empty() { let w = self.waitings.remove(0); if self.sinks.is_empty() { self.waitings.push(w); break; } let (mut head, body, socket) = (w.0, w.1, w.2); let mut body_bytes: Vec<u8> = bincode::serialize(&body).unwrap_or(vec![]); head.update_len(body_bytes.len() as u32); let mut head_bytes = head.encode().to_vec(); let mut bytes = vec![]; bytes.append(&mut head_bytes); bytes.append(&mut body_bytes); let sign: [u8; 8] = rand::random(); self.send_udp(bytes, sign.clone(), sign, socket, ctx); } } }
use bytes::{BufMut, BytesMut}; use futures::stream::SplitSink; use futures::Sink; use std::collections::HashMap; use std::net::SocketAddr; use tokio::codec::BytesCodec; use tokio::net::UdpFramed; use crate::actor::prelude::*; use crate::primitives::functions::{try_resend_times, DEFAULT_TIMES}; use crate::traits::actor::P2PBridgeActor; use super::codec::{P2PBody, P2PHead, HEAD_LENGTH}; use super::content::P2PContent; use super::p2p::P2PActor; #[derive(Clone)] pub struct P2PMessage(pub P2PHead, pub P2PContent, pub SocketAddr); impl Message for P2PMessage { type Result = (); } #[derive(Clone)] pub struct CodecMessage(pub BytesMut, pub SocketAddr); impl Message for CodecMessage { type Result = (); } #[derive(Clone)] pub(crate) struct P2PAddrMessage<A: P2PBridgeActor>(pub Addr<P2PActor<A>>); impl<A: P2PBridgeActor> Message for P2PAddrMessage<A> { type Result = (); } pub struct P2PSessionActor<A: P2PBridgeActor> { pub sinks: Vec<SplitSink<UdpFramed<BytesCodec>>>, pub p2p_addr: Option<Addr<P2PActor<A>>>, pub waitings: Vec<(P2PHead, P2PBody, SocketAddr)>, pub receivings: HashMap<[u8; 8], Vec<u8>>, } impl<A: P2PBridgeActor> P2PSessionActor<A> { fn send_udp( &mut self, mut bytes: Vec<u8>, mut prev_sign: [u8; 8], self_sign: [u8; 8], socket: SocketAddr, ctx: &mut Context<Self>, ) { let mut send_bytes = vec![]; let (mut now, next, next_sign) = if bytes.len() > 65400 { let now = bytes.drain(0..65400).as_slice().into(); (now, bytes, rand::random()) } else { (bytes, vec![], self_sign.clone()) }; send_bytes.extend_from_slice(&mut prev_sign); send_bytes.extend_from_slice(&mut self_sign.clone()); send_bytes.extend_from_slice(&mut next_sign.clone()); send_bytes.append(&mut now); let mut dst = BytesMut::new(); dst.reserve(send_bytes.len()); dst.put(send_bytes); self.sinks.pop().and_then(|sink| { let _ = sink .send((dst.into(), socket.clone())) .into_actor(self) .then(move |res, act, ctx| { match res { Ok(sink) => { act.sinks.push(sink);
} Err(_) => panic!("DEBUG: NETWORK HAVE ERROR"), } actor_ok(()) }) .wait(ctx); Some(()) }); } } impl<A: P2PBridgeActor> Actor for P2PSessionActor<A> { type Context = Context<Self>; } impl<A: P2PBridgeActor> Handler<P2PAddrMessage<A>> for P2PSessionActor<A> { type Result = (); fn handle(&mut self, msg: P2PAddrMessage<A>, _ctx: &mut Context<Self>) { self.p2p_addr = Some(msg.0); } } impl<A: P2PBridgeActor> StreamHandler<CodecMessage, std::io::Error> for P2PSessionActor<A> { fn handle(&mut self, msg: CodecMessage, _ctx: &mut Context<Self>) { let (mut src, socket) = (msg.0, msg.1); if src.len() < 16 { return; } let (head_sign, new_data) = src.split_at_mut(24); let (prev, me_next) = head_sign.split_at_mut(8); let (me, next) = me_next.split_at_mut(8); let mut prev_sign = [0u8; 8]; prev_sign.copy_from_slice(prev); let mut sign = [0u8; 8]; sign.copy_from_slice(me); let mut next_sign = [0u8; 8]; next_sign.copy_from_slice(next); let mut data = vec![]; if let Some(mut prev_data) = self.receivings.remove(&prev_sign) { prev_sign = sign; data.append(&mut prev_data); } data.extend_from_slice(new_data); if let Some(mut next_data) = self.receivings.remove(&next_sign) { data.append(&mut next_data); } let head = { if data.len() < HEAD_LENGTH || prev_sign != sign { self.receivings.insert(sign, data); return; } P2PHead::decode(data.as_ref()) }; let size = head.len as usize; if data.len() >= size + HEAD_LENGTH { let (_, data) = data.split_at_mut(HEAD_LENGTH); let (buf, _) = data.split_at_mut(size); let content = bincode::deserialize(buf).unwrap_or(P2PContent::None); if self.p2p_addr.is_some() { let _ = try_resend_times( self.p2p_addr.clone().unwrap(), P2PMessage(head, content, socket), DEFAULT_TIMES, ) .map_err(|_| { println!("Send Message to p2p fail"); }); } } else { self.receivings.insert(sign, data); } } } impl<A: P2PBridgeActor> Handler<P2PMessage> for P2PSessionActor<A> { type Result = (); fn handle(&mut self, msg: P2PMessage, ctx: &mut Context<Self>) { self.waitings.push((msg.0, P2PBody(msg.1), msg.2)); if self.sinks.is_empty() { return; } while !self.waitings.is_empty() { let w = self.waitings.remove(0); if self.sinks.is_empty() { self.waitings.push(w); break; } let (mut head, body, socket) = (w.0, w.1, w.2); let mut body_bytes: Vec<u8> = bincode::serialize(&body).unwrap_or(vec![]); head.update_len(body_bytes.len() as u32); let mut head_bytes = head.encode().to_vec(); let mut bytes = vec![]; bytes.append(&mut head_bytes); bytes.append(&mut body_bytes); let sign: [u8; 8] = rand::random(); self.send_udp(bytes, sign.clone(), sign, socket, ctx); } } }
if !next.is_empty() { act.send_udp(next, self_sign, next_sign, socket, ctx); }
if_condition
[ { "content": "pub fn parse_http_body_json(bytes: &mut BytesMut) -> Result<Value, ()> {\n\n let mut vec: Vec<u8> = Vec::new();\n\n\n\n for (i, v) in (&bytes).iter().enumerate() {\n\n if v == &13 || v == &10 {\n\n vec.push(v.clone())\n\n } else {\n\n if vec == [13, 10, 13, 10] {\n\n return serde_json::from_slice(&bytes.split_off(i)[..]).or(Err(()));\n\n } else {\n\n if vec.len() > 0 {\n\n vec.clear();\n\n }\n\n }\n\n }\n\n }\n\n\n\n return Err(());\n\n}\n\n\n", "file_path": "src/primitives/functions.rs", "rank": 0, "score": 212687.45225000038 }, { "content": "pub fn try_resend_times<A, M>(addr: Addr<A>, message: M, times: u8) -> Result<(), ()>\n\nwhere\n\n A: 'static + Actor + Handler<M>,\n\n M: 'static + Message + std::marker::Send + Clone,\n\n <M as Message>::Result: std::marker::Send,\n\n <A as Actor>::Context: ToEnvelope<A, M>,\n\n{\n\n if times > 0 {\n\n match addr.try_send(message.clone()) {\n\n Ok(_) => Ok(()),\n\n Err(_) => {\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n try_resend_times(addr, message, times - 1)\n\n }\n\n }\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/primitives/functions.rs", "rank": 1, "score": 164420.80424819211 }, { "content": "pub fn rpc_start<A: RPCBridgeActor>(rpc_socket: SocketAddr) -> Addr<RPCActor<A>> {\n\n // start rpc actor\n\n let rpc_addr = RPCActor::create(|ctx: &mut Context<RPCActor<A>>| {\n\n ctx.set_mailbox_capacity(100);\n\n RPCActor::load()\n\n });\n\n\n\n // listen RPC TCP socket\n\n let listener =\n\n TcpListener::bind(&rpc_socket).expect(&format!(\"RPC Socket bind: {} fail!\", rpc_socket));\n\n\n\n let new_rpc_addr = rpc_addr.clone();\n\n\n\n println!(\"DEBUG: RPC listen: {}\", rpc_socket);\n\n // start rpc session actor\n\n RPCListenActor::create(|ctx| {\n\n ctx.set_mailbox_capacity(100);\n\n ctx.add_message_stream(listener.incoming().map_err(|_| ()).map(|st| {\n\n let addr = st.peer_addr().unwrap();\n\n RPCTcpConnectMessage(st, addr)\n\n }));\n\n RPCListenActor {\n\n rpc_addr: new_rpc_addr,\n\n }\n\n });\n\n\n\n rpc_addr\n\n}\n", "file_path": "src/rpc/mod.rs", "rank": 2, "score": 115960.00607953055 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct Socket {\n\n ip: IpAddr,\n\n port: u16,\n\n}\n\n\n\nimpl Socket {\n\n fn parse(&self) -> SocketAddr {\n\n SocketAddr::new(self.ip, self.port)\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 3, "score": 112528.64186097923 }, { "content": "pub fn p2p_start<B: P2PBridgeActor>(\n\n p2p_socket: SocketAddr,\n\n psk: Option<PrivateKey>,\n\n) -> Addr<P2PActor<B>> {\n\n // bind to udp\n\n let sock =\n\n UdpSocket::bind(&p2p_socket).expect(&format!(\"P2P Socket bind: {} fail!\", p2p_socket));\n\n\n\n // start p2p session\n\n let (sink, stream) = UdpFramed::new(sock, BytesCodec::new()).split();\n\n let session_addr = P2PSessionActor::create(|ctx| {\n\n ctx.set_mailbox_capacity(100);\n\n ctx.add_stream(stream.map(|(data, sender)| CodecMessage(data, sender)));\n\n P2PSessionActor {\n\n sinks: vec![sink],\n\n p2p_addr: None,\n\n waitings: vec![],\n\n receivings: Default::default(),\n\n }\n\n });\n\n\n\n println!(\"DEBUG: P2P listen: {}\", p2p_socket);\n\n // start p2p actor\n\n P2PActor::create(|ctx| {\n\n ctx.set_mailbox_capacity(100);\n\n P2PActor::load(session_addr, psk)\n\n })\n\n}\n", "file_path": "src/p2p/mod.rs", "rank": 4, "score": 99384.08288715717 }, { "content": "pub fn network_start(\n\n p2p_socket: SocketAddr,\n\n rpc_socket: SocketAddr,\n\n psk: Option<PrivateKey>,\n\n) -> Addr<NetworkBridgeActor> {\n\n let p2p_addr = 
p2p_start::<NetworkBridgeActor>(p2p_socket, psk);\n\n let rpc_addr = rpc_start::<NetworkBridgeActor>(rpc_socket);\n\n\n\n NetworkBridgeActor::create(|ctx| {\n\n ctx.set_mailbox_capacity(100);\n\n NetworkBridgeActor::load(p2p_addr, rpc_addr)\n\n })\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 97014.35863694051 }, { "content": "pub trait P2PBridgeActor<R = Context<Self>>\n\nwhere\n\n Self: Clone\n\n + Actor<Context = R>\n\n + Handler<ReceiveEventMessage>\n\n + Handler<ReceivePeerJoinMessage>\n\n + Handler<ReceivePeerLeaveMessage>\n\n + Handler<ReceivePeerJoinResultMessage>,\n\n R: ActorContext\n\n + ToEnvelope<Self, ReceiveEventMessage>\n\n + ToEnvelope<Self, ReceivePeerJoinMessage>\n\n + ToEnvelope<Self, ReceivePeerLeaveMessage>\n\n + ToEnvelope<Self, ReceivePeerJoinResultMessage>,\n\n{\n\n}\n", "file_path": "src/traits/actor/p2p_bridge.rs", "rank": 6, "score": 95235.04753255587 }, { "content": "pub fn system_init() -> SystemRunner {\n\n System::new(\"Teatree\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 90820.41390926893 }, { "content": "pub fn system_run(runner: SystemRunner) {\n\n let _ = runner.run();\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 87136.39527992108 }, { "content": "pub fn get_default_storage_path() -> PathBuf {\n\n #[cfg(feature = \"dev\")]\n\n let mut path = PathBuf::from(\"./\");\n\n\n\n #[cfg(not(feature = \"dev\"))]\n\n let mut path = if dirs::home_dir().is_some() {\n\n dirs::home_dir().unwrap()\n\n } else {\n\n PathBuf::from(\"./\")\n\n };\n\n\n\n path.push(DEFAULT_STORAGE_DIR_NAME);\n\n path\n\n}\n", "file_path": "src/primitives/functions.rs", "rank": 9, "score": 85511.58416319192 }, { "content": "pub trait BridgeActor<R = Context<Self>>\n\nwhere\n\n Self: Actor<Context = R>\n\n + Handler<EventMessage>\n\n + Handler<PeerJoinMessage>\n\n + Handler<PeerJoinResultMessage>\n\n + Handler<PeerLeaveMessage>\n\n + Handler<LocalMessage>\n\n + Handler<UpperMessage>\n\n + Handler<LowerMessage>\n\n + Handler<LevelPermissionMessage>\n\n + Handler<LocalResponseMessage>\n\n + Handler<UpperResponseMessage>\n\n + Handler<LowerResponseMessage>\n\n + Handler<LevelPermissionResponseMessage>,\n\n\n\n R: ActorContext\n\n + ToEnvelope<Self, EventMessage>\n\n + ToEnvelope<Self, PeerJoinMessage>\n\n + ToEnvelope<Self, PeerJoinResultMessage>\n", "file_path": "src/traits/actor/bridge.rs", "rank": 10, "score": 83447.11504786211 }, { "content": "pub trait MultipleBridgeActor<R = Context<Self>>\n\nwhere\n\n Self: Actor<Context = R>\n\n + Handler<MultipleEventMessage>\n\n + Handler<MultiplePeerJoinMessage>\n\n + Handler<MultiplePeerJoinResultMessage>\n\n + Handler<MultiplePeerLeaveMessage>\n\n + Handler<MultipleLocalMessage>\n\n + Handler<MultipleUpperMessage>\n\n + Handler<MultipleLowerMessage>\n\n + Handler<MultipleLevelPermissionMessage>\n\n + Handler<MultipleLocalResponseMessage>\n\n + Handler<MultipleUpperResponseMessage>\n\n + Handler<MultipleLowerResponseMessage>\n\n + Handler<MultipleLevelPermissionResponseMessage>,\n\n\n\n R: ActorContext\n\n + ToEnvelope<Self, MultipleEventMessage>\n\n + ToEnvelope<Self, MultiplePeerJoinMessage>\n\n + ToEnvelope<Self, MultiplePeerJoinResultMessage>\n", "file_path": "src/traits/actor/multiple_bridge.rs", "rank": 11, "score": 80386.2324634202 }, { "content": "pub trait RPCBridgeActor<R = Context<Self>>\n\nwhere\n\n Self: Clone\n\n + Actor<Context = R>\n\n + Handler<ReceiveLocalMessage>\n\n + Handler<ReceiveUpperMessage>\n\n + Handler<ReceiveLowerMessage>\n\n + Handler<ReceiveLevelPermissionMessage>\n\n + 
Handler<ReceiveLocalResponseMessage>\n\n + Handler<ReceiveUpperResponseMessage>\n\n + Handler<ReceiveLowerResponseMessage>\n\n + Handler<ReceiveLevelPermissionResponseMessage>,\n\n R: ActorContext\n\n + ToEnvelope<Self, ReceiveLocalMessage>\n\n + ToEnvelope<Self, ReceiveUpperMessage>\n\n + ToEnvelope<Self, ReceiveLowerMessage>\n\n + ToEnvelope<Self, ReceiveLevelPermissionMessage>\n\n + ToEnvelope<Self, ReceiveLocalResponseMessage>\n\n + ToEnvelope<Self, ReceiveUpperResponseMessage>\n\n + ToEnvelope<Self, ReceiveLowerResponseMessage>\n\n + ToEnvelope<Self, ReceiveLevelPermissionResponseMessage>,\n\n{\n\n}\n", "file_path": "src/traits/actor/rpc_bridge.rs", "rank": 12, "score": 80386.2324634202 }, { "content": "pub trait Message: Clone + Send + Debug + Serialize + DeserializeOwned {}\n", "file_path": "src/traits/propose/message.rs", "rank": 13, "score": 72437.41774004124 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct PeerAddr {\n\n ip: IpAddr,\n\n port: u16,\n\n pk: String,\n\n}\n\n\n\nimpl PeerAddr {\n\n fn parse(&self) -> (NodeAddr, SocketAddr) {\n\n ((&self.pk).into(), SocketAddr::new(self.ip, self.port))\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ConfigureRow {\n\n current_group: String,\n\n upper_group: String,\n\n p2p_address: Socket,\n\n rpc_address: Socket,\n\n upper_address: Socket,\n\n lower_address: Socket,\n", "file_path": "src/config.rs", "rank": 14, "score": 54579.90051967978 }, { "content": "#[derive(Clone)]\n\nstruct MultipleRecipient {\n\n upper_group: GroupID,\n\n lower_groups: Vec<GroupID>,\n\n\n\n recipient_event: Recipient<EventMessage>,\n\n recipient_peer_join: Recipient<PeerJoinMessage>,\n\n recipient_peer_join_result: Recipient<PeerJoinResultMessage>,\n\n recipient_peer_leave: Recipient<PeerLeaveMessage>,\n\n\n\n recipient_local: Recipient<LocalMessage>,\n\n recipient_upper: Recipient<UpperMessage>,\n\n recipient_lower: Recipient<LowerMessage>,\n\n\n\n recipient_local_response: Recipient<LocalResponseMessage>,\n\n recipient_upper_response: Recipient<UpperResponseMessage>,\n\n recipient_lower_response: Recipient<LowerResponseMessage>,\n\n recipient_level_permission: Recipient<LevelPermissionMessage>,\n\n recipient_level_permission_response: Recipient<LevelPermissionResponseMessage>,\n\n}\n\n\n", "file_path": "src/network_bridge.rs", "rank": 15, "score": 53430.0870089254 }, { "content": "type Sha512 = Sha3_512;\n\n\n\n#[derive(Default, Clone, Serialize, Deserialize)]\n\npub struct H256 {\n\n value: [u8; 32],\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct H512 {\n\n value: [u8; 64],\n\n}\n\n\n\nimpl H256 {\n\n pub fn new(content: &[u8]) -> H256 {\n\n let mut h: H256 = Default::default();\n\n let mut hasher = Sha256::default();\n\n hasher.input(content);\n\n h.value.copy_from_slice(&hasher.result()[..]);\n\n h\n\n }\n", "file_path": "src/crypto/hash.rs", "rank": 16, "score": 52134.62534579518 }, { "content": "type Sha256 = Sha3_256;\n", "file_path": "src/crypto/hash.rs", "rank": 17, "score": 52134.62534579518 }, { "content": "fn load_file_string() -> Option<String> {\n\n let file_path = \"config.toml\";\n\n let mut file = match File::open(file_path) {\n\n Ok(f) => f,\n\n Err(_) => {\n\n return None;\n\n }\n\n };\n\n\n\n let mut str_val = String::new();\n\n match file.read_to_string(&mut str_val) {\n\n Ok(s) => s,\n\n Err(e) => panic!(\"Error Reading file: {}\", e),\n\n };\n\n Some(str_val)\n\n}\n", "file_path": "src/config.rs", "rank": 18, "score": 47168.04954613863 }, { "content": "type TreeNode = 
Option<Box<Node>>;\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub(crate) struct Node {\n\n left: TreeNode,\n\n right: TreeNode,\n\n pk: PublicKey,\n\n distance: Distance,\n\n value: SocketAddr,\n\n}\n\n\n\nimpl Node {\n\n pub fn root(base: &PublicKey) -> Self {\n\n Node {\n\n left: None,\n\n right: None,\n\n pk: base.clone(),\n\n distance: Distance::distance(&base.to_bytes(), &base.to_bytes()),\n\n value: P2P_DEFAULT_SOCKET.parse().unwrap(),\n\n }\n", "file_path": "src/p2p/dht/binary_tree.rs", "rank": 19, "score": 45036.12040595422 }, { "content": "/// This is the interface of the group in the entire network,\n\n/// you can customize the implementation of these methods,\n\n/// you can set the permission or size of the group\n\npub trait Group<P: Peer> {\n\n type JoinType: Clone + Send + Default + Debug + Serialize + DeserializeOwned;\n\n\n\n /// id: it will return group's id\n\n fn id(&self) -> &GroupID;\n\n\n\n /// join: when peer join will call, happen before call consensus's peer_join,\n\n /// it has default implement if you want to handle it in consensus\n\n fn join(&mut self, _data: Self::JoinType, _peer_addr: PeerAddr) -> bool {\n\n true\n\n }\n\n\n\n /// leave: when peer leave will call, happen before call consensus's peer_leave,\n\n /// it has default implement if you want to handle it in consensus\n\n fn leave(&mut self, _pk: &PeerAddr) -> bool {\n\n true\n\n }\n\n\n\n /// verify: check peer is verified by group permission,\n\n /// it has default implement if you want to handle it in consensus\n", "file_path": "src/traits/propose/group.rs", "rank": 20, "score": 41915.04772935094 }, { "content": "pub trait Entity: Serialize + DeserializeOwned {\n\n type Key: AsRef<[u8]>;\n\n\n\n fn key(&self) -> Self::Key;\n\n}\n\n\n\n/// read message\n\n#[derive(Clone)]\n\npub struct EntityRead<E: 'static + Entity>(pub E::Key);\n\n\n\nimpl<E: 'static + Entity> Message for EntityRead<E> {\n\n type Result = Result<E, ()>;\n\n}\n\n\n\n/// write message\n\n#[derive(Clone)]\n\npub struct EntityWrite<E: Entity>(pub E);\n\n\n\nimpl<E: Entity> Message for EntityWrite<E> {\n\n type Result = ();\n\n}\n\n\n\n/// delete message\n\n#[derive(Clone)]\n\npub struct EntityDelete<E: 'static + Entity>(pub E::Key);\n\n\n\nimpl<E: 'static + Entity> Message for EntityDelete<E> {\n\n type Result = Result<E, ()>;\n\n}\n", "file_path": "src/storage/entity.rs", "rank": 21, "score": 41915.04772935094 }, { "content": "#[derive(Serialize, Deserialize, Clone)]\n\nstruct DHTTableStore(PublicKey, HashMap<GroupID, DHTTable>);\n\n\n\nimpl Entity for DHTTableStore {\n\n type Key = String;\n\n\n\n fn key(&self) -> Self::Key {\n\n format!(\"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl DHTTableStore {\n\n pub fn async_store(\n\n pk: PublicKey,\n\n table: HashMap<GroupID, DHTTable>,\n\n addr: &Addr<DiskStorageActor>,\n\n ) {\n\n let _ = try_resend_times(\n\n addr.clone(),\n\n EntityWrite(DHTTableStore(pk, table)),\n\n DEFAULT_TIMES,\n", "file_path": "src/p2p/p2p.rs", "rank": 22, "score": 41036.161826048 }, { "content": " pub GroupID,\n\n pub PeerAddr,\n\n pub PeerInfoByte,\n\n pub Option<SocketAddr>,\n\n);\n\n\n\nimpl Message for ReceivePeerJoinMessage {\n\n type Result = ();\n\n}\n\n\n\n/// peer join result when receive join request between p2p & bridge.\n\n/// Params is PeerAddr (p2p Node), bool (join ok or not), help some peer addr.\n\n#[derive(Clone)]\n\npub struct ReceivePeerJoinResultMessage(pub GroupID, pub PeerAddr, pub bool, pub Vec<PeerAddr>);\n\n\n\nimpl Message for ReceivePeerJoinResultMessage {\n\n type Result = 
();\n\n}\n\n\n\n/// receive peer leave between p2p & bridge.\n", "file_path": "src/traits/message/p2p_message.rs", "rank": 23, "score": 41001.26852178357 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse crate::actor::prelude::{Addr, Message};\n\nuse crate::primitives::types::{EventByte, GroupID, PeerAddr, PeerInfoByte};\n\n\n\nuse crate::traits::actor::P2PBridgeActor;\n\n\n\n/// receive event message between p2p & bridge.\n\n/// Params peerAddr, Event Byte.\n\n#[derive(Clone)]\n\npub struct ReceiveEventMessage(pub GroupID, pub PeerAddr, pub EventByte);\n\n\n\nimpl Message for ReceiveEventMessage {\n\n type Result = ();\n\n}\n\n\n\n/// receive peer join between p2p & bridge.\n\n/// Params is PeerAddr (p2p Node), Peer Join Info Byte.\n\n#[derive(Clone)]\n\npub struct ReceivePeerJoinMessage(\n", "file_path": "src/traits/message/p2p_message.rs", "rank": 24, "score": 41001.06669584842 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse crate::actor::prelude::{Addr, Message};\n\nuse crate::primitives::types::{BlockByte, EventID, GroupID, LevelPermissionByte, RPCParams};\n\n\n\nuse crate::traits::actor::RPCBridgeActor;\n\n\n\n/// rpc request from local outside.\n\n/// Params is rpc_session_id, group_id, RPCParams, and socket_addr,\n\n/// if want to request other socket, use socket_addr, when receive is none.\n\n#[derive(Clone)]\n\npub struct ReceiveLocalMessage(pub GroupID, pub usize, pub RPCParams, pub SocketAddr);\n\n\n\nimpl Message for ReceiveLocalMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc response from local outside.\n\n/// Params is rpc_session_id, group_id, RPCParams, and socket_addr,\n\n#[derive(Clone)]\n", "file_path": "src/traits/message/rpc_message.rs", "rank": 25, "score": 41000.50505243188 }, { "content": "#[derive(Clone)]\n\npub struct PeerJoinMessage(\n\n pub GroupID,\n\n pub PeerAddr,\n\n pub PeerInfoByte,\n\n pub Option<SocketAddr>,\n\n);\n\n\n\nimpl Message for PeerJoinMessage {\n\n type Result = ();\n\n}\n\n\n\n/// peer join result when receive join request between p2p & bridge.\n\n/// Params is PeerAddr (p2p Node), bool (join ok or not), help some peer addr.\n\n#[derive(Clone)]\n\npub struct PeerJoinResultMessage(pub GroupID, pub PeerAddr, pub bool, pub Vec<PeerAddr>);\n\n\n\nimpl Message for PeerJoinResultMessage {\n\n type Result = ();\n\n}\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 26, "score": 40999.71556364868 }, { "content": "}\n\n\n\n/// rpc level permission request.\n\n/// Params is LevelPermissionByte.\n\n#[derive(Clone)]\n\npub struct ReceiveLevelPermissionMessage(\n\n pub GroupID,\n\n pub usize,\n\n pub LevelPermissionByte,\n\n pub SocketAddr,\n\n);\n\n\n\nimpl Message for ReceiveLevelPermissionMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc level permission response.\n\n/// Params is LevelPermissionByte.\n\n#[derive(Clone)]\n\npub struct ReceiveLevelPermissionResponseMessage(pub GroupID, pub usize, pub bool);\n", "file_path": "src/traits/message/rpc_message.rs", "rank": 27, "score": 40998.69458382283 }, { "content": "impl Message for ReceiveUpperResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from lower level group (send block get more security).\n\n/// Params is rpc_session_id, group_id, Block Byte.\n\n#[derive(Clone)]\n\npub struct ReceiveLowerMessage(pub GroupID, pub usize, pub BlockByte);\n\n\n\nimpl Message for ReceiveLowerMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc response from lower level group (send block get more security).\n\n/// Params is rpc_session_id, 
response\n\n#[derive(Clone)]\n\npub struct ReceiveLowerResponseMessage(pub GroupID, pub usize, pub Option<EventID>);\n\n\n\nimpl Message for ReceiveLowerResponseMessage {\n\n type Result = ();\n", "file_path": "src/traits/message/rpc_message.rs", "rank": 28, "score": 40997.974039739966 }, { "content": "\n\nimpl Message for LowerResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc level permission request.\n\n/// Params is LevelPermissionByte.\n\n#[derive(Clone)]\n\npub struct LevelPermissionMessage(\n\n pub GroupID,\n\n pub usize,\n\n pub LevelPermissionByte,\n\n pub SocketAddr,\n\n);\n\n\n\nimpl Message for LevelPermissionMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc level permission response.\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 29, "score": 40997.563838991344 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse crate::actor::prelude::{Addr, Message};\n\nuse crate::primitives::types::{\n\n BlockByte, EventByte, EventID, GroupID, LevelPermissionByte, PeerAddr, PeerInfoByte, RPCParams,\n\n};\n\n\n\nuse crate::traits::actor::BridgeActor;\n\n\n\n/// event from p2p network self group.\n\n/// Params is PeerAddr (p2p Node), Event Byte.\n\n#[derive(Clone)]\n\npub struct EventMessage(pub GroupID, pub PeerAddr, pub EventByte);\n\n\n\nimpl Message for EventMessage {\n\n type Result = ();\n\n}\n\n\n\n/// peer join from p2p network.\n\n/// Params is PeerAddr (p2p Node), Peer Join Info Byte.\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 30, "score": 40997.08054732757 }, { "content": "pub struct ReceiveLocalResponseMessage(pub GroupID, pub usize, pub Option<RPCParams>);\n\n\n\nimpl Message for ReceiveLocalResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from upper level group (send block for subscribed).\n\n/// Params is rpc_session_id, group_id, Block Byte.\n\n#[derive(Clone)]\n\npub struct ReceiveUpperMessage(pub GroupID, pub usize, pub BlockByte);\n\n\n\nimpl Message for ReceiveUpperMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc response from upper level group (send block for subscribed).\n\n/// Params is session_id, response.\n\n#[derive(Clone)]\n\npub struct ReceiveUpperResponseMessage(pub GroupID, pub usize, pub Option<EventID>);\n\n\n", "file_path": "src/traits/message/rpc_message.rs", "rank": 31, "score": 40996.90412574843 }, { "content": "/// Params is LevelPermissionByte.\n\n#[derive(Clone)]\n\npub struct LevelPermissionResponseMessage(pub GroupID, pub usize, pub bool);\n\n\n\nimpl Message for LevelPermissionResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RegisterBridgeMessage<B: BridgeActor>(pub GroupID, pub GroupID, pub Addr<B>);\n\n\n\nimpl<B: BridgeActor> Message for RegisterBridgeMessage<B> {\n\n type Result = bool;\n\n}\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 32, "score": 40993.707362131114 }, { "content": "/// rpc response from local outside or send to outsize.\n\n/// Params is RPCParams.\n\n#[derive(Clone)]\n\npub struct LocalResponseMessage(pub GroupID, pub usize, pub Option<RPCParams>);\n\n\n\nimpl Message for LocalResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from upper level group (send block for subscribed).\n\n/// Params is rpc session_id, Block Byte.\n\n#[derive(Clone)]\n\npub struct UpperMessage(pub GroupID, pub usize, pub BlockByte);\n\n\n\nimpl Message for UpperMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from upper level group (send block for subscribed).\n\n/// Params is EventID.\n", 
"file_path": "src/traits/message/bridge_message.rs", "rank": 33, "score": 40993.3662951742 }, { "content": "#[derive(Clone)]\n\npub struct UpperResponseMessage(pub GroupID, pub usize, pub Option<EventID>);\n\n\n\nimpl Message for UpperResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from lower level group (send block get more security).\n\n/// Params is rpc session_id, Block Byte.\n\n#[derive(Clone)]\n\npub struct LowerMessage(pub GroupID, pub usize, pub BlockByte);\n\n\n\nimpl Message for LowerMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from lower level group (send block get more security).\n\n/// Params is EventID.\n\n#[derive(Clone)]\n\npub struct LowerResponseMessage(pub GroupID, pub usize, pub Option<EventID>);\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 34, "score": 40993.302226824024 }, { "content": "\n\nimpl Message for ReceiveLevelPermissionResponseMessage {\n\n type Result = ();\n\n}\n\n\n\n/// when rpc bridge actor start, need register addr to rpc actor\n\n#[derive(Clone)]\n\npub struct RPCBridgeAddrMessage<B: RPCBridgeActor>(pub Addr<B>);\n\n\n\nimpl<B: RPCBridgeActor> Message for RPCBridgeAddrMessage<B> {\n\n type Result = ();\n\n}\n", "file_path": "src/traits/message/rpc_message.rs", "rank": 35, "score": 40993.2568394154 }, { "content": "/// Params is PeerAddr (p2p Node), bool if is true, lost by all peers,\n\n/// if false, only first lost by self lost.\n\n#[derive(Clone)]\n\npub struct ReceivePeerLeaveMessage(pub GroupID, pub PeerAddr, pub bool);\n\n\n\nimpl Message for ReceivePeerLeaveMessage {\n\n type Result = ();\n\n}\n\n\n\n/// when p2p bridge actor start, need register addr to p2p actor\n\n#[derive(Clone)]\n\npub struct P2PBridgeAddrMessage<B: P2PBridgeActor>(pub Addr<B>);\n\n\n\nimpl<B: P2PBridgeActor> Message for P2PBridgeAddrMessage<B> {\n\n type Result = ();\n\n}\n", "file_path": "src/traits/message/p2p_message.rs", "rank": 36, "score": 40993.094081555515 }, { "content": "\n\n/// peer leave from p2p network.\n\n/// Params is PeerAddr (p2p Node), bool if is true, lost by all peers,\n\n/// if false, only first lost by self lost.\n\n#[derive(Clone)]\n\npub struct PeerLeaveMessage(pub GroupID, pub PeerAddr, pub bool);\n\n\n\nimpl Message for PeerLeaveMessage {\n\n type Result = ();\n\n}\n\n\n\n/// rpc request from local outside, or send actor.\n\n/// Params is SoocketAddr, RPCParams.\n\n#[derive(Clone)]\n\npub struct LocalMessage(pub GroupID, pub usize, pub RPCParams, pub SocketAddr);\n\n\n\nimpl Message for LocalMessage {\n\n type Result = ();\n\n}\n\n\n", "file_path": "src/traits/message/bridge_message.rs", "rank": 37, "score": 40992.770083635834 }, { "content": "", "file_path": "src/traits/message/inner_storage_message.rs", "rank": 38, "score": 40123.10691538236 }, { "content": "pub type BlockByte = Vec<u8>;\n\npub type EventByte = Vec<u8>;\n\npub type PeerInfoByte = Vec<u8>;\n\npub type LevelPermissionByte = Vec<u8>;\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct App<'a> {\n\n id: AppID,\n\n symbol: &'a str,\n\n owner: PublicKey,\n\n}\n\n\n\nimpl<'a> App<'a> {\n\n pub fn new(symbol: &'a str, owner: PublicKey) -> Self {\n\n let mut data = Vec::new();\n\n data.extend(bincode::serialize(&symbol).unwrap());\n\n data.extend(bincode::serialize(&owner).unwrap());\n\n let id = AppID::new(&data[..]);\n\n\n\n App { id, symbol, owner }\n", "file_path": "src/primitives/types.rs", "rank": 39, "score": 35143.14505787121 }, { "content": " pub fn to_string(&self) -> String {\n\n let mut string = 
String::new();\n\n for i in &self.0 {\n\n string.push_str(match i {\n\n true => \"1\",\n\n false => \"0\",\n\n });\n\n }\n\n string\n\n }\n\n\n\n pub fn get_same_prefix(&self) -> Binary {\n\n let mut vec: Vec<bool> = Vec::new();\n\n let first_prefix: bool = self[0];\n\n for i in &self.0 {\n\n if i == &first_prefix {\n\n vec.push(first_prefix)\n\n } else {\n\n break;\n\n }\n", "file_path": "src/primitives/types.rs", "rank": 40, "score": 35136.016225133055 }, { "content": " }\n\n Binary(vec)\n\n }\n\n\n\n pub fn range(&self, start: usize, end: usize) -> Binary {\n\n let true_end = if self.len() < end { self.len() } else { end };\n\n\n\n let mut vec: Vec<bool> = Vec::new();\n\n for i in start..true_end {\n\n vec.push(self[i])\n\n }\n\n Binary(vec)\n\n }\n\n\n\n pub fn xor(&self, other: &Binary) -> Binary {\n\n let mut xor_: Vec<bool> = Vec::new();\n\n\n\n for i in 0..self.len() {\n\n xor_.push(self[i] ^ other[i])\n\n }\n", "file_path": "src/primitives/types.rs", "rank": 41, "score": 35133.44010096981 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize, Hash, Eq)]\n\npub struct Binary(Vec<bool>);\n\n\n\nimpl Binary {\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n pub fn new(vec: &Vec<bool>) -> Binary {\n\n Binary(vec.clone())\n\n }\n\n\n\n pub fn max(len: usize) -> Binary {\n\n let vec = vec![true; len];\n\n Binary(vec)\n\n }\n\n\n", "file_path": "src/primitives/types.rs", "rank": 42, "score": 35133.237778439914 }, { "content": "use jsonrpc_parse::Params;\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse std::cmp::Ordering;\n\nuse std::ops::Index;\n\n\n\nuse crate::crypto::hash::H256;\n\n//use crate::crypto::hash::H512;\n\nuse crate::crypto::keypair::PublicKey;\n\n\n\npub const GROUP_ID_LENGTH: usize = 32;\n\npub type GroupID = H256;\n\n\n\npub const EVENT_ID_LENGTH: usize = 32;\n\npub type EventID = H256;\n\n\n\npub const APP_ID_LENGTH: usize = 32;\n\npub type AppID = H256;\n\n\n\npub type PeerAddr = PublicKey;\n\npub type RPCParams = Params;\n", "file_path": "src/primitives/types.rs", "rank": 43, "score": 35130.65562369427 }, { "content": " fn partial_cmp(&self, other: &Binary) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl PartialEq for Binary {\n\n fn eq(&self, other: &Binary) -> bool {\n\n self.0 == other.0\n\n }\n\n}\n\n\n\nimpl Default for Binary {\n\n fn default() -> Binary {\n\n let vec = vec![true; 8];\n\n Binary(vec)\n\n }\n\n}\n", "file_path": "src/primitives/types.rs", "rank": 44, "score": 35129.99734765781 }, { "content": "\n\n Binary::new(&xor_)\n\n }\n\n}\n\n\n\nimpl Index<usize> for Binary {\n\n type Output = bool;\n\n\n\n fn index(&self, index: usize) -> &bool {\n\n &self.0[index]\n\n }\n\n}\n\n\n\nimpl Ord for Binary {\n\n fn cmp(&self, other: &Binary) -> Ordering {\n\n self.0.cmp(&other.0)\n\n }\n\n}\n\n\n\nimpl PartialOrd for Binary {\n", "file_path": "src/primitives/types.rs", "rank": 45, "score": 35126.07753867363 }, { "content": "/// This is the interface of the Event in the entire network,\n\n/// Event id's data structure is defined by teatree,\n\n/// Events are the basic unit of flow in the network\n\npub trait Event: Clone + Send + Debug + Eq + Ord + Serialize + DeserializeOwned {\n\n /// get the event id, defined in teatree\n\n fn id(&self) -> &EventID;\n\n}\n", "file_path": "src/traits/propose/event.rs", "rank": 46, "score": 34434.237331207885 }, { "content": "pub trait Peer: Default + Clone + Debug + Sync + Send + Serialize + DeserializeOwned {\n\n type PrivateKey: Clone\n\n + Debug\n\n + 
Eq\n\n + Ord\n\n + Sync\n\n + Send\n\n + Serialize\n\n + DeserializeOwned\n\n + Display\n\n + From<String>;\n\n type PublicKey: Hash\n\n + Default\n\n + Clone\n\n + Debug\n\n + Eq\n\n + Ord\n\n + Sync\n\n + Send\n\n + Serialize\n", "file_path": "src/traits/propose/peer.rs", "rank": 47, "score": 34434.237331207885 }, { "content": "pub mod bridge_message;\n\npub mod inner_storage_message;\n\npub mod p2p_message;\n\npub mod rpc_message;\n", "file_path": "src/traits/message/mod.rs", "rank": 48, "score": 33667.398275589956 }, { "content": "use serde::de::DeserializeOwned;\n\nuse serde::ser::Serialize;\n\nuse std::fmt::Debug;\n\n\n", "file_path": "src/traits/propose/message.rs", "rank": 49, "score": 33663.30047257686 }, { "content": "\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLevelPermissionMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLevelPermissionMessage,\n\n ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, permission_bytes, socket_addr) = (msg.0, msg.2, msg.3);\n\n let id = rand::thread_rng().gen::<usize>();\n\n let request = Request::Permission(group, permission_bytes);\n\n self.waitings.insert(id, request);\n\n let rpc_addr = ctx.address();\n\n\n\n create_session(id, socket_addr, rpc_addr);\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLocalResponseMessage> for RPCActor<A> {\n", "file_path": "src/rpc/rpc.rs", "rank": 52, "score": 28.25883382996583 }, { "content": " head.update_signature(self.psk.sign_bytes(&hash_content.to_vec()));\n\n P2PMessage(head, content, socket)\n\n }\n\n\n\n /// Timed task, include: Heart Beat and NAT holepunching\n\n fn hb(&self, ctx: &mut Context<Self>) {\n\n ctx.run_later(Duration::new(5, 0), |act, ctx| {\n\n let mut send_peer_leave: Vec<ReceivePeerLeaveMessage> = vec![];\n\n let mut send_heartbeat: Vec<(GroupID, PublicKey, SocketAddr, P2PContent)> = vec![];\n\n for (group, table) in act.tables.iter_mut() {\n\n let (mut next, mut dis) = table.next_hb_peers();\n\n for _ in 0..dis.len() {\n\n if let Some(id) = dis.pop() {\n\n table.remove_peer(&id);\n\n send_peer_leave.push(ReceivePeerLeaveMessage(group.clone(), id, false));\n\n }\n\n }\n\n\n\n while !next.is_empty() {\n\n let (pk, socket) = next.pop().unwrap();\n", "file_path": "src/p2p/p2p.rs", "rank": 53, "score": 28.11967368076835 }, { "content": " self.waitings.insert(id, request);\n\n let rpc_addr = ctx.address();\n\n\n\n create_session(id, socket_addr, rpc_addr);\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveUpperMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveUpperMessage, ctx: &mut Self::Context) -> Self::Result {\n\n let (group, block_bytes) = (msg.0, msg.2);\n\n if let Some(socket_addr) = self.upper_sockets.get(&group) {\n\n let id = rand::thread_rng().gen::<usize>();\n\n let request = Request::Upper(group, block_bytes);\n\n self.waitings.insert(id, request);\n\n let rpc_addr = ctx.address();\n\n\n\n create_session(id, socket_addr.clone(), rpc_addr);\n\n }\n", "file_path": "src/rpc/rpc.rs", "rank": 54, "score": 27.88376914108014 }, { "content": " }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLowerMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveLowerMessage, ctx: &mut Self::Context) -> Self::Result {\n\n let (group, block_bytes) = (msg.0, msg.2);\n\n if let Some(socket_addrs) = self.lower_sockets.get(&group) {\n\n for socket_addr in socket_addrs.iter() {\n\n let id = rand::thread_rng().gen::<usize>();\n\n let request = 
Request::Lower(group.clone(), block_bytes.clone());\n\n self.waitings.insert(id, request);\n\n let rpc_addr = ctx.address();\n\n\n\n create_session(id, socket_addr.clone(), rpc_addr);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/rpc/rpc.rs", "rank": 55, "score": 27.77227602285876 }, { "content": " self.sessions.remove(&index);\n\n self.waitings.remove(&index);\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<RPCBridgeAddrMessage<A>> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RPCBridgeAddrMessage<A>, _ctx: &mut Self::Context) -> Self::Result {\n\n self.bridge = Some(msg.0);\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLocalMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveLocalMessage, ctx: &mut Self::Context) -> Self::Result {\n\n let (group, params, socket_addr) = (msg.0, msg.2, msg.3);\n\n let id = rand::thread_rng().gen::<usize>();\n\n let request = Request::Local(group, params);\n", "file_path": "src/rpc/rpc.rs", "rank": 57, "score": 26.343706867373854 }, { "content": "impl<A: RPCBridgeActor> RPCBridgeActor for RPCActor<A> {}\n\n\n\n/// when receive request from session, send to bridge.\n\nimpl<A: RPCBridgeActor> Handler<RequestMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RequestMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n let (index, request, socket_addr) = (msg.0, msg.1, msg.2);\n\n if self.sessions.get(&index).is_some() {\n\n match request {\n\n Request::Local(group, params) => {\n\n self.send_bridge(ReceiveLocalMessage(group, index, params, socket_addr))\n\n }\n\n Request::Lower(group, block_bytes) => {\n\n self.send_bridge(ReceiveLowerMessage(group, index, block_bytes))\n\n }\n\n Request::Upper(group, block_bytes) => {\n\n self.send_bridge(ReceiveUpperMessage(group, index, block_bytes))\n\n }\n\n Request::Permission(group, permission_bytes) => self.send_bridge(\n", "file_path": "src/rpc/rpc.rs", "rank": 59, "score": 26.273149898395957 }, { "content": " ReceiveLevelPermissionMessage(group, index, permission_bytes, socket_addr),\n\n ),\n\n _ => {}\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// when receive response from session, send to bridge.\n\nimpl<A: RPCBridgeActor> Handler<ResponseMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ResponseMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n let (index, response) = (msg.0, msg.1);\n\n if self.sessions.get(&index).is_some() {\n\n match response {\n\n Response::Local(group, params) => {\n\n self.send_bridge(ReceiveLocalResponseMessage(group, index, Some(params)))\n\n }\n\n Response::Lower(group, event_id) => {\n", "file_path": "src/rpc/rpc.rs", "rank": 60, "score": 26.213674474720364 }, { "content": "\n\n fn handle(&mut self, msg: SessionOpenMessage<A>, _ctx: &mut Self::Context) -> Self::Result {\n\n let (index, addr) = (msg.0, msg.1);\n\n self.sessions.insert(index, addr);\n\n if self.waitings.contains_key(&index) {\n\n let request = self.waitings.remove(&index).unwrap();\n\n self.send_session(\n\n index,\n\n RequestMessage(index, request, \"0.0.0.0:0\".parse().unwrap()), // use default sock because dono use\n\n );\n\n }\n\n }\n\n}\n\n\n\n/// when session close, delete it.\n\nimpl<A: RPCBridgeActor> Handler<SessionCloseMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: SessionCloseMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n let index = msg.0;\n", "file_path": "src/rpc/rpc.rs", "rank": 62, "score": 24.65249085693002 }, { "content": 
"impl Handler<LevelPermissionMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: LevelPermissionMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveLevelPermissionMessage(msg.0, msg.1, msg.2, msg.3));\n\n }\n\n}\n\n\n\nimpl Handler<LocalResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: LocalResponseMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveLocalResponseMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\nimpl Handler<UpperResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: UpperResponseMessage, _ctx: &mut Self::Context) -> Self::Result {\n", "file_path": "src/network_bridge.rs", "rank": 63, "score": 24.553588206944255 }, { "content": " .get_mut(&peer_addr)\n\n .unwrap()\n\n .3\n\n .push(message);\n\n } else if let Some(socket) = table.get_socket_addr(&peer_addr) {\n\n self.send_session(self.new_p2p_message(\n\n group,\n\n peer_addr,\n\n socket,\n\n P2PContent::Event(event),\n\n ));\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<A: P2PBridgeActor> Handler<ReceivePeerJoinMessage> for P2PActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceivePeerJoinMessage, _ctx: &mut Self::Context) -> Self::Result {\n", "file_path": "src/p2p/p2p.rs", "rank": 64, "score": 24.260434487923927 }, { "content": "}\n\n\n\n/// receive peer join result from bridge actor, and send to p2p\n\nimpl Handler<PeerJoinResultMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: PeerJoinResultMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_p2p(ReceivePeerJoinResultMessage(msg.0, msg.1, msg.2, msg.3));\n\n }\n\n}\n\n\n\n/// receive peer leave message from bridge actor, and send to p2p\n\nimpl Handler<PeerLeaveMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: PeerLeaveMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_p2p(ReceivePeerLeaveMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n", "file_path": "src/network_bridge.rs", "rank": 65, "score": 23.8915560445546 }, { "content": " self.send_rpc(ReceiveUpperResponseMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\nimpl Handler<LowerResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: LowerResponseMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveLowerResponseMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\nimpl Handler<LevelPermissionResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: LevelPermissionResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n", "file_path": "src/network_bridge.rs", "rank": 66, "score": 23.87117706216784 }, { "content": " _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_peer_join_result\n\n .do_send(PeerJoinResultMessage(msg.0, msg.1, msg.2, msg.3)),\n\n )\n\n });\n\n }\n\n}\n\n\n\n/// receive peer leave message from p2p actor, and send to bridge\n\nimpl Handler<ReceivePeerLeaveMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceivePeerLeaveMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n", "file_path": "src/network_bridge.rs", "rank": 68, "score": 23.28374459006952 }, { "content": "\n\n/// receive send to upper 
rpc request from bridge actor, and send to rpc\n\nimpl Handler<UpperMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: UpperMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveUpperMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\n/// receive send to lower rpc request from bridge actor, and send to rpc\n\nimpl Handler<LowerMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: LowerMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveLowerMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\n/// receive send to lower rpc request from bridge actor, and send to rpc\n", "file_path": "src/network_bridge.rs", "rank": 69, "score": 23.283315974236952 }, { "content": " }\n\n\n\n pub fn to_bytes(&self) -> [u8; 32] {\n\n self.value.clone()\n\n }\n\n}\n\n\n\nimpl H512 {\n\n pub fn new(content: &[u8]) -> H512 {\n\n let mut h: H512 = Default::default();\n\n let mut hasher = Sha512::default();\n\n hasher.input(content);\n\n h.value.copy_from_slice(&hasher.result()[..]);\n\n h\n\n }\n\n\n\n pub fn to_vec(&self) -> Vec<u8> {\n\n self.value.to_vec()\n\n }\n\n\n", "file_path": "src/crypto/hash.rs", "rank": 70, "score": 23.06065250602513 }, { "content": " self.send_rpc(ReceiveLevelPermissionResponseMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\n/// receive event message from bridge actor, and send to p2p\n\nimpl Handler<EventMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: EventMessage, _ctx: &mut Self::Context) {\n\n self.send_p2p(ReceiveEventMessage(msg.0, msg.1, msg.2));\n\n }\n\n}\n\n\n\n/// receive peer join message from bridge actor, and send to p2p\n\nimpl Handler<PeerJoinMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: PeerJoinMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_p2p(ReceivePeerJoinMessage(msg.0, msg.1, msg.2, msg.3));\n\n }\n", "file_path": "src/network_bridge.rs", "rank": 71, "score": 22.945561409535873 }, { "content": " self.hb(ctx);\n\n }\n\n}\n\n\n\nimpl<A: P2PBridgeActor> Handler<P2PBridgeAddrMessage<A>> for P2PActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: P2PBridgeAddrMessage<A>, _ctx: &mut Self::Context) -> Self::Result {\n\n self.bridge = Some(msg.0);\n\n // load saved tables\n\n }\n\n}\n\n\n\nimpl<A: P2PBridgeActor> P2PBridgeActor for P2PActor<A> {}\n\n\n\nimpl<A: P2PBridgeActor> Handler<ReceiveEventMessage> for P2PActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveEventMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n let (group, peer_addr, event) = (msg.0, msg.1, msg.2);\n", "file_path": "src/p2p/p2p.rs", "rank": 72, "score": 22.87526767557024 }, { "content": "\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLowerResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, index, result) = (msg.0, msg.1, msg.2);\n\n self.send_session(\n\n index,\n\n ResponseMessage(0usize, Response::Lower(group, result)),\n\n );\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveUpperResponseMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveUpperResponseMessage,\n", "file_path": "src/rpc/rpc.rs", "rank": 73, "score": 22.720088415426904 }, { "content": " _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, index, result) = (msg.0, msg.1, msg.2);\n\n self.send_session(\n\n index,\n\n ResponseMessage(0usize, Response::Upper(group, 
result)),\n\n );\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLevelPermissionResponseMessage> for RPCActor<A> {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLevelPermissionResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, index, result) = (msg.0, msg.1, msg.2);\n\n self.send_session(\n\n index,\n\n ResponseMessage(0usize, Response::Permission(group, result)),\n\n );\n\n }\n\n}\n", "file_path": "src/rpc/rpc.rs", "rank": 74, "score": 22.572923668851338 }, { "content": "impl<A: P2PBridgeActor> Handler<ReceivePeerJoinResultMessage> for P2PActor<A> {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceivePeerJoinResultMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, peer_addr, result, helps) = (msg.0, msg.1, msg.2, msg.3);\n\n println!(\"DEBUG: peer join: {}\", result);\n\n let need_store = if let Some(table) = self.tables.get_mut(&group) {\n\n let mut need_store = false;\n\n if let Some(socket) = table.get_socket_addr(&peer_addr) {\n\n if result {\n\n if table.fixed_peer(&peer_addr) {\n\n need_store = true;\n\n }\n\n\n\n let dht = helps\n\n .iter()\n", "file_path": "src/p2p/p2p.rs", "rank": 75, "score": 22.46757159018121 }, { "content": "\n\nimpl Handler<ReceiveLowerResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLowerResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_lower_response\n\n .do_send(LowerResponseMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n\n }\n\n}\n\n\n\nimpl Handler<ReceiveLevelPermissionResponseMessage> for NetworkBridgeActor {\n", "file_path": "src/network_bridge.rs", "rank": 76, "score": 22.385579733732207 }, { "content": " type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceivePeerJoinMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_peer_join\n\n .do_send(PeerJoinMessage(msg.0, msg.1, msg.2, msg.3)),\n\n )\n\n });\n\n }\n\n}\n\n\n\n/// receive peer join result from bridge actor, and send to p2p\n\nimpl Handler<ReceivePeerJoinResultMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceivePeerJoinResultMessage,\n", "file_path": "src/network_bridge.rs", "rank": 77, "score": 22.363635963063615 }, { "content": " }\n\n}\n\n\n\nimpl Handler<ReceiveUpperResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveUpperResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_upper_response\n\n .do_send(UpperResponseMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n\n }\n\n}\n", "file_path": "src/network_bridge.rs", "rank": 78, "score": 22.198120371222508 }, { "content": " type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLocalResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n let (group, index, result) = (msg.0, msg.1, msg.2);\n\n let response = if result.is_some() {\n\n Response::Local(group, result.unwrap())\n\n } else {\n\n Response::Invalid\n\n };\n\n\n\n self.send_session(index, ResponseMessage(0usize, response));\n\n }\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<ReceiveLowerResponseMessage> for RPCActor<A> {\n\n type Result = ();\n", "file_path": "src/rpc/rpc.rs", 
"rank": 79, "score": 21.860033499299384 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Handler<ReceiveLevelPermissionMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLevelPermissionMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n if self.bridges.contains_key(&msg.0) {\n\n let _ = self\n\n .bridges\n\n .get(&msg.0)\n\n .unwrap()\n\n .recipient_level_permission\n\n .do_send(LevelPermissionMessage(msg.0, msg.1, msg.2, msg.3));\n\n } else {\n", "file_path": "src/network_bridge.rs", "rank": 80, "score": 21.61949413489166 }, { "content": " self.send_rpc(ReceiveLevelPermissionResponseMessage(msg.0, msg.1, false));\n\n }\n\n }\n\n}\n\n\n\nimpl Handler<ReceiveLocalResponseMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLocalResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_local_response\n\n .do_send(LocalResponseMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n", "file_path": "src/network_bridge.rs", "rank": 81, "score": 21.550420563755644 }, { "content": "\n\n pub fn to_vec(&self) -> Vec<u8> {\n\n self.value.to_vec()\n\n }\n\n\n\n pub fn from_vec(data: Vec<u8>) -> Result<Self, ()> {\n\n if data.len() != 32 {\n\n return Err(());\n\n }\n\n\n\n let mut value: [u8; 32] = [0; 32];\n\n value.copy_from_slice(&data[..]);\n\n\n\n Ok(Self { value })\n\n }\n\n\n\n pub fn to_string(&self) -> String {\n\n format!(\"{}\", self)\n\n }\n\n\n", "file_path": "src/crypto/hash.rs", "rank": 82, "score": 21.40524585838136 }, { "content": "/// impl RPCBridgeActor for NetworkBridgeActor {}\n\nimpl P2PBridgeActor for NetworkBridgeActor {}\n\n\n\n/// receive event message from p2p actor, and send to bridge\n\nimpl Handler<ReceiveEventMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveEventMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_event\n\n .do_send(EventMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n\n }\n\n}\n\n\n\n/// receive peer join message from p2p actor, and send to bridge\n\nimpl Handler<ReceivePeerJoinMessage> for NetworkBridgeActor {\n", "file_path": "src/network_bridge.rs", "rank": 83, "score": 21.405072967155355 }, { "content": " .recipient_peer_leave\n\n .do_send(PeerLeaveMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n\n }\n\n}\n\n\n\n/// impl RPCBridgeActor for NetworkBridgeActor\n\nimpl RPCBridgeActor for NetworkBridgeActor {}\n\n\n\n/// receive local rpc request from bridge actor, and send to rpc\n\nimpl Handler<ReceiveLocalMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveLocalMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n if self.bridges.contains_key(&msg.0) {\n\n let _ = self\n\n .bridges\n\n .get(&msg.0)\n\n .unwrap()\n", "file_path": "src/network_bridge.rs", "rank": 85, "score": 21.051841623404222 }, { "content": " pub fn from_vec(data: Vec<u8>) -> Result<Self, ()> {\n\n if data.len() != 64 {\n\n return Err(());\n\n }\n\n\n\n let mut value = [0u8; 64];\n\n value.copy_from_slice(&data[..]);\n\n Ok(Self { value })\n\n }\n\n\n\n pub fn to_string(&self) -> String {\n\n format!(\"{}\", self)\n\n }\n\n\n\n pub fn from_string(s: &String) -> Result<Self, ()> {\n\n let string = s[2..].to_string();\n\n\n\n if string.len() != 128 {\n\n return Err(());\n\n }\n", "file_path": "src/crypto/hash.rs", 
"rank": 86, "score": 20.851636322109538 }, { "content": "}\n\n\n\npub(crate) struct RPCTcpConnectMessage(pub TcpStream, pub SocketAddr);\n\n\n\nimpl Message for RPCTcpConnectMessage {\n\n type Result = ();\n\n}\n\n\n\nimpl<A: RPCBridgeActor> Handler<RPCTcpConnectMessage> for RPCListenActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: RPCTcpConnectMessage, _: &mut Context<Self>) {\n\n let rpc_addr = self.rpc_addr.clone();\n\n RPCSessionActor::create(move |ctx| {\n\n let id = rand::thread_rng().gen::<usize>();\n\n let (r, w) = msg.0.split();\n\n let read_frame = FramedRead::new(r, HTTPCodec::new());\n\n RPCSessionActor::add_stream(read_frame, ctx);\n\n let mut write_frame = FramedWrite::new(w, HTTPCodec::new(), ctx);\n\n write_frame.set_buffer_capacity(LOW_WATERMARK, HIGH_WATERMARK);\n", "file_path": "src/rpc/listen.rs", "rank": 88, "score": 20.52874204975413 }, { "content": " return Err(());\n\n }\n\n\n\n let mut value: [u8; 32] = [0; 32];\n\n\n\n for i in 0..(string.len() / 2) {\n\n let res = u8::from_str_radix(&string[2 * i..2 * i + 2], 16).unwrap();\n\n value[i] = res;\n\n }\n\n Ok(Self { value })\n\n }\n\n\n\n pub fn from_bytes(bytes: &[u8]) -> Result<Self, ()> {\n\n if bytes.len() != 32 {\n\n return Err(());\n\n }\n\n\n\n let mut value: [u8; 32] = Default::default();\n\n value.copy_from_slice(bytes);\n\n Ok(Self { value })\n", "file_path": "src/crypto/hash.rs", "rank": 89, "score": 20.4800916949723 }, { "content": " } else {\n\n self.send_rpc(ReceiveLevelPermissionResponseMessage(msg.0, msg.1, false));\n\n }\n\n }\n\n}\n\n\n\n/// receive send to lower rpc request from bridge actor, and send to rpc\n\nimpl Handler<ReceiveLowerMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveLowerMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n if self.bridges.contains_key(&msg.0) {\n\n let _ = self\n\n .bridges\n\n .get(&msg.0)\n\n .unwrap()\n\n .recipient_lower\n\n .do_send(LowerMessage(msg.0, msg.1, msg.2));\n\n } else {\n\n self.send_rpc(ReceiveLevelPermissionResponseMessage(msg.0, msg.1, false));\n", "file_path": "src/network_bridge.rs", "rank": 90, "score": 20.27877963114164 }, { "content": " .recipient_local\n\n .do_send(LocalMessage(msg.0, msg.1, msg.2, msg.3));\n\n } else {\n\n self.send_rpc(ReceiveLevelPermissionResponseMessage(msg.0, msg.1, false));\n\n }\n\n }\n\n}\n\n\n\n/// receive send to upper rpc request from bridge actor, and send to rpc\n\nimpl Handler<ReceiveUpperMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceiveUpperMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n if self.bridges.contains_key(&msg.0) {\n\n let _ = self\n\n .bridges\n\n .get(&msg.0)\n\n .unwrap()\n\n .recipient_upper\n\n .do_send(UpperMessage(msg.0, msg.1, msg.2));\n", "file_path": "src/network_bridge.rs", "rank": 91, "score": 20.153898464644534 }, { "content": "}\n\n\n\nimpl Actor for DiskStorageActor {\n\n type Context = Context<Self>;\n\n\n\n fn started(&mut self, _ctx: &mut Self::Context) {\n\n println!(\"DEBUG: Storage is started!\");\n\n }\n\n}\n\n\n\n/// handle read message\n\nimpl<E: Entity> Handler<EntityRead<E>> for DiskStorageActor {\n\n type Result = Result<E, ()>;\n\n\n\n fn handle(&mut self, msg: EntityRead<E>, _ctx: &mut Self::Context) -> Self::Result {\n\n self.db.read_entity::<E>(msg.0)\n\n }\n\n}\n\n\n\n/// handle write message\n", "file_path": "src/storage/disk_storage.rs", "rank": 92, "score": 20.139238873920885 }, { "content": " recipient_level_permission_response: 
addr.recipient::<LevelPermissionResponseMessage>(),\n\n };\n\n\n\n if self.bridges.contains_key(&group_id) {\n\n false\n\n } else {\n\n self.bridges.insert(group_id, group);\n\n true\n\n }\n\n }\n\n}\n\n\n\n/// receive local rpc request from bridge actor, and send to rpc\n\nimpl Handler<LocalMessage> for NetworkBridgeActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: LocalMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n self.send_rpc(ReceiveLocalMessage(msg.0, msg.1, msg.2, msg.3));\n\n }\n\n}\n", "file_path": "src/network_bridge.rs", "rank": 93, "score": 19.87903288288039 }, { "content": " P2PContent::Leave,\n\n ));\n\n }\n\n }\n\n need_store\n\n } else {\n\n false\n\n };\n\n\n\n if need_store {\n\n DHTTableStore::async_store(self.pk.clone(), self.tables.clone(), &self.storage);\n\n }\n\n }\n\n}\n\n\n\nimpl<A: P2PBridgeActor> Handler<ReceivePeerLeaveMessage> for P2PActor<A> {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ReceivePeerLeaveMessage, _ctx: &mut Self::Context) -> Self::Result {\n\n let (group, peer_addr, is_force) = (msg.0, msg.1, msg.2);\n", "file_path": "src/p2p/p2p.rs", "rank": 94, "score": 19.72829998343576 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse std::cmp::Ordering;\n\n\n\n/// DHT Distance use 160 bit (20bytes)\n\n#[derive(Debug, Clone, Serialize, Deserialize, Eq)]\n\npub(crate) struct Distance([u8; 20]);\n\n\n\nimpl Distance {\n\n pub fn distance(from: &[u8], base: &[u8]) -> Self {\n\n let from_len = from.len();\n\n let base_len = base.len();\n\n\n\n let (len, mut hold) = if from_len >= 20 && base_len >= 20 {\n\n (0, [0; 20])\n\n } else if from_len > base_len {\n\n let mut hold = [0; 20];\n\n hold[0..base_len].copy_from_slice(&from[base_len..]);\n\n (base_len, hold)\n\n } else {\n\n let mut hold = [0; 20];\n", "file_path": "src/p2p/dht/distance.rs", "rank": 95, "score": 19.612838735650968 }, { "content": " fn generate() -> (Self::PublicKey, Self::PrivateKey);\n\n\n\n fn pk(&self) -> &Self::PublicKey;\n\n\n\n fn sign(psk: &Self::PrivateKey, data: &Vec<u8>) -> Self::Signature;\n\n\n\n fn verify(pk: &Self::PublicKey, data: &Vec<u8>, signature: &Self::Signature) -> bool;\n\n\n\n fn binary(&self) -> Binary {\n\n let self_bytes = bincode::serialize(self.pk()).unwrap();\n\n let mut vec: Vec<bool> = Vec::new();\n\n for i in 0..self_bytes.len() {\n\n let str_a = format!(\"{:>08b}\", self_bytes[i]);\n\n for i in str_a.chars() {\n\n vec.push(match i {\n\n '1' => true,\n\n _ => false,\n\n });\n\n }\n\n }\n", "file_path": "src/traits/propose/peer.rs", "rank": 96, "score": 19.581754820065555 }, { "content": "impl<E: Entity> Handler<EntityWrite<E>> for DiskStorageActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: EntityWrite<E>, _ctx: &mut Self::Context) -> Self::Result {\n\n self.db.write_entity(msg.0)\n\n }\n\n}\n\n\n\n/// handle delete message\n\nimpl<E: Entity> Handler<EntityDelete<E>> for DiskStorageActor {\n\n type Result = Result<E, ()>;\n\n\n\n fn handle(&mut self, msg: EntityDelete<E>, _ctx: &mut Self::Context) -> Self::Result {\n\n self.db.delete_entity(msg.0)\n\n }\n\n}\n", "file_path": "src/storage/disk_storage.rs", "rank": 97, "score": 19.339059038620803 }, { "content": " }\n\n}\n\n\n\n/// impl Actor for NetworkBridgeActor\n\nimpl Actor for NetworkBridgeActor {\n\n type Context = Context<Self>;\n\n\n\n /// when start register to p2p and rpc actor\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n self.send_p2p(P2PBridgeAddrMessage(ctx.address()));\n\n 
self.send_rpc(RPCBridgeAddrMessage(ctx.address()));\n\n }\n\n}\n\n\n\n/// impl BridgeActor for NetworkBridgeActor\n\nimpl BridgeActor for NetworkBridgeActor {}\n\n\n\n/// receive local rpc request from bridge actor, and send to rpc\n\nimpl<B: BridgeActor> Handler<RegisterBridgeMessage<B>> for NetworkBridgeActor {\n\n type Result = bool;\n", "file_path": "src/network_bridge.rs", "rank": 98, "score": 19.334357881246525 }, { "content": " type Result = ();\n\n\n\n fn handle(\n\n &mut self,\n\n msg: ReceiveLevelPermissionResponseMessage,\n\n _ctx: &mut Self::Context,\n\n ) -> Self::Result {\n\n self.bridges.get(&msg.0).and_then(|group| {\n\n Some(\n\n group\n\n .recipient_level_permission_response\n\n .do_send(LevelPermissionResponseMessage(msg.0, msg.1, msg.2)),\n\n )\n\n });\n\n }\n\n}\n", "file_path": "src/network_bridge.rs", "rank": 99, "score": 19.303973883334606 } ]
Rust
src/connector/mysql/conversion.rs
kyle-mccarthy/quaint
12b6d22014f4e1218e32760a91b7985205e02453
use crate::{ ast::Value, connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; #[cfg(feature = "chrono")] use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc}; use mysql_async::{ self as my, consts::{ColumnFlags, ColumnType}, }; use std::convert::TryFrom; #[tracing::instrument(skip(params))] pub fn conv_params<'a>(params: &[Value<'a>]) -> crate::Result<my::Params> { if params.is_empty() { Ok(my::Params::Empty) } else { let mut values = Vec::with_capacity(params.len()); for pv in params { let res = match pv { Value::Integer(i) => i.map(my::Value::Int), Value::Float(f) => f.map(my::Value::Float), Value::Double(f) => f.map(my::Value::Double), Value::Text(s) => s.clone().map(|s| my::Value::Bytes((&*s).as_bytes().to_vec())), Value::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), Value::Enum(s) => s.clone().map(|s| my::Value::Bytes((&*s).as_bytes().to_vec())), Value::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), Value::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), Value::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), Value::Array(_) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); let mut builder = Error::builder(kind); builder.set_original_message(msg); return Err(builder.build()); } #[cfg(feature = "bigdecimal")] Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), #[cfg(feature = "json")] Value::Json(s) => match s { Some(ref s) => { let json = serde_json::to_string(s)?; let bytes = json.into_bytes(); Some(my::Value::Bytes(bytes)) } None => None, }, #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.to_hyphenated().to_string().into_bytes())), #[cfg(feature = "chrono")] Value::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } #[cfg(feature = "chrono")] Value::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, dt.month() as u8, dt.day() as u8, dt.hour() as u8, dt.minute() as u8, dt.second() as u8, dt.timestamp_subsec_micros(), ) }), }; match res { Some(val) => values.push(val), None => values.push(my::Value::NULL), } } Ok(my::Params::Positional(values)) } } impl TypeIdentifier for my::Column { fn is_real(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DECIMAL | MYSQL_TYPE_NEWDECIMAL) } fn is_float(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_FLOAT) } fn is_double(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DOUBLE) } fn is_integer(&self) -> bool { use ColumnType::*; matches!( self.column_type(), MYSQL_TYPE_TINY | MYSQL_TYPE_SHORT | MYSQL_TYPE_LONG | MYSQL_TYPE_LONGLONG | MYSQL_TYPE_YEAR | MYSQL_TYPE_INT24 ) } fn is_datetime(&self) -> bool { use ColumnType::*; matches!( self.column_type(), MYSQL_TYPE_TIMESTAMP | MYSQL_TYPE_DATETIME | MYSQL_TYPE_TIMESTAMP2 | MYSQL_TYPE_DATETIME2 ) } fn is_time(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_TIME | MYSQL_TYPE_TIME2) } fn is_date(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DATE | MYSQL_TYPE_NEWDATE) } fn is_text(&self) -> bool { use ColumnType::*; let is_defined_text = matches!( self.column_type(), MYSQL_TYPE_VARCHAR | MYSQL_TYPE_VAR_STRING | MYSQL_TYPE_STRING ); let 
is_bytes_but_text = matches!( self.column_type(), MYSQL_TYPE_TINY_BLOB | MYSQL_TYPE_MEDIUM_BLOB | MYSQL_TYPE_LONG_BLOB | MYSQL_TYPE_BLOB ) && self.character_set() != 63; is_defined_text || is_bytes_but_text } fn is_bytes(&self) -> bool { use ColumnType::*; let is_a_blob = matches!( self.column_type(), MYSQL_TYPE_TINY_BLOB | MYSQL_TYPE_MEDIUM_BLOB | MYSQL_TYPE_LONG_BLOB | MYSQL_TYPE_BLOB ) && self.character_set() == 63; let is_bits = self.column_type() == MYSQL_TYPE_BIT && self.column_length() > 1; is_a_blob || is_bits } fn is_bool(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_BIT && self.column_length() == 1 } fn is_json(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_JSON } fn is_enum(&self) -> bool { self.flags() == ColumnFlags::ENUM_FLAG || self.column_type() == ColumnType::MYSQL_TYPE_ENUM } fn is_null(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_NULL } } impl TakeRow for my::Row { fn take_result_row(&mut self) -> crate::Result<Vec<Value<'static>>> { fn convert(row: &mut my::Row, i: usize) -> crate::Result<Value<'static>> { let value = row.take(i).ok_or_else(|| { let msg = "Index out of bounds"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let column = row.columns_ref().get(i).ok_or_else(|| { let msg = "Index out of bounds"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let res = match value { #[cfg(feature = "json")] my::Value::Bytes(b) if column.is_json() => { serde_json::from_slice(&b).map(Value::json).map_err(|_| { let msg = "Unable to convert bytes to JSON"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })? } my::Value::Bytes(b) if column.is_enum() => { let s = String::from_utf8(b)?; Value::enum_variant(s) } #[cfg(feature = "bigdecimal")] my::Value::Bytes(b) if column.is_real() => { let s = String::from_utf8(b).map_err(|_| { let msg = "Could not convert NEWDECIMAL from bytes to String."; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let dec = s.parse().map_err(|_| { let msg = "Could not convert NEWDECIMAL string to a BigDecimal."; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; Value::numeric(dec) } my::Value::Bytes(b) if column.is_bool() => match b.as_slice() { [0] => Value::boolean(false), _ => Value::boolean(true), }, my::Value::Bytes(b) if column.character_set() == 63 => Value::bytes(b), my::Value::Bytes(s) => Value::text(String::from_utf8(s)?), my::Value::Int(i) => Value::integer(i), my::Value::UInt(i) => Value::integer(i64::try_from(i).map_err(|_| { let msg = "Unsigned integers larger than 9_223_372_036_854_775_807 are currently not handled."; let kind = ErrorKind::value_out_of_range(msg); Error::builder(kind).build() })?), my::Value::Float(f) => Value::from(f), my::Value::Double(f) => Value::from(f), #[cfg(feature = "chrono")] my::Value::Date(year, month, day, hour, min, sec, micro) => { if day == 0 || month == 0 { let msg = format!( "The column `{}` contained an invalid datetime value with either day or month set to zero.", column.name_str() ); let kind = ErrorKind::value_out_of_range(msg); return Err(Error::builder(kind).build()); } let time = NaiveTime::from_hms_micro(hour.into(), min.into(), sec.into(), micro); let date = NaiveDate::from_ymd(year.into(), month.into(), day.into()); let dt = NaiveDateTime::new(date, time); Value::datetime(DateTime::<Utc>::from_utc(dt, Utc)) } #[cfg(feature = "chrono")] my::Value::Time(is_neg, days, hours, minutes, seconds, micros) => { if is_neg { let kind = 
ErrorKind::conversion("Failed to convert a negative time"); return Err(Error::builder(kind).build()); } if days != 0 { let kind = ErrorKind::conversion("Failed to read a MySQL `time` as duration"); return Err(Error::builder(kind).build()); } let time = NaiveTime::from_hms_micro(hours.into(), minutes.into(), seconds.into(), micros); Value::time(time) } my::Value::NULL => match column { t if t.is_bool() => Value::Boolean(None), t if t.is_enum() => Value::Enum(None), t if t.is_null() => Value::Integer(None), t if t.is_integer() => Value::Integer(None), t if t.is_float() => Value::Float(None), t if t.is_double() => Value::Double(None), t if t.is_text() => Value::Text(None), t if t.is_bytes() => Value::Bytes(None), #[cfg(feature = "bigdecimal")] t if t.is_real() => Value::Numeric(None), #[cfg(feature = "chrono")] t if t.is_datetime() => Value::DateTime(None), #[cfg(feature = "chrono")] t if t.is_time() => Value::Time(None), #[cfg(feature = "chrono")] t if t.is_date() => Value::Date(None), #[cfg(feature = "json")] t if t.is_json() => Value::Json(None), typ => { let msg = format!( "Value of type {:?} is not supported with the current configuration", typ ); let kind = ErrorKind::conversion(msg); return Err(Error::builder(kind).build()); } }, #[cfg(not(feature = "chrono"))] typ => { let msg = format!( "Value of type {:?} is not supported with the current configuration", typ ); let kind = ErrorKind::conversion(msg); Err(Error::builder(kind).build())? } }; Ok(res) } let mut row = Vec::with_capacity(self.len()); for i in 0..self.len() { row.push(convert(self, i)?); } Ok(row) } }
use crate::{ ast::Value, connector::{queryable::TakeRow, TypeIdentifier}, error::{Error, ErrorKind}, }; #[cfg(feature = "chrono")] use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc}; use mysql_async::{ self as my, consts::{ColumnFlags, ColumnType}, }; use std::convert::TryFrom; #[tracing::instrument(skip(params))] pub fn conv_params<'a>(params: &[Value<'a>]) -> crate::Result<my::Params> { if params.is_empty() { Ok(my::Params::Empty) } else { let mut values = Vec::with_capacity(params.len()); for pv in params { let res = match pv { Value::Integer(i) => i.map(my::Value::Int), Value::Float(f) => f.map(my::Value::Float), Value::Double(f) => f.map(my::Value::Double), Value::Text(s) => s.clone().map(|s| my::Value::Bytes((&*s).as_bytes().to_vec())), Value::Bytes(bytes) => bytes.clone().map(|bytes| my::Value::Bytes(bytes.into_owned())), Value::Enum(s) => s.clone().map(|s| my::Value::Bytes((&*s).as_bytes().to_vec())), Value::Boolean(b) => b.map(|b| my::Value::Int(b as i64)), Value::Char(c) => c.map(|c| my::Value::Bytes(vec![c as u8])), Value::Xml(s) => s.as_ref().map(|s| my::Value::Bytes((s).as_bytes().to_vec())), Value::Array(_) => { let msg = "Arrays are not supported in MySQL."; let kind = ErrorKind::conversion(msg); let mut builder = Error::builder(kind); builder.set_original_message(msg); return Err(builder.build()); } #[cfg(feature = "bigdecimal")] Value::Numeric(f) => f.as_ref().map(|f| my::Value::Bytes(f.to_string().as_bytes().to_vec())), #[cfg(feature = "json")] Value::Json(s) => match s { Some(ref s) => { let json = serde_json::to_string(s)?; let bytes = json.into_bytes(); Some(my::Value::Bytes(bytes)) } None => None, }, #[cfg(feature = "uuid")] Value::Uuid(u) => u.map(|u| my::Value::Bytes(u.to_hyphenated().to_string().into_bytes())), #[cfg(feature = "chrono")] Value::Date(d) => { d.map(|d| my::Value::Date(d.year() as u16, d.month() as u8, d.day() as u8, 0, 0, 0, 0)) } #[cfg(feature = "chrono")] Value::Time(t) => { t.map(|t| my::Value::Time(false, 0, t.hour() as u8, t.minute() as u8, t.second() as u8, 0)) } #[cfg(feature = "chrono")] Value::DateTime(dt) => dt.map(|dt| { my::Value::Date( dt.year() as u16, dt.month() as u8, dt.day() as u8, dt.hour() as u8, dt.minute() as u8, dt.second() as u8, dt.timestamp_subsec_micros(), ) }), }; match res { Some(val) => values.push(val), None => values.push(my::Value::NULL), } } Ok(my::Params::Positional(values)) } } impl TypeIdentifier for my::Column { fn is_real(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DECIMAL | MYSQL_TYPE_NEWDECIMAL) } fn is_float(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_FLOAT) } fn is_double(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DOUBLE) } fn is_integer(&self) -> bool { use ColumnType::*; matches!( self.column_type(), MYSQL_TYPE_TINY | MYSQL_TYPE_SHORT | MYSQL_TYPE_LONG | MYSQL_TYPE_LONGLONG | MYSQL_TYPE_YEAR | MYSQL_TYPE_INT24 ) } fn is_datetime(&self) -> bool { use ColumnType::*; matches!( self.column_type(), MYSQL_TYPE_TIMESTAMP | MYSQL_TYPE_DATETIME | MYSQL_TYPE_TIMESTAMP2 | MYSQL_TYPE_DATETIME2 ) } fn is_time(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_TIME | MYSQL_TYPE_TIME2) } fn is_date(&self) -> bool { use ColumnType::*; matches!(self.column_type(), MYSQL_TYPE_DATE | MYSQL_TYPE_NEWDATE) } fn is_text(&self) -> bool { use ColumnType::*; let is_defined_text = matches!( self.column_type(), MYSQL_TYPE_VARCHAR | MYSQL_TYPE_VAR_STRING | MYSQL_TYPE_STRING ); let 
is_bytes_but_text = matches!( self.column_type(), MYSQL_TYPE_TINY_BLOB | MYSQL_TYPE_MEDIUM_BLOB | MYSQL_TYPE_LONG_BLOB | MYSQL_TYPE_BLOB ) && self.character_set() != 63; is_defined_text || is_bytes_but_text } fn is_bytes(&self) -> bool { use ColumnType::*; let is_a_blob = matches!( self.column_type(), MYSQL_TYPE_TINY_BLOB | MYSQL_TYPE_MEDIUM_BLOB | MYSQL_TYPE_LONG_BLOB | MYSQL_TYPE_BLOB ) && self.character_set() == 63; let is_bits = self.column_type() == MYSQL_TYPE_BIT && self.column_length() > 1; is_a_blob || is_bits } fn is_bool(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_BIT && self.column_length() == 1 } fn is_json(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_JSON } fn is_enum(&self) -> bool { self.flags() == ColumnFlags::ENUM_FLAG || self.column_type() == ColumnType::MYSQL_TYPE_ENUM } fn is_null(&self) -> bool { self.column_type() == ColumnType::MYSQL_TYPE_NULL } } impl TakeRow for my::Row { fn take_result_row(&mut self) -> crate::Result<Vec<Value<'static>>> {
let mut row = Vec::with_capacity(self.len()); for i in 0..self.len() { row.push(convert(self, i)?); } Ok(row) } }
fn convert(row: &mut my::Row, i: usize) -> crate::Result<Value<'static>> { let value = row.take(i).ok_or_else(|| { let msg = "Index out of bounds"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let column = row.columns_ref().get(i).ok_or_else(|| { let msg = "Index out of bounds"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let res = match value { #[cfg(feature = "json")] my::Value::Bytes(b) if column.is_json() => { serde_json::from_slice(&b).map(Value::json).map_err(|_| { let msg = "Unable to convert bytes to JSON"; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })? } my::Value::Bytes(b) if column.is_enum() => { let s = String::from_utf8(b)?; Value::enum_variant(s) } #[cfg(feature = "bigdecimal")] my::Value::Bytes(b) if column.is_real() => { let s = String::from_utf8(b).map_err(|_| { let msg = "Could not convert NEWDECIMAL from bytes to String."; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; let dec = s.parse().map_err(|_| { let msg = "Could not convert NEWDECIMAL string to a BigDecimal."; let kind = ErrorKind::conversion(msg); Error::builder(kind).build() })?; Value::numeric(dec) } my::Value::Bytes(b) if column.is_bool() => match b.as_slice() { [0] => Value::boolean(false), _ => Value::boolean(true), }, my::Value::Bytes(b) if column.character_set() == 63 => Value::bytes(b), my::Value::Bytes(s) => Value::text(String::from_utf8(s)?), my::Value::Int(i) => Value::integer(i), my::Value::UInt(i) => Value::integer(i64::try_from(i).map_err(|_| { let msg = "Unsigned integers larger than 9_223_372_036_854_775_807 are currently not handled."; let kind = ErrorKind::value_out_of_range(msg); Error::builder(kind).build() })?), my::Value::Float(f) => Value::from(f), my::Value::Double(f) => Value::from(f), #[cfg(feature = "chrono")] my::Value::Date(year, month, day, hour, min, sec, micro) => { if day == 0 || month == 0 { let msg = format!( "The column `{}` contained an invalid datetime value with either day or month set to zero.", column.name_str() ); let kind = ErrorKind::value_out_of_range(msg); return Err(Error::builder(kind).build()); } let time = NaiveTime::from_hms_micro(hour.into(), min.into(), sec.into(), micro); let date = NaiveDate::from_ymd(year.into(), month.into(), day.into()); let dt = NaiveDateTime::new(date, time); Value::datetime(DateTime::<Utc>::from_utc(dt, Utc)) } #[cfg(feature = "chrono")] my::Value::Time(is_neg, days, hours, minutes, seconds, micros) => { if is_neg { let kind = ErrorKind::conversion("Failed to convert a negative time"); return Err(Error::builder(kind).build()); } if days != 0 { let kind = ErrorKind::conversion("Failed to read a MySQL `time` as duration"); return Err(Error::builder(kind).build()); } let time = NaiveTime::from_hms_micro(hours.into(), minutes.into(), seconds.into(), micros); Value::time(time) } my::Value::NULL => match column { t if t.is_bool() => Value::Boolean(None), t if t.is_enum() => Value::Enum(None), t if t.is_null() => Value::Integer(None), t if t.is_integer() => Value::Integer(None), t if t.is_float() => Value::Float(None), t if t.is_double() => Value::Double(None), t if t.is_text() => Value::Text(None), t if t.is_bytes() => Value::Bytes(None), #[cfg(feature = "bigdecimal")] t if t.is_real() => Value::Numeric(None), #[cfg(feature = "chrono")] t if t.is_datetime() => Value::DateTime(None), #[cfg(feature = "chrono")] t if t.is_time() => Value::Time(None), #[cfg(feature = "chrono")] t if t.is_date() => Value::Date(None), #[cfg(feature = "json")] 
t if t.is_json() => Value::Json(None), typ => { let msg = format!( "Value of type {:?} is not supported with the current configuration", typ ); let kind = ErrorKind::conversion(msg); return Err(Error::builder(kind).build()); } }, #[cfg(not(feature = "chrono"))] typ => { let msg = format!( "Value of type {:?} is not supported with the current configuration", typ ); let kind = ErrorKind::conversion(msg); Err(Error::builder(kind).build())? } }; Ok(res) }
function_block-full_function
[ { "content": "#[tracing::instrument(skip(params))]\n\npub fn conv_params<'a>(params: &'a [Value<'a>]) -> crate::Result<Vec<&'a dyn ToSql>> {\n\n let mut converted = Vec::with_capacity(params.len());\n\n\n\n for param in params.iter() {\n\n converted.push(param as &dyn ToSql)\n\n }\n\n\n\n Ok(converted)\n\n}\n\n\n\nimpl<'a> ToSql for Value<'a> {\n\n fn to_sql(&self) -> ColumnData<'_> {\n\n match self {\n\n Value::Integer(val) => val.to_sql(),\n\n Value::Float(val) => val.to_sql(),\n\n Value::Double(val) => val.to_sql(),\n\n Value::Text(val) => val.to_sql(),\n\n Value::Bytes(val) => val.to_sql(),\n\n Value::Enum(val) => val.to_sql(),\n\n Value::Boolean(val) => val.to_sql(),\n", "file_path": "src/connector/mssql/conversion.rs", "rank": 1, "score": 248688.32721732827 }, { "content": "#[cfg(feature = \"json\")]\n\nfn value_into_json(value: &Value) -> Option<serde_json::Value> {\n\n match value.clone() {\n\n // MariaDB returns JSON as text\n\n Value::Text(Some(text)) => {\n\n let json: serde_json::Value =\n\n serde_json::from_str(&text).expect(format!(\"expected parsable text to json, found {}\", text).as_str());\n\n\n\n Some(json)\n\n }\n\n Value::Json(Some(json)) => Some(json),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(all(feature = \"json\", feature = \"mysql\"))]\n\n#[test_each_connector(tags(\"mysql\"))]\n\nasync fn json_extract_path_fun(api: &mut dyn TestApi) -> crate::Result<()> {\n\n let table = api.create_table(&format!(\"{}, obj json\", api.autogen_id(\"id\"))).await?;\n\n\n\n let insert = Insert::single_into(&table).value(\"obj\", serde_json::json!({ \"a\": { \"b\": \"c\" } }));\n", "file_path": "src/tests/query.rs", "rank": 2, "score": 214970.63304924985 }, { "content": "#[tracing::instrument(skip(params))]\n\npub fn conv_params<'a>(params: &'a [Value<'a>]) -> Vec<&'a (dyn types::ToSql + Sync)> {\n\n params.iter().map(|x| x as &(dyn ToSql + Sync)).collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "src/connector/postgres/conversion.rs", "rank": 3, "score": 201321.12909720122 }, { "content": "/// A quick alias to create a default value expression.\n\npub fn default_value() -> Expression<'static> {\n\n Expression {\n\n kind: ExpressionKind::Default,\n\n alias: None,\n\n }\n\n}\n\n\n\nexpression!(Row, Row);\n\n\n\nimpl<'a> From<Function<'a>> for Expression<'a> {\n\n fn from(f: Function<'a>) -> Self {\n\n Expression {\n\n kind: ExpressionKind::Function(Box::new(f)),\n\n alias: None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> From<Raw<'a>> for Expression<'a> {\n\n fn from(r: Raw<'a>) -> Self {\n", "file_path": "src/ast/expression.rs", "rank": 4, "score": 194614.66417770076 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"postgresql\")))]\n\n#[cfg(all(feature = \"json\", feature = \"postgresql\"))]\n\npub fn row_to_json<'a, T>(expr: T, pretty_print: bool) -> Function<'a>\n\nwhere\n\n T: Into<Table<'a>>,\n\n{\n\n let fun = RowToJson {\n\n expr: expr.into(),\n\n pretty_print,\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/row_to_json.rs", "rank": 5, "score": 185990.12487579838 }, { "content": "pub trait ValueIndex<RowType, ReturnValue>: private::Sealed {\n\n #[doc(hidden)]\n\n fn index_into(self, row: &RowType) -> &ReturnValue;\n\n}\n\n\n\nmod private {\n", "file_path": "src/connector/result_set/index.rs", "rank": 6, "score": 172647.7838599866 }, { "content": "#[cfg(all(feature = \"json\", any(feature = \"postgresql\", feature = \"mysql\")))]\n\npub fn json_extract<'a, C, P>(column: C, path: P, extract_as_string: bool) -> Function<'a>\n\nwhere\n\n C: 
Into<Expression<'a>>,\n\n P: Into<JsonPath<'a>>,\n\n{\n\n let fun = JsonExtract {\n\n column: Box::new(column.into()),\n\n path: path.into(),\n\n extract_as_string,\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/json_extract.rs", "rank": 7, "score": 171585.13685647954 }, { "content": "#[doc(hidden)]\n\npub fn make_row(cols: Vec<(&'static str, Value<'static>)>) -> ResultRow {\n\n let mut columns = Vec::with_capacity(cols.len());\n\n let mut values = Vec::with_capacity(cols.len());\n\n\n\n for (name, value) in cols.into_iter() {\n\n columns.push(name.to_owned());\n\n values.push(value);\n\n }\n\n\n\n ResultRow {\n\n values,\n\n columns: std::sync::Arc::new(columns),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use chrono::{DateTime, Utc};\n\n use serde::Deserialize;\n", "file_path": "src/serde.rs", "rank": 8, "score": 164662.52955746534 }, { "content": "fn function_returns_result(func: &ItemFn) -> bool {\n\n match func.sig.output {\n\n syn::ReturnType::Default => false,\n\n // just assume it's a result\n\n syn::ReturnType::Type(_, _) => true,\n\n }\n\n}\n\n\n", "file_path": "test-macros/src/lib.rs", "rank": 9, "score": 162373.6867031353 }, { "content": "/// Deserialize a row into any type implementing `Deserialize`.\n\n///\n\n/// ```\n\n/// # use serde::Deserialize;\n\n/// # use quaint::ast::Value;\n\n/// #\n\n/// # #[derive(Deserialize, Debug, PartialEq)]\n\n/// # struct User {\n\n/// # id: u64,\n\n/// # name: String,\n\n/// # }\n\n/// #\n\n/// # fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n/// #\n\n/// # let row = quaint::serde::make_row(vec![\n\n/// # (\"id\", Value::from(12)),\n\n/// # (\"name\", \"Georgina\".into()),\n\n/// # ]);\n\n/// #\n\n/// #\n\n/// let user: User = quaint::serde::from_row(row)?;\n\n///\n\n/// assert_eq!(user, User { name: \"Georgina\".to_string(), id: 12 });\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn from_row<T: DeserializeOwned>(row: ResultRow) -> crate::Result<T> {\n\n let deserializer = RowDeserializer(row);\n\n\n\n T::deserialize(deserializer).map_err(|e| Error::builder(ErrorKind::FromRowError(e)).build())\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 10, "score": 148786.40818717793 }, { "content": "/// Converts the value into a state to skip parameterization.\n\n///\n\n/// Must be used carefully to avoid SQL injections.\n\npub trait IntoRaw<'a> {\n\n fn raw(self) -> Raw<'a>;\n\n}\n\n\n\nimpl<'a, T> IntoRaw<'a> for T\n\nwhere\n\n T: Into<Value<'a>>,\n\n{\n\n fn raw(self) -> Raw<'a> {\n\n Raw(self.into())\n\n }\n\n}\n\n\n\n/// A value we must parameterize for the prepared statement. 
Null values should be\n\n/// defined by their corresponding type variants with a `None` value for best\n\n/// compatibility.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Value<'a> {\n\n /// 64-bit signed integer.\n\n Integer(Option<i64>),\n", "file_path": "src/ast/values.rs", "rank": 11, "score": 148126.17148614768 }, { "content": "/// Deserialize each row of a [`ResultSet`](../connector/struct.ResultSet.html).\n\n///\n\n/// For an example, see the docs for [`from_row`](fn.from_row.html).\n\npub fn from_rows<T: DeserializeOwned>(result_set: ResultSet) -> crate::Result<Vec<T>> {\n\n let mut deserialized_rows = Vec::with_capacity(result_set.len());\n\n\n\n for row in result_set {\n\n deserialized_rows.push(from_row(row)?)\n\n }\n\n\n\n Ok(deserialized_rows)\n\n}\n\n\n", "file_path": "src/serde.rs", "rank": 12, "score": 141549.17887836363 }, { "content": "/// We do this because Intellij only recognizes functions annotated with #[test]\n\n/// *before* macro expansion as tests. This way we can add it manually, and the\n\n/// test macro will strip it.\n\nfn strip_test_attribute(function: &mut ItemFn) {\n\n let new_attrs = function\n\n .attrs\n\n .drain(..)\n\n .filter(|attr| attr.path.segments.iter().last().unwrap().ident != \"test\")\n\n .collect();\n\n\n\n function.attrs = new_attrs;\n\n}\n", "file_path": "test-macros/src/lib.rs", "rank": 13, "score": 126979.84342748832 }, { "content": "pub fn test_each_connector_impl(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n let attributes_meta: syn::AttributeArgs = parse_macro_input!(attr as AttributeArgs);\n\n let args = TestEachConnectorArgs::from_list(&attributes_meta);\n\n\n\n let mut test_function = parse_macro_input!(input as ItemFn);\n\n super::strip_test_attribute(&mut test_function);\n\n\n\n let tests = match args {\n\n Ok(args) => test_each_connector_async_wrapper_functions(&args, &test_function),\n\n Err(err) => return err.write_errors().into(),\n\n };\n\n\n\n let output = quote! 
{\n\n #(#tests)*\n\n\n\n #test_function\n\n };\n\n\n\n output.into()\n\n}\n\n\n", "file_path": "test-macros/src/test_each_connector.rs", "rank": 14, "score": 125197.28425516684 }, { "content": "/// A quick alias to create an asterisk to a table.\n\npub fn asterisk() -> Expression<'static> {\n\n Expression {\n\n kind: ExpressionKind::Asterisk(None),\n\n alias: None,\n\n }\n\n}\n\n\n", "file_path": "src/ast/expression.rs", "rank": 15, "score": 120003.55667333175 }, { "content": "/// A number from 1 to n in specified order\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let fun = Function::from(row_number().order_by(\"created_at\").partition_by(\"name\"));\n\n///\n\n/// let query = Select::from_table(\"users\")\n\n/// .column(\"id\")\n\n/// .value(fun.alias(\"num\"));\n\n///\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n///\n\n/// assert_eq!(\n\n/// \"SELECT `id`, ROW_NUMBER() OVER(PARTITION BY `name` ORDER BY `created_at`) AS `num` FROM `users`\",\n\n/// sql\n\n/// );\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn row_number<'a>() -> RowNumber<'a> {\n\n RowNumber::default()\n\n}\n", "file_path": "src/ast/function/row_number.rs", "rank": 16, "score": 110832.20672045508 }, { "content": "fn string_to_bits(s: &str) -> crate::Result<BitVec> {\n\n use bit_vec::*;\n\n\n\n let mut bits = BitVec::with_capacity(s.len());\n\n\n\n for c in s.chars() {\n\n match c {\n\n '0' => bits.push(false),\n\n '1' => bits.push(true),\n\n _ => {\n\n let msg = \"Unexpected character for bits input. Expected only 1 and 0.\";\n\n let kind = ErrorKind::conversion(msg);\n\n\n\n return Err(Error::builder(kind).build());\n\n }\n\n }\n\n }\n\n\n\n Ok(bits)\n\n}\n\n\n", "file_path": "src/connector/postgres/conversion.rs", "rank": 17, "score": 109610.39665953748 }, { "content": "fn bits_to_string(bits: &BitVec) -> crate::Result<String> {\n\n let mut s = String::with_capacity(bits.len());\n\n\n\n for bit in bits {\n\n if bit {\n\n s.push('1');\n\n } else {\n\n s.push('0');\n\n }\n\n }\n\n\n\n Ok(s)\n\n}\n", "file_path": "src/connector/postgres/conversion.rs", "rank": 18, "score": 107715.33147439267 }, { "content": "/// Calculates the minimum value of a numeric column.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(min(\"age\"));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT MIN(`age`) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn min<'a, C>(col: C) -> Function<'a>\n\nwhere\n\n C: Into<Column<'a>>,\n\n{\n\n let fun = Minimum { column: col.into() };\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/minimum.rs", "rank": 19, "score": 104976.64872924492 }, { "content": "/// Calculates the average value of a numeric column.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(avg(\"age\"));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT AVG(`age`) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn avg<'a, C>(col: C) -> Function<'a>\n\nwhere\n\n C: Into<Column<'a>>,\n\n{\n\n let fun = Average { column: col.into() };\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/average.rs", "rank": 20, "score": 104976.64872924492 }, { "content": "/// Calculates the 
maximum value of a numeric column.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(max(\"age\"));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT MAX(`age`) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn max<'a, C>(col: C) -> Function<'a>\n\nwhere\n\n C: Into<Column<'a>>,\n\n{\n\n let fun = Maximum { column: col.into() };\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/maximum.rs", "rank": 21, "score": 104976.64872924492 }, { "content": "/// Calculates the sum value of a numeric column.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}, col};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(sum(col!(\"age\")).alias(\"sum\"));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT SUM(`age`) AS `sum` FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn sum<'a, E>(expr: E) -> Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n{\n\n let fun = Sum {\n\n expr: Box::new(expr.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/sum.rs", "rank": 22, "score": 104976.11378987264 }, { "content": "/// Count of the underlying table where the given expression is not null.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(count(asterisk()));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT COUNT(*) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn count<'a, T>(expr: T) -> Function<'a>\n\nwhere\n\n T: Into<Expression<'a>>,\n\n{\n\n let fun = Count {\n\n exprs: vec![expr.into()],\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/count.rs", "rank": 23, "score": 104974.70946158603 }, { "content": "/// Converts the result of the expression into lowercase string.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(lower(Column::from(\"name\")));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT LOWER(`name`) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn lower<'a, E>(expression: E) -> Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n{\n\n let fun = Lower {\n\n expression: Box::new(expression.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/lower.rs", "rank": 24, "score": 104974.59132105668 }, { "content": "/// Converts the result of the expression into uppercase string.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(upper(Column::from(\"name\")));\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT UPPER(`name`) FROM `users`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn upper<'a, E>(expression: E) -> Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n{\n\n let fun = Upper {\n\n expression: Box::new(expression.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/upper.rs", "rank": 25, "score": 104974.59132105668 }, { "content": "/// Returns the first non-null 
argument\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let exprs: Vec<Expression> = vec![\n\n/// Column::from((\"users\", \"company\")).into(),\n\n/// Value::text(\"Individual\").into(),\n\n/// ];\n\n/// let query = Select::from_table(\"users\").value(coalesce(exprs));\n\n/// let (sql, params) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT COALESCE(`users`.`company`, ?) FROM `users`\", sql);\n\n/// assert_eq!(vec![Value::text(\"Individual\")], params);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn coalesce<'a, T, V>(exprs: V) -> Function<'a>\n\nwhere\n\n T: Into<Expression<'a>>,\n\n V: Into<Vec<T>>,\n\n{\n\n let fun = Coalesce {\n\n exprs: exprs.into().into_iter().map(|e| e.into()).collect(),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/coalesce.rs", "rank": 26, "score": 101415.13199438891 }, { "content": "/// Aggregates the given field into a string.\n\n///\n\n/// ```rust\n\n/// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n/// # fn main() -> Result<(), quaint::error::Error> {\n\n/// let query = Select::from_table(\"users\").value(aggregate_to_string(Column::new(\"firstName\")))\n\n/// .group_by(\"firstName\");\n\n///\n\n/// let (sql, _) = Sqlite::build(query)?;\n\n/// assert_eq!(\"SELECT GROUP_CONCAT(`firstName`) FROM `users` GROUP BY `firstName`\", sql);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn aggregate_to_string<'a, T>(expr: T) -> Function<'a>\n\nwhere\n\n T: Into<Expression<'a>>,\n\n{\n\n let fun = AggregateToString {\n\n value: Box::new(expr.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/aggregate_to_string.rs", "rank": 27, "score": 101084.34179348158 }, { "content": "#[cfg(feature = \"chrono\")]\n\nstruct TimeTz(chrono::NaiveTime);\n\n\n\n#[cfg(feature = \"chrono\")]\n\nimpl<'a> FromSql<'a> for TimeTz {\n\n fn from_sql(_ty: &PostgresType, raw: &'a [u8]) -> Result<TimeTz, Box<dyn std::error::Error + Sync + Send>> {\n\n // We assume UTC.\n\n let time: chrono::NaiveTime = chrono::NaiveTime::from_sql(&PostgresType::TIMETZ, &raw[..8])?;\n\n Ok(TimeTz(time))\n\n }\n\n\n\n fn accepts(ty: &PostgresType) -> bool {\n\n ty == &PostgresType::TIMETZ\n\n }\n\n}\n\n\n\n/// This implementation of FromSql assumes that the precision for money fields is configured to the default\n\n/// of 2 decimals.\n\n///\n\n/// Postgres docs: https://www.postgresql.org/docs/current/datatype-money.html\n", "file_path": "src/connector/postgres/conversion.rs", "rank": 28, "score": 100215.5502953635 }, { "content": "fn to_postgres(decimal: &BigDecimal) -> crate::Result<PostgresDecimal<Vec<i16>>> {\n\n if decimal.is_zero() {\n\n return Ok(PostgresDecimal {\n\n neg: false,\n\n weight: 0,\n\n scale: 0,\n\n digits: vec![],\n\n });\n\n }\n\n\n\n // NOTE: this unfortunately copies the BigInt internally\n\n let (integer, exp) = decimal.as_bigint_and_exponent();\n\n\n\n // scale is only nonzero when we have fractional digits\n\n // since `exp` is the _negative_ decimal exponent, it tells us\n\n // exactly what our scale should be\n\n let scale: u16 = cmp::max(0, exp).try_into()?;\n\n\n\n let (sign, uint) = integer.into_parts();\n\n let mut mantissa = uint.to_u128().unwrap();\n", "file_path": "src/connector/postgres/conversion/decimal.rs", "rank": 29, "score": 98620.27426054211 }, { "content": "#[proc_macro_attribute]\n\npub fn test_each_connector(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n test_each_connector::test_each_connector_impl(attr, 
input)\n\n}\n\n\n", "file_path": "test-macros/src/lib.rs", "rank": 30, "score": 97694.45910581562 }, { "content": "#[cfg(feature = \"postgresql\")]\n\npub fn text_search<'a, T: Clone>(exprs: &[T]) -> super::Function<'a>\n\nwhere\n\n T: Into<Expression<'a>>,\n\n{\n\n let exprs: Vec<Expression> = exprs.iter().map(|c| c.clone().into()).collect();\n\n let fun = TextSearch { exprs };\n\n\n\n fun.into()\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\n/// Holds the expressions & query on which to perform a text-search ranking compute\n\npub struct TextSearchRelevance<'a> {\n\n pub(crate) exprs: Vec<Expression<'a>>,\n\n pub(crate) query: Cow<'a, str>,\n\n}\n\n\n\n/// Computes the relevance score of a full-text search query against some expressions.\n\n///\n\n/// ```rust\n", "file_path": "src/ast/function/search.rs", "rank": 31, "score": 96275.39232815176 }, { "content": "pub trait TakeRow {\n\n fn take_result_row(&mut self) -> crate::Result<Vec<Value<'static>>>;\n\n}\n\n\n", "file_path": "src/connector/queryable.rs", "rank": 32, "score": 90632.2517377449 }, { "content": "use crate::ast::*;\n\nuse crate::error::{Error, ErrorKind};\n\n\n\n#[cfg(feature = \"bigdecimal\")]\n\nuse bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive};\n\n#[cfg(feature = \"chrono\")]\n\nuse chrono::{DateTime, NaiveDate, NaiveTime, Utc};\n\n#[cfg(feature = \"json\")]\n\nuse serde_json::{Number, Value as JsonValue};\n\nuse std::{\n\n borrow::{Borrow, Cow},\n\n convert::TryFrom,\n\n fmt,\n\n};\n\n#[cfg(feature = \"time\")]\n\nuse time::{Date, OffsetDateTime, Time};\n\n#[cfg(feature = \"uuid\")]\n\nuse uuid::Uuid;\n\n\n\n/// A value written to the query as-is without parameterization.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Raw<'a>(pub(crate) Value<'a>);\n\n\n\n/// Converts the value into a state to skip parameterization.\n\n///\n\n/// Must be used carefully to avoid SQL injections.\n", "file_path": "src/ast/values.rs", "rank": 33, "score": 90579.27092311786 }, { "content": " pub fn to_bytes(&self) -> Option<Vec<u8>> {\n\n match self {\n\n Value::Text(Some(cow)) => Some(cow.to_string().into_bytes()),\n\n Value::Bytes(Some(cow)) => Some(cow.to_owned().into()),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is an integer.\n\n pub const fn is_integer(&self) -> bool {\n\n matches!(self, Value::Integer(_))\n\n }\n\n\n\n /// Returns an `i64` if the value is an integer, otherwise `None`.\n\n pub const fn as_i64(&self) -> Option<i64> {\n\n match self {\n\n Value::Integer(i) => *i,\n\n _ => None,\n\n }\n\n }\n", "file_path": "src/ast/values.rs", "rank": 34, "score": 90576.52915476722 }, { "content": " pub const fn as_datetime(&self) -> Option<DateTime<Utc>> {\n\n match self {\n\n Value::DateTime(dt) => *dt,\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a Date.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn is_date(&self) -> bool {\n\n matches!(self, Value::Date(_))\n\n }\n\n\n\n /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn as_date(&self) -> Option<NaiveDate> {\n\n match self {\n\n Value::Date(dt) => *dt,\n", "file_path": "src/ast/values.rs", "rank": 35, "score": 90572.49421485234 }, { "content": " Value::Bytes(Some(cow)) => String::from_utf8(cow.into_owned()).ok(),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns whether this value is the `Bytes` variant.\n\n pub const fn 
is_bytes(&self) -> bool {\n\n matches!(self, Value::Bytes(_))\n\n }\n\n\n\n /// Returns a bytes slice if the value is text or a byte slice, otherwise `None`.\n\n pub fn as_bytes(&self) -> Option<&[u8]> {\n\n match self {\n\n Value::Text(Some(cow)) => Some(cow.as_ref().as_bytes()),\n\n Value::Bytes(Some(cow)) => Some(cow.as_ref()),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a cloned `Vec<u8>` if the value is text or a byte slice, otherwise `None`.\n", "file_path": "src/ast/values.rs", "rank": 36, "score": 90571.99463566198 }, { "content": " /// Returns an UUID if the value is of UUID type, otherwise `None`.\n\n #[cfg(feature = \"uuid\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"uuid\")))]\n\n pub const fn as_uuid(&self) -> Option<Uuid> {\n\n match self {\n\n Value::Uuid(u) => *u,\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a DateTime.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn is_datetime(&self) -> bool {\n\n matches!(self, Value::DateTime(_))\n\n }\n\n\n\n /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n", "file_path": "src/ast/values.rs", "rank": 37, "score": 90570.37679389576 }, { "content": " _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a `Time`.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn is_time(&self) -> bool {\n\n matches!(self, Value::Time(_))\n\n }\n\n\n\n /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn as_time(&self) -> Option<NaiveTime> {\n\n match self {\n\n Value::Time(time) => *time,\n\n _ => None,\n\n }\n\n }\n", "file_path": "src/ast/values.rs", "rank": 38, "score": 90569.51526418817 }, { "content": " fn try_from(value: Value<'a>) -> Result<bool, Self::Error> {\n\n value\n\n .as_bool()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a bool\")).build())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"chrono\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\nimpl<'a> TryFrom<Value<'a>> for DateTime<Utc> {\n\n type Error = Error;\n\n\n\n fn try_from(value: Value<'a>) -> Result<DateTime<Utc>, Self::Error> {\n\n value\n\n .as_datetime()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a datetime\")).build())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"time\")]\n", "file_path": "src/ast/values.rs", "rank": 39, "score": 90569.41299584224 }, { "content": " match self {\n\n Value::Boolean(b) => *b,\n\n // For schemas which don't tag booleans\n\n Value::Integer(Some(i)) if *i == 0 || *i == 1 => Some(*i == 1),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is an Array.\n\n pub const fn is_array(&self) -> bool {\n\n matches!(self, Value::Array(_))\n\n }\n\n\n\n /// `true` if the `Value` is of UUID type.\n\n #[cfg(feature = \"uuid\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"uuid\")))]\n\n pub const fn is_uuid(&self) -> bool {\n\n matches!(self, Value::Uuid(_))\n\n }\n\n\n", "file_path": "src/ast/values.rs", "rank": 40, "score": 90568.34853722621 }, { "content": " /// `true` if the `Value` is a `Time`.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn is_time(&self) -> bool {\n\n matches!(self, 
Value::Time(_))\n\n }\n\n\n\n /// Returns a `NaiveTime` if the value is a `Time`, otherwise `None`.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn as_time(&self) -> Option<Time> {\n\n match self {\n\n Value::Time(time) => *time,\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nvalue!(val: i64, Integer, val);\n\nvalue!(val: bool, Boolean, val);\n", "file_path": "src/ast/values.rs", "rank": 41, "score": 90566.32825306754 }, { "content": " #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\n pub fn into_json(self) -> Option<serde_json::Value> {\n\n match self {\n\n Value::Json(Some(j)) => Some(j),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a Vec<T> if the value is an array of T, otherwise `None`.\n\n pub fn into_vec<T>(self) -> Option<Vec<T>>\n\n where\n\n // Implement From<Value>\n\n T: TryFrom<Value<'a>>,\n\n {\n\n match self {\n\n Value::Array(Some(vec)) => {\n\n let rslt: Result<Vec<_>, _> = vec.into_iter().map(T::try_from).collect();\n\n match rslt {\n\n Err(_) => None,\n\n Ok(values) => Some(values),\n", "file_path": "src/ast/values.rs", "rank": 42, "score": 90566.17441952627 }, { "content": " Value::Time(time) => time.map(|time| serde_json::Value::String(format!(\"{}\", time))),\n\n };\n\n\n\n match res {\n\n Some(val) => val,\n\n None => serde_json::Value::Null,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Value<'a> {\n\n /// Creates a new integer value.\n\n pub fn integer<I>(value: I) -> Self\n\n where\n\n I: Into<i64>,\n\n {\n\n Value::Integer(Some(value.into()))\n\n }\n\n\n\n /// Creates a new decimal value.\n", "file_path": "src/ast/values.rs", "rank": 43, "score": 90566.1635664913 }, { "content": " pub const fn is_numeric(&self) -> bool {\n\n matches!(self, Value::Numeric(_) | Value::Float(_) | Value::Double(_))\n\n }\n\n\n\n /// Returns a bigdecimal, if the value is a numeric, float or double value,\n\n /// otherwise `None`.\n\n #[cfg(feature = \"bigdecimal\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"bigdecimal\")))]\n\n pub fn into_numeric(self) -> Option<BigDecimal> {\n\n match self {\n\n Value::Numeric(d) => d,\n\n Value::Float(f) => f.and_then(BigDecimal::from_f32),\n\n Value::Double(f) => f.and_then(BigDecimal::from_f64),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a reference to a bigdecimal, if the value is a numeric.\n\n /// Otherwise `None`.\n\n #[cfg(feature = \"bigdecimal\")]\n", "file_path": "src/ast/values.rs", "rank": 44, "score": 90565.74731459357 }, { "content": " #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"bigdecimal\")))]\n\n pub const fn as_numeric(&self) -> Option<&BigDecimal> {\n\n match self {\n\n Value::Numeric(d) => d.as_ref(),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a boolean value.\n\n pub const fn is_bool(&self) -> bool {\n\n match self {\n\n Value::Boolean(_) => true,\n\n // For schemas which don't tag booleans\n\n Value::Integer(Some(i)) if *i == 0 || *i == 1 => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns a bool if the value is a boolean, otherwise `None`.\n\n pub const fn as_bool(&self) -> Option<bool> {\n", "file_path": "src/ast/values.rs", "rank": 45, "score": 90565.68107971853 }, { "content": " Value::Array(v) => v.is_none(),\n\n Value::Xml(s) => s.is_none(),\n\n #[cfg(feature = \"bigdecimal\")]\n\n Value::Numeric(r) => r.is_none(),\n\n #[cfg(feature = \"uuid\")]\n\n Value::Uuid(u) => u.is_none(),\n\n #[cfg(feature = \"chrono\")]\n\n Value::DateTime(dt) => dt.is_none(),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Date(d) => 
d.is_none(),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Time(t) => t.is_none(),\n\n #[cfg(feature = \"json\")]\n\n Value::Json(json) => json.is_none(),\n\n\n\n #[cfg(feature = \"time\")]\n\n Value::DateTime(dt) => dt.is_none(),\n\n #[cfg(feature = \"time\")]\n\n Value::Date(d) => d.is_none(),\n\n #[cfg(feature = \"time\")]\n", "file_path": "src/ast/values.rs", "rank": 46, "score": 90565.54237880564 }, { "content": " Value::Time(t) => t.is_none(),\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is text.\n\n pub const fn is_text(&self) -> bool {\n\n matches!(self, Value::Text(_))\n\n }\n\n\n\n /// Returns a &str if the value is text, otherwise `None`.\n\n pub fn as_str(&self) -> Option<&str> {\n\n match self {\n\n Value::Text(Some(cow)) => Some(cow.borrow()),\n\n Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).ok(),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a char if the value is a char, otherwise `None`.\n\n pub const fn as_char(&self) -> Option<char> {\n", "file_path": "src/ast/values.rs", "rank": 47, "score": 90565.26834742141 }, { "content": "value!(val: &'a str, Text, val.into());\n\nvalue!(val: String, Text, val.into());\n\nvalue!(val: usize, Integer, i64::try_from(val).unwrap());\n\nvalue!(val: i32, Integer, i64::try_from(val).unwrap());\n\nvalue!(val: &'a [u8], Bytes, val.into());\n\nvalue!(val: f64, Double, val);\n\nvalue!(val: f32, Float, val);\n\n\n\n#[cfg(feature = \"chrono\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\nvalue!(val: DateTime<Utc>, DateTime, val);\n\n#[cfg(feature = \"chrono\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\nvalue!(val: chrono::NaiveTime, Time, val);\n\n#[cfg(feature = \"chrono\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\nvalue!(val: chrono::NaiveDate, Date, val);\n\n#[cfg(feature = \"bigdecimal\")]\n\nvalue!(val: BigDecimal, Numeric, val);\n\n#[cfg(feature = \"json\")]\n", "file_path": "src/ast/values.rs", "rank": 48, "score": 90565.21848800725 }, { "content": "\n\n /// `true` if the `Value` is a JSON value.\n\n #[cfg(feature = \"json\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\n pub const fn is_json(&self) -> bool {\n\n matches!(self, Value::Json(_))\n\n }\n\n\n\n /// Returns a reference to a JSON Value if of Json type, otherwise `None`.\n\n #[cfg(feature = \"json\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\n pub const fn as_json(&self) -> Option<&serde_json::Value> {\n\n match self {\n\n Value::Json(Some(j)) => Some(j),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Transforms to a JSON Value if of Json type, otherwise `None`.\n\n #[cfg(feature = \"json\")]\n", "file_path": "src/ast/values.rs", "rank": 49, "score": 90565.10097014999 }, { "content": "\n\n match res {\n\n Some(r) => r,\n\n None => write!(f, \"null\"),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"json\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\nimpl<'a> From<Value<'a>> for serde_json::Value {\n\n fn from(pv: Value<'a>) -> Self {\n\n let res = match pv {\n\n Value::Integer(i) => i.map(|i| serde_json::Value::Number(Number::from(i))),\n\n Value::Float(f) => f.map(|f| match Number::from_f64(f as f64) {\n\n Some(number) => serde_json::Value::Number(number),\n\n None => serde_json::Value::Null,\n\n }),\n\n Value::Double(f) => f.map(|f| match Number::from_f64(f) {\n\n Some(number) => serde_json::Value::Number(number),\n", "file_path": "src/ast/values.rs", "rank": 50, "score": 90564.61774475768 }, { "content": " V: 
Into<Value<'a>>,\n\n {\n\n Value::Array(Some(value.into_iter().map(|v| v.into()).collect()))\n\n }\n\n\n\n /// Creates a new uuid value.\n\n #[cfg(feature = \"uuid\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"uuid\")))]\n\n pub const fn uuid(value: Uuid) -> Self {\n\n Value::Uuid(Some(value))\n\n }\n\n\n\n /// Creates a new datetime value.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn datetime(value: DateTime<Utc>) -> Self {\n\n Value::DateTime(Some(value))\n\n }\n\n\n\n /// Creates a new date value.\n", "file_path": "src/ast/values.rs", "rank": 51, "score": 90564.50189416503 }, { "content": " None => serde_json::Value::Null,\n\n }),\n\n Value::Text(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),\n\n Value::Bytes(bytes) => bytes.map(|bytes| serde_json::Value::String(base64::encode(&bytes))),\n\n Value::Enum(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),\n\n Value::Boolean(b) => b.map(serde_json::Value::Bool),\n\n Value::Char(c) => c.map(|c| {\n\n let bytes = [c as u8];\n\n let s = std::str::from_utf8(&bytes)\n\n .expect(\"interpret byte as UTF-8\")\n\n .to_string();\n\n serde_json::Value::String(s)\n\n }),\n\n Value::Xml(cow) => cow.map(|cow| serde_json::Value::String(cow.into_owned())),\n\n Value::Array(v) => {\n\n v.map(|v| serde_json::Value::Array(v.into_iter().map(serde_json::Value::from).collect()))\n\n }\n\n #[cfg(feature = \"bigdecimal\")]\n\n Value::Numeric(d) => d.map(|d| serde_json::to_value(d.to_f64().unwrap()).unwrap()),\n\n #[cfg(feature = \"json\")]\n", "file_path": "src/ast/values.rs", "rank": 52, "score": 90564.43196334392 }, { "content": " value\n\n .as_i64()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not an i64\")).build())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"bigdecimal\")]\n\nimpl<'a> TryFrom<Value<'a>> for BigDecimal {\n\n type Error = Error;\n\n\n\n fn try_from(value: Value<'a>) -> Result<BigDecimal, Self::Error> {\n\n value\n\n .into_numeric()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a decimal\")).build())\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<Value<'a>> for f64 {\n\n type Error = Error;\n\n\n", "file_path": "src/ast/values.rs", "rank": 53, "score": 90564.2719626371 }, { "content": "\n\n /// Creates a new XML value.\n\n pub fn xml<T>(value: T) -> Self\n\n where\n\n T: Into<Cow<'a, str>>,\n\n {\n\n Value::Xml(Some(value.into()))\n\n }\n\n\n\n /// `true` if the `Value` is null.\n\n pub const fn is_null(&self) -> bool {\n\n match self {\n\n Value::Integer(i) => i.is_none(),\n\n Value::Float(i) => i.is_none(),\n\n Value::Double(i) => i.is_none(),\n\n Value::Text(t) => t.is_none(),\n\n Value::Enum(e) => e.is_none(),\n\n Value::Bytes(b) => b.is_none(),\n\n Value::Boolean(b) => b.is_none(),\n\n Value::Char(c) => c.is_none(),\n", "file_path": "src/ast/values.rs", "rank": 54, "score": 90563.6629072903 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\nimpl<'a> TryFrom<Value<'a>> for OffsetDateTime {\n\n type Error = Error;\n\n\n\n fn try_from(value: Value<'a>) -> Result<OffsetDateTime, Self::Error> {\n\n value\n\n .as_datetime()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a datetime\")).build())\n\n }\n\n}\n\n\n\n/// An in-memory temporary table. Can be used in some of the databases in a\n\n/// place of an actual table. 
Doesn't work in MySQL 5.7.\n\n#[derive(Debug, Clone, Default, PartialEq)]\n\npub struct Values<'a> {\n\n pub(crate) rows: Vec<Row<'a>>,\n\n}\n\n\n\nimpl<'a> Values<'a> {\n\n /// Create a new empty in-memory set of values.\n", "file_path": "src/ast/values.rs", "rank": 55, "score": 90562.37196903468 }, { "content": " }\n\n }\n\n\n\n /// `true` if the `Value` is a Date.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn is_date(&self) -> bool {\n\n matches!(self, Value::Date(_))\n\n }\n\n\n\n /// Returns a `NaiveDate` if the value is a `Date`, otherwise `None`.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn as_date(&self) -> Option<Date> {\n\n match self {\n\n Value::Date(dt) => *dt,\n\n _ => None,\n\n }\n\n }\n\n\n", "file_path": "src/ast/values.rs", "rank": 56, "score": 90562.24551857385 }, { "content": "\n\n /// Returns a `f64` if the value is a double, otherwise `None`.\n\n pub const fn as_f64(&self) -> Option<f64> {\n\n match self {\n\n Value::Double(Some(f)) => Some(*f),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a `f32` if the value is a double, otherwise `None`.\n\n pub const fn as_f32(&self) -> Option<f32> {\n\n match self {\n\n Value::Float(Some(f)) => Some(*f),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a numeric value or can be converted to one.\n\n #[cfg(feature = \"bigdecimal\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"bigdecimal\")))]\n", "file_path": "src/ast/values.rs", "rank": 57, "score": 90562.14170048076 }, { "content": " fn try_from(value: Value<'a>) -> Result<f64, Self::Error> {\n\n value\n\n .as_f64()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a f64\")).build())\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<Value<'a>> for String {\n\n type Error = Error;\n\n\n\n fn try_from(value: Value<'a>) -> Result<String, Self::Error> {\n\n value\n\n .into_string()\n\n .ok_or_else(|| Error::builder(ErrorKind::conversion(\"Not a string\")).build())\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<Value<'a>> for bool {\n\n type Error = Error;\n\n\n", "file_path": "src/ast/values.rs", "rank": 58, "score": 90560.47494281853 }, { "content": " match self {\n\n Value::Char(c) => *c,\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns a cloned String if the value is text, otherwise `None`.\n\n pub fn to_string(&self) -> Option<String> {\n\n match self {\n\n Value::Text(Some(cow)) => Some(cow.to_string()),\n\n Value::Bytes(Some(cow)) => std::str::from_utf8(cow.as_ref()).map(|s| s.to_owned()).ok(),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Transforms the `Value` to a `String` if it's text,\n\n /// otherwise `None`.\n\n pub fn into_string(self) -> Option<String> {\n\n match self {\n\n Value::Text(Some(cow)) => Some(cow.into_owned()),\n", "file_path": "src/ast/values.rs", "rank": 59, "score": 90559.94517343657 }, { "content": " #[cfg(feature = \"json\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\n /// A JSON value.\n\n Json(Option<serde_json::Value>),\n\n /// A XML value.\n\n Xml(Option<Cow<'a, str>>),\n\n #[cfg(feature = \"uuid\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"uuid\")))]\n\n /// An UUID value.\n\n Uuid(Option<Uuid>),\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n /// A datetime value.\n\n DateTime(Option<DateTime<Utc>>),\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n /// A 
date value.\n\n Date(Option<NaiveDate>),\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n", "file_path": "src/ast/values.rs", "rank": 60, "score": 90559.42474821309 }, { "content": "\n\n pub fn flatten_row(self) -> Option<Row<'a>> {\n\n let mut result = Row::with_capacity(self.len());\n\n\n\n for mut row in self.rows.into_iter() {\n\n match row.pop() {\n\n Some(value) => result.push(value),\n\n None => return None,\n\n }\n\n }\n\n\n\n Some(result)\n\n }\n\n}\n\n\n\nimpl<'a, I, R> From<I> for Values<'a>\n\nwhere\n\n I: Iterator<Item = R>,\n\n R: Into<Row<'a>>,\n\n{\n", "file_path": "src/ast/values.rs", "rank": 61, "score": 90558.42193326945 }, { "content": " }\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n /// `true` if the `Value` is a DateTime.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn is_datetime(&self) -> bool {\n\n matches!(self, Value::DateTime(_))\n\n }\n\n\n\n /// Returns a `DateTime` if the value is a `DateTime`, otherwise `None`.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn as_datetime(&self) -> Option<OffsetDateTime> {\n\n match self {\n\n Value::DateTime(dt) => *dt,\n\n _ => None,\n", "file_path": "src/ast/values.rs", "rank": 62, "score": 90557.95334302525 }, { "content": " assert_eq!(values, vec![\"test\"]);\n\n }\n\n\n\n #[test]\n\n fn a_parameterized_value_of_booleans_can_be_converted_into_a_vec() {\n\n let pv = Value::array(vec![true]);\n\n let values: Vec<bool> = pv.into_vec().expect(\"convert into Vec<bool>\");\n\n assert_eq!(values, vec![true]);\n\n }\n\n\n\n #[test]\n\n #[cfg(feature = \"chrono\")]\n\n fn a_parameterized_value_of_datetimes_can_be_converted_into_a_vec() {\n\n let datetime = DateTime::from_str(\"2019-07-27T05:30:30Z\").expect(\"parsing date/time\");\n\n let pv = Value::array(vec![datetime]);\n\n let values: Vec<DateTime<Utc>> = pv.into_vec().expect(\"convert into Vec<DateTime>\");\n\n assert_eq!(values, vec![datetime]);\n\n }\n\n\n\n #[test]\n", "file_path": "src/ast/values.rs", "rank": 63, "score": 90557.72449816582 }, { "content": " {\n\n Value::Text(Some(value.into()))\n\n }\n\n\n\n /// Creates a new enum value.\n\n pub fn enum_variant<T>(value: T) -> Self\n\n where\n\n T: Into<Cow<'a, str>>,\n\n {\n\n Value::Enum(Some(value.into()))\n\n }\n\n\n\n /// Creates a new bytes value.\n\n pub fn bytes<B>(value: B) -> Self\n\n where\n\n B: Into<Cow<'a, [u8]>>,\n\n {\n\n Value::Bytes(Some(value.into()))\n\n }\n\n\n", "file_path": "src/ast/values.rs", "rank": 64, "score": 90557.40284763764 }, { "content": " /// 32-bit floating point.\n\n Float(Option<f32>),\n\n /// 64-bit floating point.\n\n Double(Option<f64>),\n\n /// String value.\n\n Text(Option<Cow<'a, str>>),\n\n /// Database enum value.\n\n Enum(Option<Cow<'a, str>>),\n\n /// Bytes value.\n\n Bytes(Option<Cow<'a, [u8]>>),\n\n /// Boolean value.\n\n Boolean(Option<bool>),\n\n /// A single character.\n\n Char(Option<char>),\n\n /// An array value (PostgreSQL).\n\n Array(Option<Vec<Value<'a>>>),\n\n /// A numeric value.\n\n #[cfg(feature = \"bigdecimal\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"bigdecimal\")))]\n\n Numeric(Option<BigDecimal>),\n", "file_path": "src/ast/values.rs", "rank": 65, "score": 90557.3948689924 }, { "content": " #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn date(value: NaiveDate) -> Self {\n\n 
Value::Date(Some(value))\n\n }\n\n\n\n /// Creates a new time value.\n\n #[cfg(feature = \"chrono\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"chrono\")))]\n\n pub const fn time(value: NaiveTime) -> Self {\n\n Value::Time(Some(value))\n\n }\n\n\n\n /// Creates a new datetime value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn datetime(value: OffsetDateTime) -> Self {\n\n Value::DateTime(Some(value))\n\n }\n\n\n", "file_path": "src/ast/values.rs", "rank": 66, "score": 90557.31910640505 }, { "content": " /// Creates a new boolean value.\n\n pub fn boolean<B>(value: B) -> Self\n\n where\n\n B: Into<bool>,\n\n {\n\n Value::Boolean(Some(value.into()))\n\n }\n\n\n\n /// Creates a new character value.\n\n pub fn character<C>(value: C) -> Self\n\n where\n\n C: Into<char>,\n\n {\n\n Value::Char(Some(value.into()))\n\n }\n\n\n\n /// Creates a new array value.\n\n pub fn array<I, V>(value: I) -> Self\n\n where\n\n I: IntoIterator<Item = V>,\n", "file_path": "src/ast/values.rs", "rank": 67, "score": 90557.02949834928 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\nvalue!(val: JsonValue, Json, val);\n\n#[cfg(feature = \"uuid\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"uuid\")))]\n\nvalue!(val: Uuid, Uuid, val);\n\n\n\n#[cfg(feature = \"time\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\nvalue!(val: time::OffsetDateTime, DateTime, val);\n\n#[cfg(feature = \"time\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\nvalue!(val: time::Date, Date, val);\n\n#[cfg(feature = \"time\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\nvalue!(val: time::Time, Time, val);\n\n\n\nimpl<'a> TryFrom<Value<'a>> for i64 {\n\n type Error = Error;\n\n\n\n fn try_from(value: Value<'a>) -> Result<i64, Self::Error> {\n", "file_path": "src/ast/values.rs", "rank": 68, "score": 90556.79920957229 }, { "content": " /// A time value.\n\n Time(Option<NaiveTime>),\n\n\n\n /// A datetime value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n DateTime(Option<OffsetDateTime>),\n\n /// A date value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n Date(Option<Date>),\n\n /// A time value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n Time(Option<Time>),\n\n}\n\n\n\npub(crate) struct Params<'a>(pub(crate) &'a [Value<'a>]);\n\n\n\nimpl<'a> fmt::Display for Params<'a> {\n", "file_path": "src/ast/values.rs", "rank": 69, "score": 90556.46940931307 }, { "content": " use std::str::FromStr;\n\n\n\n #[test]\n\n fn a_parameterized_value_of_ints_can_be_converted_into_a_vec() {\n\n let pv = Value::array(vec![1]);\n\n let values: Vec<i64> = pv.into_vec().expect(\"convert into Vec<i64>\");\n\n assert_eq!(values, vec![1]);\n\n }\n\n\n\n #[test]\n\n fn a_parameterized_value_of_reals_can_be_converted_into_a_vec() {\n\n let pv = Value::array(vec![1.0]);\n\n let values: Vec<f64> = pv.into_vec().expect(\"convert into Vec<f64>\");\n\n assert_eq!(values, vec![1.0]);\n\n }\n\n\n\n #[test]\n\n fn a_parameterized_value_of_texts_can_be_converted_into_a_vec() {\n\n let pv = Value::array(vec![\"test\"]);\n\n let values: Vec<String> = pv.into_vec().expect(\"convert into Vec<String>\");\n", "file_path": "src/ast/values.rs", "rank": 70, "score": 90555.61207159924 }, { "content": " #[cfg(feature = \"bigdecimal\")]\n\n 
#[cfg_attr(feature = \"docs\", doc(cfg(feature = \"bigdecimal\")))]\n\n pub const fn numeric(value: BigDecimal) -> Self {\n\n Value::Numeric(Some(value))\n\n }\n\n\n\n /// Creates a new float value.\n\n pub const fn float(value: f32) -> Self {\n\n Self::Float(Some(value))\n\n }\n\n\n\n /// Creates a new double value.\n\n pub const fn double(value: f64) -> Self {\n\n Self::Double(Some(value))\n\n }\n\n\n\n /// Creates a new string value.\n\n pub fn text<T>(value: T) -> Self\n\n where\n\n T: Into<Cow<'a, str>>,\n", "file_path": "src/ast/values.rs", "rank": 71, "score": 90553.32177706248 }, { "content": " #[cfg(feature = \"bigdecimal\")]\n\n Value::Numeric(val) => val.as_ref().map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"json\")]\n\n Value::Json(val) => val.as_ref().map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"uuid\")]\n\n Value::Uuid(val) => val.map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"chrono\")]\n\n Value::DateTime(val) => val.map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Date(val) => val.map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Time(val) => val.map(|v| write!(f, \"{}\", v)),\n\n\n\n #[cfg(feature = \"time\")]\n\n Value::DateTime(val) => val.map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"time\")]\n\n Value::Date(val) => val.map(|v| write!(f, \"{}\", v)),\n\n #[cfg(feature = \"time\")]\n\n Value::Time(val) => val.map(|v| write!(f, \"{}\", v)),\n\n };\n", "file_path": "src/ast/values.rs", "rank": 72, "score": 90553.15689860647 }, { "content": " /// Creates a new date value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn date(value: Date) -> Self {\n\n Value::Date(Some(value))\n\n }\n\n\n\n /// Creates a new time value.\n\n #[cfg(feature = \"time\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"time\")))]\n\n pub const fn time(value: Time) -> Self {\n\n Value::Time(Some(value))\n\n }\n\n\n\n /// Creates a new JSON value.\n\n #[cfg(feature = \"json\")]\n\n #[cfg_attr(feature = \"docs\", doc(cfg(feature = \"json\")))]\n\n pub const fn json(value: serde_json::Value) -> Self {\n\n Value::Json(Some(value))\n\n }\n", "file_path": "src/ast/values.rs", "rank": 73, "score": 90553.04830090796 }, { "content": " fn from(rows: I) -> Self {\n\n Self {\n\n rows: rows.map(|r| r.into()).collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for Values<'a> {\n\n type Item = Row<'a>;\n\n type IntoIter = std::vec::IntoIter<Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.rows.into_iter()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[cfg(feature = \"chrono\")]\n", "file_path": "src/ast/values.rs", "rank": 74, "score": 90551.49143827498 }, { "content": " Value::Json(v) => v,\n\n #[cfg(feature = \"uuid\")]\n\n Value::Uuid(u) => u.map(|u| serde_json::Value::String(u.to_hyphenated().to_string())),\n\n #[cfg(feature = \"chrono\")]\n\n Value::DateTime(dt) => dt.map(|dt| serde_json::Value::String(dt.to_rfc3339())),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Date(date) => date.map(|date| serde_json::Value::String(format!(\"{}\", date))),\n\n #[cfg(feature = \"chrono\")]\n\n Value::Time(time) => time.map(|time| serde_json::Value::String(format!(\"{}\", time))),\n\n\n\n #[cfg(feature = \"time\")]\n\n Value::DateTime(dt) => dt.map(|dt| {\n\n serde_json::Value::String(\n\n dt.format(&time::format_description::well_known::Rfc3339)\n\n .expect(\"DateTime composed of invalid parts and cannot be formatted\"),\n\n 
)\n\n }),\n\n #[cfg(feature = \"time\")]\n\n Value::Date(date) => date.map(|date| serde_json::Value::String(format!(\"{}\", date))),\n\n #[cfg(feature = \"time\")]\n", "file_path": "src/ast/values.rs", "rank": 75, "score": 90551.07282708227 }, { "content": " pub fn empty() -> Self {\n\n Self { rows: Vec::new() }\n\n }\n\n\n\n /// Create a new in-memory set of values.\n\n pub fn new(rows: Vec<Row<'a>>) -> Self {\n\n Self { rows }\n\n }\n\n\n\n /// Create a new in-memory set of values with an allocated capacity.\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Self {\n\n rows: Vec::with_capacity(capacity),\n\n }\n\n }\n\n\n\n /// Add value to the temporary table.\n\n pub fn push<T>(&mut self, row: T)\n\n where\n\n T: Into<Row<'a>>,\n", "file_path": "src/ast/values.rs", "rank": 76, "score": 90550.48856488425 }, { "content": " #[cfg(feature = \"time\")]\n\n fn a_parameterized_value_of_time_datetimes_can_be_converted_into_a_vec() {\n\n let datetime =\n\n time::OffsetDateTime::parse(\"2019-07-27T05:30:30Z\", &time::format_description::well_known::Rfc3339)\n\n .expect(\"parsing date/time\");\n\n let pv = Value::array(vec![datetime]);\n\n let values: Vec<OffsetDateTime> = pv.into_vec().expect(\"convert into Vec<DateTime>\");\n\n assert_eq!(values, vec![datetime]);\n\n }\n\n\n\n #[test]\n\n fn a_parameterized_value_of_an_array_cant_be_converted_into_a_vec_of_the_wrong_type() {\n\n let pv = Value::array(vec![1]);\n\n let rslt: Option<Vec<f64>> = pv.into_vec();\n\n assert!(rslt.is_none());\n\n }\n\n}\n", "file_path": "src/ast/values.rs", "rank": 77, "score": 90549.87113582967 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let len = self.0.len();\n\n\n\n write!(f, \"[\")?;\n\n for (i, val) in self.0.iter().enumerate() {\n\n write!(f, \"{}\", val)?;\n\n\n\n if i < (len - 1) {\n\n write!(f, \",\")?;\n\n }\n\n }\n\n write!(f, \"]\")\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for Value<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let res = match self {\n\n Value::Integer(val) => val.map(|v| write!(f, \"{}\", v)),\n\n Value::Float(val) => val.map(|v| write!(f, \"{}\", v)),\n", "file_path": "src/ast/values.rs", "rank": 78, "score": 90548.03780303441 }, { "content": " Value::Double(val) => val.map(|v| write!(f, \"{}\", v)),\n\n Value::Text(val) => val.as_ref().map(|v| write!(f, \"\\\"{}\\\"\", v)),\n\n Value::Bytes(val) => val.as_ref().map(|v| write!(f, \"<{} bytes blob>\", v.len())),\n\n Value::Enum(val) => val.as_ref().map(|v| write!(f, \"\\\"{}\\\"\", v)),\n\n Value::Boolean(val) => val.map(|v| write!(f, \"{}\", v)),\n\n Value::Char(val) => val.map(|v| write!(f, \"'{}'\", v)),\n\n Value::Array(vals) => vals.as_ref().map(|vals| {\n\n let len = vals.len();\n\n\n\n write!(f, \"[\")?;\n\n for (i, val) in vals.iter().enumerate() {\n\n write!(f, \"{}\", val)?;\n\n\n\n if i < (len - 1) {\n\n write!(f, \",\")?;\n\n }\n\n }\n\n write!(f, \"]\")\n\n }),\n\n Value::Xml(val) => val.as_ref().map(|v| write!(f, \"{}\", v)),\n", "file_path": "src/ast/values.rs", "rank": 79, "score": 90544.2902836573 }, { "content": " {\n\n self.rows.push(row.into());\n\n }\n\n\n\n /// The number of rows in the in-memory table.\n\n pub fn len(&self) -> usize {\n\n self.rows.len()\n\n }\n\n\n\n /// True if has no rows.\n\n pub fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n pub fn row_len(&self) -> usize {\n\n match self.rows.split_first() {\n\n Some((row, _)) => row.len(),\n\n None => 0,\n\n }\n\n }\n", "file_path": "src/ast/values.rs", "rank": 80, "score": 
90543.70089139164 }, { "content": "pub fn run_with_tokio<O, F: std::future::Future<Output = O>>(fut: F) -> O {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(fut)\n\n}\n\n\n", "file_path": "test-setup/src/lib.rs", "rank": 81, "score": 87463.40640497743 }, { "content": "#[cfg(feature = \"postgresql\")]\n\npub fn text_search_relevance<'a, E: Clone, Q>(exprs: &[E], query: Q) -> super::Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n Q: Into<Cow<'a, str>>,\n\n{\n\n let exprs: Vec<Expression> = exprs.iter().map(|c| c.clone().into()).collect();\n\n let fun = TextSearchRelevance {\n\n exprs,\n\n query: query.into(),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/search.rs", "rank": 82, "score": 86372.54770344864 }, { "content": "fn from_postgres<D: ExactSizeIterator<Item = u16>>(dec: PostgresDecimal<D>) -> Result<BigDecimal, InvalidDecimal> {\n\n let PostgresDecimal {\n\n neg, digits, weight, ..\n\n } = dec;\n\n\n\n if digits.len() == 0 {\n\n return Ok(0u64.into());\n\n }\n\n\n\n let sign = match neg {\n\n false => Sign::Plus,\n\n true => Sign::Minus,\n\n };\n\n\n\n // weight is 0 if the decimal point falls after the first base-10000 digit\n\n let scale = (digits.len() as i64 - weight as i64 - 1) * 4;\n\n\n\n // no optimized algorithm for base-10 so use base-100 for faster processing\n\n let mut cents = Vec::with_capacity(digits.len() * 2);\n\n\n", "file_path": "src/connector/postgres/conversion/decimal.rs", "rank": 83, "score": 86007.29635386041 }, { "content": "use crate::prelude::*;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\n#[cfg(all(feature = \"json\", any(feature = \"postgresql\", feature = \"mysql\")))]\n\npub struct JsonExtractLastArrayElem<'a> {\n\n pub(crate) expr: Box<Expression<'a>>,\n\n}\n\n\n\n/// This is an internal function used to help construct the JsonArrayEndsInto Comparable\n\n#[cfg(all(feature = \"json\", any(feature = \"postgresql\", feature = \"mysql\")))]\n\npub(crate) fn json_extract_last_array_elem<'a, E>(expr: E) -> Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n{\n\n let fun = JsonExtractLastArrayElem {\n\n expr: Box::new(expr.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/json_extract_array.rs", "rank": 84, "score": 82363.26760392718 }, { "content": "\n\n#[derive(Debug, Clone, PartialEq)]\n\n#[cfg(all(feature = \"json\", any(feature = \"postgresql\", feature = \"mysql\")))]\n\npub struct JsonExtractFirstArrayElem<'a> {\n\n pub(crate) expr: Box<Expression<'a>>,\n\n}\n\n\n\n/// This is an internal function used to help construct the JsonArrayBeginsWith Comparable\n\n#[cfg(all(feature = \"json\", any(feature = \"postgresql\", feature = \"mysql\")))]\n\npub(crate) fn json_extract_first_array_elem<'a, E>(expr: E) -> Function<'a>\n\nwhere\n\n E: Into<Expression<'a>>,\n\n{\n\n let fun = JsonExtractFirstArrayElem {\n\n expr: Box::new(expr.into()),\n\n };\n\n\n\n fun.into()\n\n}\n", "file_path": "src/ast/function/json_extract_array.rs", "rank": 85, "score": 82361.8135151187 }, { "content": "type DeserializeError = serde::de::value::Error;\n\n\n", "file_path": "src/serde.rs", "rank": 86, "score": 76980.7306979713 }, { "content": "/// A function travelling through the query AST, building the final query string\n\n/// and gathering parameters sent to the database together with the query.\n\npub trait Visitor<'a> {\n\n /// Opening backtick character to surround identifiers, such as column and table names.\n\n const C_BACKTICK_OPEN: &'static str;\n\n /// 
Closing backtick character to surround identifiers, such as column and table names.\n\n const C_BACKTICK_CLOSE: &'static str;\n\n /// Wildcard character to be used in `LIKE` queries.\n\n const C_WILDCARD: &'static str;\n\n\n\n /// Convert the given `Query` to an SQL string and a vector of parameters.\n\n /// When certain parameters are replaced with the `C_PARAM` character in the\n\n /// query, the vector should contain the parameter value in the right position.\n\n ///\n\n /// The point of entry for visiting query ASTs.\n\n ///\n\n /// ```\n\n /// # use quaint::{ast::*, visitor::*, error::Error};\n\n /// # fn main() -> Result {\n\n /// let query = Select::from_table(\"cats\");\n\n /// let (sqlite, _) = Sqlite::build(query.clone())?;\n\n /// let (psql, _) = Postgres::build(query.clone())?;\n", "file_path": "src/visitor.rs", "rank": 87, "score": 64978.78883315623 }, { "content": "pub trait GetRow {\n\n fn get_result_row(&self) -> crate::Result<Vec<Value<'static>>>;\n\n}\n\n\n", "file_path": "src/connector/queryable.rs", "rank": 88, "score": 63763.68354100002 }, { "content": "pub trait ToColumnNames {\n\n fn to_column_names(&self) -> Vec<String>;\n\n}\n\n\n\n/// Represents a connection or a transaction that can be queried.\n", "file_path": "src/connector/queryable.rs", "rank": 89, "score": 63763.68354100002 }, { "content": "/// An item that can be compared against other values in the database.\n\npub trait Comparable<'a> {\n\n /// Tests if both sides are the same value.\n\n ///\n\n /// ```rust\n\n /// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n /// # fn main() -> Result<(), quaint::error::Error> {\n\n /// let query = Select::from_table(\"users\").so_that(\"foo\".equals(\"bar\"));\n\n /// let (sql, params) = Sqlite::build(query)?;\n\n ///\n\n /// assert_eq!(\"SELECT `users`.* FROM `users` WHERE `foo` = ?\", sql);\n\n ///\n\n /// assert_eq!(\n\n /// vec![\n\n /// Value::from(\"bar\"),\n\n /// ],\n\n /// params\n\n /// );\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n", "file_path": "src/ast/compare.rs", "rank": 90, "score": 63548.71676941086 }, { "content": "/// An item that can be used in the `GROUP BY` statement\n\npub trait Groupable<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Group by `self`\n\n fn group(self) -> GroupByDefinition<'a>;\n\n}\n\n\n", "file_path": "src/ast/grouping.rs", "rank": 91, "score": 63544.54043163117 }, { "content": "/// An item that can be used in the `ORDER BY` statement\n\npub trait Orderable<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Order by `self` in the given order\n\n fn order(self, order: Option<Order>) -> OrderDefinition<'a>;\n\n\n\n /// Change the order to `ASC`\n\n fn ascend(self) -> OrderDefinition<'a> {\n\n self.order(Some(Order::Asc))\n\n }\n\n\n\n /// Change the order to `DESC`\n\n fn descend(self) -> OrderDefinition<'a> {\n\n self.order(Some(Order::Desc))\n\n }\n\n}\n\n\n", "file_path": "src/ast/ordering.rs", "rank": 92, "score": 63544.54043163117 }, { "content": "/// An item that can be joined.\n\npub trait Joinable<'a> {\n\n /// Add the `JOIN` conditions.\n\n ///\n\n /// ```rust\n\n /// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n /// # fn main() -> Result<(), quaint::error::Error> {\n\n /// let join_data = \"b\".on((\"b\", \"id\").equals(Column::from((\"a\", \"id\"))));\n\n /// let query = Select::from_table(\"a\").inner_join(join_data);\n\n /// let (sql, _) = Sqlite::build(query)?;\n\n ///\n\n /// assert_eq!(\n\n /// \"SELECT `a`.* FROM `a` INNER JOIN `b` ON `b`.`id` = `a`.`id`\",\n\n /// sql,\n\n /// );\n\n /// # Ok(())\n\n /// 
# }\n\n /// ```\n\n fn on<T>(self, conditions: T) -> JoinData<'a>\n\n where\n\n T: Into<ConditionTree<'a>>;\n", "file_path": "src/ast/join.rs", "rank": 93, "score": 63539.44535584641 }, { "content": "/// An object that can be aliased.\n\npub trait Aliasable<'a> {\n\n type Target;\n\n\n\n /// Alias table for usage elsewhere in the query.\n\n fn alias<T>(self, alias: T) -> Self::Target\n\n where\n\n T: Into<Cow<'a, str>>;\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\n/// Either an identifier or a nested query.\n\npub enum TableType<'a> {\n\n Table(Cow<'a, str>),\n\n JoinedTable(Box<(Cow<'a, str>, Vec<Join<'a>>)>),\n\n Query(Box<Select<'a>>),\n\n Values(Values<'a>),\n\n}\n\n\n\n/// A table definition\n\n#[derive(Clone, Debug)]\n", "file_path": "src/ast/table.rs", "rank": 94, "score": 63539.44535584641 }, { "content": "/// `AND`, `OR` and `NOT` conjunctive implementations.\n\npub trait Conjunctive<'a> {\n\n /// Builds an `AND` condition having `self` as the left leaf and `other` as the right.\n\n ///\n\n /// ```rust\n\n /// # use quaint::{ast::*, visitor::{Visitor, Sqlite}};\n\n /// assert_eq!(\n\n /// \"foo\".equals(\"bar\").and(\"wtf\".less_than(3)),\n\n /// ConditionTree::And(vec![\n\n /// Expression::from(\"foo\".equals(\"bar\")),\n\n /// Expression::from(\"wtf\".less_than(3))\n\n /// ])\n\n /// )\n\n /// ```\n\n fn and<E>(self, other: E) -> ConditionTree<'a>\n\n where\n\n E: Into<Expression<'a>>;\n\n\n\n /// Builds an `OR` condition having `self` as the left leaf and `other` as the right.\n\n ///\n\n /// ```rust\n", "file_path": "src/ast/conjunctive.rs", "rank": 95, "score": 63539.44535584641 }, { "content": "#[async_trait::async_trait]\n\npub trait TestApi {\n\n async fn create_type_table(&mut self, r#type: &str) -> crate::Result<String>;\n\n fn render_create_table(&mut self, table_name: &str, columns: &str) -> (String, String);\n\n async fn create_table(&mut self, columns: &str) -> crate::Result<String>;\n\n\n\n async fn create_index(&mut self, table: &str, columns: &str) -> crate::Result<String>;\n\n\n\n fn system(&self) -> &'static str;\n\n fn connector_tag(&self) -> Tags;\n\n fn unique_constraint(&mut self, column: &str) -> String;\n\n fn foreign_key(&mut self, parent_table: &str, parent_column: &str, child_column: &str) -> String;\n\n fn autogen_id(&self, name: &str) -> String;\n\n fn conn(&self) -> &crate::single::Quaint;\n\n fn get_name(&mut self) -> String;\n\n}\n", "file_path": "src/tests/test_api.rs", "rank": 96, "score": 62499.11189054695 }, { "content": " pub trait Sealed {}\n\n impl Sealed for usize {}\n\n impl Sealed for &str {}\n\n}\n\n\n\nimpl ValueIndex<ResultRowRef<'_>, Value<'static>> for usize {\n\n fn index_into<'v>(self, row: &'v ResultRowRef) -> &'v Value<'static> {\n\n row.at(self).unwrap()\n\n }\n\n}\n\n\n\nimpl ValueIndex<ResultRowRef<'_>, Value<'static>> for &str {\n\n fn index_into<'v>(self, row: &'v ResultRowRef) -> &'v Value<'static> {\n\n row.get(self).unwrap()\n\n }\n\n}\n\n\n\nimpl ValueIndex<ResultRow, Value<'static>> for usize {\n\n fn index_into<'v>(self, row: &'v ResultRow) -> &'v Value<'static> {\n\n row.at(self).unwrap()\n", "file_path": "src/connector/result_set/index.rs", "rank": 97, "score": 62499.11189054695 }, { "content": "/// Convert the value into a group by definition.\n\npub trait IntoGroupByDefinition<'a> {\n\n fn into_group_by_definition(self) -> GroupByDefinition<'a>;\n\n}\n\n\n\nimpl<'a> IntoGroupByDefinition<'a> for &'a str {\n\n fn into_group_by_definition(self) -> GroupByDefinition<'a> {\n\n let column: Column = self.into();\n\n 
column.into()\n\n }\n\n}\n\n\n\nimpl<'a> IntoGroupByDefinition<'a> for (&'a str, &'a str) {\n\n fn into_group_by_definition(self) -> GroupByDefinition<'a> {\n\n let column: Column = self.into();\n\n column.into()\n\n }\n\n}\n\n\n\nimpl<'a> IntoGroupByDefinition<'a> for Column<'a> {\n\n fn into_group_by_definition(self) -> GroupByDefinition<'a> {\n", "file_path": "src/ast/grouping.rs", "rank": 98, "score": 62201.074372496725 }, { "content": "/// Convert the value into an order definition with order item and direction\n\npub trait IntoOrderDefinition<'a> {\n\n fn into_order_definition(self) -> OrderDefinition<'a>;\n\n}\n\n\n\nimpl<'a> IntoOrderDefinition<'a> for &'a str {\n\n fn into_order_definition(self) -> OrderDefinition<'a> {\n\n let column: Column<'a> = self.into();\n\n (column.into(), None)\n\n }\n\n}\n\n\n\nimpl<'a> IntoOrderDefinition<'a> for Column<'a> {\n\n fn into_order_definition(self) -> OrderDefinition<'a> {\n\n (self.into(), None)\n\n }\n\n}\n\n\n\nimpl<'a> IntoOrderDefinition<'a> for OrderDefinition<'a> {\n\n fn into_order_definition(self) -> OrderDefinition<'a> {\n\n self\n", "file_path": "src/ast/ordering.rs", "rank": 99, "score": 62200.851187724875 } ]
Rust
src/services/filler/processor.rs
Emulator000/pdfiller
89df98bbdb95147269ba9161f619ae59a30a43ce
use std::collections::BTreeMap;
use std::str;

use async_std::sync::Arc;

use log::error;

use lopdf::{Dictionary, Document as PdfDocument, Object, ObjectId};

use crate::file::FileProvider;
use crate::mongo::models::document::Document;

const PDF_VERSION: &str = "1.5";

pub struct DocumentObjects {
    pub objects: BTreeMap<ObjectId, Object>,
    pub pages: BTreeMap<ObjectId, Object>,
}

pub fn get_documents_containers<F: FileProvider + ?Sized>(
    file_type: Arc<Box<F>>,
    documents: Vec<Document>,
    compiled: bool,
) -> DocumentObjects {
    let mut max_id = 1;
    let mut documents_pages = BTreeMap::new();
    let mut documents_objects = BTreeMap::new();

    for document in documents {
        if let Some(ref file_name) = if compiled {
            file_type.generate_compiled_filepath(&document.file)
        } else {
            Some(document.file)
        } {
            match PdfDocument::load(file_name) {
                Ok(mut document) => {
                    document.renumber_objects_with(max_id);

                    max_id = document.max_id + 1;

                    documents_pages.extend(
                        document
                            .get_pages()
                            .into_iter()
                            .map(|(_, object_id)| {
                                (
                                    object_id,
                                    document.get_object(object_id).unwrap().to_owned(),
                                )
                            })
                            .collect::<BTreeMap<ObjectId, Object>>(),
                    );

                    documents_objects.extend(document.objects);
                }
                Err(e) => {
                    sentry::capture_error(&e);

                    error!("Error loading the PDF: {:#?}", e);
                }
            }
        }
    }

    DocumentObjects {
        pages: documents_pages,
        objects: documents_objects,
    }
}

pub fn process_documents(documents_objects: DocumentObjects) -> Option<PdfDocument> {
    let mut document = PdfDocument::with_version(PDF_VERSION);

    let mut catalog_object: Option<(ObjectId, Object)> = None;
    let mut pages_object: Option<(ObjectId, Object)> = None;

    for (object_id, object) in documents_objects.objects.iter() {
        match object.type_name().unwrap_or("") {
            "Catalog" => {
                catalog_object = Some((
                    if let Some((id, _)) = catalog_object {
                        id
                    } else {
                        *object_id
                    },
                    object.clone(),
                ));
            }
            "Pages" => {
                if let Some(dictionary) =
                    upsert_dictionary(&object, pages_object.as_ref().map(|(_, object)| object))
                {
                    pages_object = Some((
                        if let Some((id, _)) = pages_object {
                            id
                        } else {
                            *object_id
                        },
                        Object::Dictionary(dictionary),
                    ));
                }
            }
            "Page" => {}
            "Outlines" => {}
            "Outline" => {}
            _ => {
                document.objects.insert(*object_id, object.clone());
            }
        }
    }

    pages_object.as_ref()?;

    for (object_id, object) in documents_objects.pages.iter() {
        if let Ok(dictionary) = object.as_dict() {
            let mut dictionary = dictionary.clone();
            dictionary.set("Parent", pages_object.as_ref().unwrap().0);

            document
                .objects
                .insert(*object_id, Object::Dictionary(dictionary));
        }
    }

    catalog_object.as_ref()?;

    let catalog_object = catalog_object.unwrap();
    let pages_object = pages_object.unwrap();

    if let Ok(dictionary) = pages_object.1.as_dict() {
        let mut dictionary = dictionary.clone();
        dictionary.set("Count", documents_objects.pages.len() as u32);

        document
            .objects
            .insert(pages_object.0, Object::Dictionary(dictionary));
    }

    if let Ok(dictionary) = catalog_object.1.as_dict() {
        let mut dictionary = dictionary.clone();
        dictionary.set("Pages", pages_object.0);
        dictionary.remove(b"Outlines");

        document
            .objects
            .insert(catalog_object.0, Object::Dictionary(dictionary));
    }

    document.trailer.set("Root", catalog_object.0);
    document.max_id = document.objects.len() as u32;

    document.renumber_objects();
    document.compress();

    Some(document)
}

fn upsert_dictionary(object: &Object, other_object: Option<&Object>) -> Option<Dictionary> {
    if let Ok(dictionary) = object.as_dict() {
        let mut dictionary = dictionary.clone();

        if let Some(object) = other_object {
            if let Ok(old_dictionary) = object.as_dict() {
                dictionary.extend(old_dictionary);
            }
        }

        Some(dictionary)
    } else {
        None
    }
}
use std::collections::BTreeMap;
use std::str;

use async_std::sync::Arc;

use log::error;

use lopdf::{Dictionary, Document as PdfDocument, Object, ObjectId};

use crate::file::FileProvider;
use crate::mongo::models::document::Document;

const PDF_VERSION: &str = "1.5";

pub struct DocumentObjects {
    pub objects: BTreeMap<ObjectId, Object>,
    pub pages: BTreeMap<ObjectId, Object>,
}

pub fn get_documents_containers<F: FileProvider + ?Sized>(
    file_type: Arc<Box<F>>,
    documents: Vec<Document>,
    compiled: bool,
) -> DocumentObjects {
    let mut max_id = 1;
    let mut documents_pages = BTreeMap::new();
    let mut documents_objects = BTreeMap::new();

    for document in documents {
        if let Some(ref file_name) = if compiled {
            file_type.generate_compiled_filepath(&document.file)
        } else {
            Some(document.file)
        } {
            match PdfDocument::load(file_name) {
                Ok(mut document) => {
                    document.renumber_objects_with(max_id);

                    max_id = document.max_id + 1;

                    documents_pages.extend(
                        document
                            .get_pages()
                            .into_iter()
                            .map(|(_, object_id)| {
                                (
                                    object_id,
                                    document.get_object(object_id).unwrap().to_owned(),
                                )
                            })
                            .collect::<BTreeMap<ObjectId, Object>>(),
                    );

                    documents_objects.extend(document.objects);
                }
                Err(e) => {
                    sentry::capture_error(&e);

                    error!("Error loading the PDF: {:#?}", e);
                }
            }
        }
    }

    DocumentObjects {
        pages: documents_pages,
        objects: documents_objects,
    }
}

pub fn process_documents(documents_objects: DocumentObjects) -> Option<PdfDocument> {
    let mut document = PdfDocument::with_version(PDF_VERSION);

    let mut catalog_object: Option<(ObjectId, Object)> = None;
    let mut pages_object: Option<(ObjectId, Object)> = None;

    for (object_id, object) in documents_objects.objects.iter() {
        match object.type_name().unwrap_or("") {
            "Catalog" => {
                catalog_object =
;
            }
            "Pages" => {
                if let Some(dictionary) =
                    upsert_dictionary(&object, pages_object.as_ref().map(|(_, object)| object))
                {
                    pages_object = Some((
                        if let Some((id, _)) = pages_object {
                            id
                        } else {
                            *object_id
                        },
                        Object::Dictionary(dictionary),
                    ));
                }
            }
            "Page" => {}
            "Outlines" => {}
            "Outline" => {}
            _ => {
                document.objects.insert(*object_id, object.clone());
            }
        }
    }

    pages_object.as_ref()?;

    for (object_id, object) in documents_objects.pages.iter() {
        if let Ok(dictionary) = object.as_dict() {
            let mut dictionary = dictionary.clone();
            dictionary.set("Parent", pages_object.as_ref().unwrap().0);

            document
                .objects
                .insert(*object_id, Object::Dictionary(dictionary));
        }
    }

    catalog_object.as_ref()?;

    let catalog_object = catalog_object.unwrap();
    let pages_object = pages_object.unwrap();

    if let Ok(dictionary) = pages_object.1.as_dict() {
        let mut dictionary = dictionary.clone();
        dictionary.set("Count", documents_objects.pages.len() as u32);

        document
            .objects
            .insert(pages_object.0, Object::Dictionary(dictionary));
    }

    if let Ok(dictionary) = catalog_object.1.as_dict() {
        let mut dictionary = dictionary.clone();
        dictionary.set("Pages", pages_object.0);
        dictionary.remove(b"Outlines");

        document
            .objects
            .insert(catalog_object.0, Object::Dictionary(dictionary));
    }

    document.trailer.set("Root", catalog_object.0);
    document.max_id = document.objects.len() as u32;

    document.renumber_objects();
    document.compress();

    Some(document)
}

fn upsert_dictionary(object: &Object, other_object: Option<&Object>) -> Option<Dictionary> {
    if let Ok(dictionary) = object.as_dict() {
        let mut dictionary = dictionary.clone();

        if let Some(object) = other_object {
            if let Ok(old_dictionary) = object.as_dict() {
                dictionary.extend(old_dictionary);
            }
        }

        Some(dictionary)
    } else {
        None
    }
}
Some((
                    if let Some((id, _)) = catalog_object {
                        id
                    } else {
                        *object_id
                    },
                    object.clone(),
                ))
call_expression
[ { "content": "fn get_document_buffer(document: &mut PdfDocument) -> ExportCompilerResult<Vec<u8>> {\n\n let buf = Vec::<u8>::new();\n\n let mut cursor = Cursor::new(buf);\n\n\n\n match document.save_to(&mut cursor) {\n\n Ok(_) => {\n\n let _ = cursor.seek(SeekFrom::Start(0));\n\n\n\n Ok(cursor.get_ref().to_vec())\n\n }\n\n Err(e) => Err(ExportCompilerError::GenericError(format!(\n\n \"An error {:#?} occurred saving the PDFs files.\",\n\n e\n\n ))),\n\n }\n\n}\n\n\n\nasync fn save_compiled_file<F: FileProvider + ?Sized>(\n\n file_type: Arc<Box<F>>,\n\n file_path: String,\n", "file_path": "src/services/filler/compiler.rs", "rank": 0, "score": 141183.60711316636 }, { "content": "pub fn config(cfg: &mut web::ServiceConfig) {\n\n cfg.service(post_document);\n\n cfg.service(get_document);\n\n cfg.service(get_documents);\n\n cfg.service(get_documents_by_token);\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct FormData {\n\n file: String,\n\n}\n\n\n\n#[post(\"/document/{token}\")]\n\npub async fn post_document(\n\n data: web::Data<Data>,\n\n token: web::Path<String>,\n\n form: Option<web::Form<FormData>>,\n\n mut payload: Multipart,\n\n) -> impl Responder {\n\n let mut filepath = None;\n", "file_path": "src/services/document.rs", "rank": 2, "score": 107006.58808134678 }, { "content": "pub fn export_content<S: AsRef<str>>(\n\n accept: S,\n\n export_result: compiler::ExportCompilerResult<Vec<u8>>,\n\n) -> HttpResponse {\n\n match export_result {\n\n Ok(bytes) => HttpResponse::Ok()\n\n .encoding(ContentEncoding::Identity)\n\n .content_type(accept.as_ref())\n\n .append_header((\"accept-ranges\", \"bytes\"))\n\n .append_header((\n\n \"content-disposition\",\n\n format!(\n\n \"attachment; filename=\\\"pdf.{}\\\"\",\n\n if accept.as_ref() != mime::APPLICATION_PDF {\n\n \"zip\"\n\n } else {\n\n \"pdf\"\n\n }\n\n ),\n\n ))\n\n .body(bytes),\n\n Err(compiler::ExportCompilerError::GenericError(message)) => {\n\n HttpResponse::InternalServerError().json(WsError { error: message })\n\n }\n\n }\n\n}\n", "file_path": "src/services/mod.rs", "rank": 3, "score": 93323.20266635017 }, { "content": "pub fn config(cfg: &mut web::ServiceConfig) {\n\n document::config(cfg);\n\n filler::config(cfg);\n\n}\n\n\n", "file_path": "src/services/mod.rs", "rank": 5, "score": 88014.1219346298 }, { "content": "pub fn config(cfg: &mut web::ServiceConfig) {\n\n cfg.service(compile_documents);\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct CompileOptions {\n\n pub merge: Option<bool>,\n\n}\n\n\n\n#[post(\"/compile/{token}\")]\n\npub async fn compile_documents(\n\n data: web::Data<Data>,\n\n token: web::Path<String>,\n\n request: web::HttpRequest,\n\n bytes: web::Bytes,\n\n) -> impl Responder {\n\n match str::from_utf8(&bytes) {\n\n Ok(body) => match serde_json::from_str::<Value>(body) {\n\n Ok(values) => {\n\n if let Some(value) = values.get(\"data\") {\n", "file_path": "src/services/filler/mod.rs", "rank": 6, "score": 85923.10095202037 }, { "content": "pub fn get_object_rect(field: &Dictionary) -> Result<(f64, f64, f64, f64), lopdf::Error> {\n\n let rect = field\n\n .get(b\"Rect\")?\n\n .as_array()?\n\n .iter()\n\n .map(|object| {\n\n object\n\n .as_f64()\n\n .unwrap_or(object.as_i64().unwrap_or(0) as f64)\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n if rect.len() == 4 {\n\n Ok((rect[0], rect[1], rect[2], rect[3]))\n\n } else {\n\n Err(lopdf::Error::ObjectNotFound)\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 8, "score": 63105.066611979346 }, { "content": "pub fn get_accepted_header(request: &web::HttpRequest) -> 
Option<String> {\n\n if let Some(accept) = request.headers().get(ACCEPT) {\n\n let accept = accept.to_str().unwrap_or(\"\").to_lowercase();\n\n\n\n if accept.as_str() == mime::APPLICATION_PDF\n\n || accept.as_str() == mime::APPLICATION_OCTET_STREAM\n\n {\n\n Some(accept)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/services/mod.rs", "rank": 9, "score": 51926.12188803385 }, { "content": "#[derive(Serialize)]\n\nstruct WsMessage {\n\n message: String,\n\n}\n\n\n", "file_path": "src/services/mod.rs", "rank": 10, "score": 41786.61908415142 }, { "content": "#[derive(Serialize)]\n\nstruct WsError {\n\n error: String,\n\n}\n\n\n", "file_path": "src/services/mod.rs", "rank": 11, "score": 41786.61908415142 }, { "content": "#[async_trait]\n\npub trait FileProvider: Send + Sync {\n\n fn generate_filepath(&self, file_name: &str) -> String {\n\n format!(\n\n \"{}{}{}\",\n\n self.base_path(),\n\n Uuid::new_v4().to_string(),\n\n sanitize_filename::sanitize(file_name)\n\n )\n\n }\n\n\n\n fn generate_compiled_filepath(&self, file_path: &str) -> Option<String> {\n\n crystalsoft_utils::get_filename(file_path)\n\n .map(|file_name| format!(\"{}{}{}\", self.base_path(), PATH_COMPILED, file_name))\n\n }\n\n\n\n async fn download_and_save(&self, uri: &str) -> Option<String> {\n\n let mut filepath = None;\n\n if let Some(pdf) = client::get(uri).await {\n\n let remote_file_path = self.generate_filepath(\"file.pdf\");\n\n match self.save(remote_file_path.as_str(), pdf).await {\n", "file_path": "src/file/mod.rs", "rank": 12, "score": 31276.713944016225 }, { "content": " sentry::capture_error(&e);\n\n\n\n return HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\n\n \"An error occurred uploading the file: {:#?}\",\n\n e\n\n ),\n\n });\n\n }\n\n }\n\n }\n\n }\n\n None => match read_chuncked_buffer(&mut field).await {\n\n Ok(buf) => match std::str::from_utf8(buf.as_slice()) {\n\n Ok(uri) => {\n\n filepath = data.file.download_and_save(uri).await;\n\n }\n\n Err(e) => {\n\n sentry::capture_error(&e);\n\n\n", "file_path": "src/services/document.rs", "rank": 13, "score": 25675.21298774459 }, { "content": " let export_result = if accept.as_str() == mime::APPLICATION_PDF {\n\n compiler::merge_documents(data.file.clone(), documents, false).await\n\n } else {\n\n compiler::zip_documents(data.file.clone(), documents, false).await\n\n };\n\n\n\n services::export_content(accept, export_result)\n\n } else {\n\n HttpResponse::NotAcceptable().json(WsError {\n\n error: \"Only PDF or Streams are accepted\".into(),\n\n })\n\n }\n\n } else {\n\n HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n\n })\n\n }\n\n}\n\n\n\n#[get(\"/documents\")]\n", "file_path": "src/services/document.rs", "rank": 14, "score": 25674.783688493128 }, { "content": " Some(_) => {}\n\n None => {}\n\n }\n\n }\n\n }\n\n }\n\n\n\n if filepath.is_none() {\n\n HttpResponse::BadRequest().json(WsError {\n\n error: \"File missing.\".into(),\n\n })\n\n } else if let Some(file) = filepath {\n\n let document = Document::new(token.to_string(), file);\n\n match data.create_document(document.clone()).await {\n\n Ok(_) => HttpResponse::Created().json(document),\n\n Err(e) => HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\"An error occurred: {:#?}\", e),\n\n }),\n\n }\n\n } else {\n", "file_path": "src/services/document.rs", "rank": 15, "score": 25674.155600809918 }, { "content": " })\n\n }\n\n}\n\n\n\nasync fn read_chuncked_buffer(field: &mut 
Field) -> Result<Vec<u8>, MultipartError> {\n\n let mut buf = Vec::new();\n\n while let Some(chunk) = field.next().await {\n\n match chunk {\n\n Ok(data) => {\n\n buf.extend(data);\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n\n\n Ok(buf)\n\n}\n", "file_path": "src/services/document.rs", "rank": 16, "score": 25673.51197357032 }, { "content": " if let Some(form) = form {\n\n filepath = data.file.download_and_save(form.file.as_str()).await;\n\n } else {\n\n while let Ok(Some(mut field)) = payload.try_next().await {\n\n if let Some(ref content_type) = field.content_disposition() {\n\n match content_type.get_name() {\n\n Some(\"file\") => match content_type.get_filename() {\n\n Some(filename) => {\n\n if !filename.is_empty() {\n\n match read_chuncked_buffer(&mut field).await {\n\n Ok(buf) => {\n\n let local_filepath = data.file.generate_filepath(&filename);\n\n match data.file.save(&local_filepath, buf).await {\n\n Ok(_) => {\n\n filepath = Some(local_filepath);\n\n }\n\n Err(FileError::S3Error(e)) => {\n\n sentry::capture_error(&e);\n\n\n\n return HttpResponse::InternalServerError().json(WsError {\n", "file_path": "src/services/document.rs", "rank": 17, "score": 25673.213563875717 }, { "content": "use actix_multipart::{Field, Multipart, MultipartError};\n\nuse actix_web::{get, post, web, HttpResponse, Responder};\n\nuse futures_lite::stream::StreamExt;\n\nuse serde::Deserialize;\n\n\n\nuse crate::data::Data;\n\nuse crate::file::FileError;\n\nuse crate::mongo::models::document::Document;\n\nuse crate::services::{self, filler::compiler, WsError};\n\n\n", "file_path": "src/services/document.rs", "rank": 18, "score": 25673.079429233137 }, { "content": "pub async fn get_documents(data: web::Data<Data>) -> impl Responder {\n\n if let Some(documents) = data.get_all_documents().await {\n\n HttpResponse::Ok().json(documents)\n\n } else {\n\n HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n\n })\n\n }\n\n}\n\n\n\n#[get(\"/documents/{token}\")]\n\npub async fn get_documents_by_token(\n\n data: web::Data<Data>,\n\n token: web::Path<String>,\n\n) -> impl Responder {\n\n if let Some(documents) = data.get_documents_by_token(token.as_str()).await {\n\n HttpResponse::Ok().json(documents)\n\n } else {\n\n HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n", "file_path": "src/services/document.rs", "rank": 19, "score": 25672.11962432039 }, { "content": " HttpResponse::InternalServerError().json(WsError {\n\n error: \"An error occurred\".into(),\n\n })\n\n }\n\n}\n\n\n\n#[get(\"/document/{token}\")]\n\npub async fn get_document(\n\n data: web::Data<Data>,\n\n token: web::Path<String>,\n\n request: web::HttpRequest,\n\n) -> impl Responder {\n\n if let Some(documents) = data.get_documents_by_token(token.as_str()).await {\n\n if documents.is_empty() {\n\n return HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n\n });\n\n }\n\n\n\n if let Some(accept) = services::get_accepted_header(&request) {\n", "file_path": "src/services/document.rs", "rank": 20, "score": 25671.22571655047 }, { "content": " error: format!(\n\n \"An error occurred uploading the file: {:#?}\",\n\n e\n\n ),\n\n });\n\n }\n\n Err(FileError::IoError(e)) => {\n\n sentry::capture_error(&e);\n\n\n\n return HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\n\n \"An error occurred uploading the file: {:#?}\",\n\n e\n\n ),\n\n });\n\n }\n\n _ => {}\n\n }\n\n }\n\n Err(e) 
=> {\n", "file_path": "src/services/document.rs", "rank": 21, "score": 25668.747106355277 }, { "content": " return HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\n\n \"An error occurred downloading the remote file: {:#?}\",\n\n e\n\n ),\n\n });\n\n }\n\n },\n\n Err(e) => {\n\n sentry::capture_error(&e);\n\n\n\n return HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\n\n \"An error occurred uploading the file: {:#?}\",\n\n e\n\n ),\n\n });\n\n }\n\n },\n\n },\n", "file_path": "src/services/document.rs", "rank": 22, "score": 25667.971900079767 }, { "content": "pub trait Model: CacheItem + Send + Sync + Unpin + Serialize + DeserializeOwned {\n\n fn name() -> &'static str;\n\n\n\n fn prefix() -> String {\n\n Self::name().to_string()\n\n }\n\n\n\n fn default() -> Self;\n\n\n\n fn debug(&self) -> String;\n\n\n\n fn to_document(&self) -> MongoDocument;\n\n\n\n fn from_document(document: MongoDocument) -> Result<Self, ValueAccessError>;\n\n}\n", "file_path": "src/mongo/models/mod.rs", "rank": 23, "score": 24927.713460689498 }, { "content": " let mut bytes = zip_result.unwrap_or_default();\n\n let _ = bytes.seek(SeekFrom::Start(0));\n\n\n\n Ok(bytes.get_ref().to_vec())\n\n}\n\n\n\npub async fn merge_documents<F: FileProvider + ?Sized>(\n\n file_type: Arc<Box<F>>,\n\n mut documents: Vec<Document>,\n\n compiled: bool,\n\n) -> ExportCompilerResult<Vec<u8>> {\n\n if documents.len() == 1 {\n\n let document = documents.pop().unwrap();\n\n if let Some(ref file_path) = if compiled {\n\n file_type.generate_compiled_filepath(&document.file)\n\n } else {\n\n Some(document.file)\n\n } {\n\n match file_type.load(file_path).await {\n\n Ok(buffer) => match PdfDocument::load_mem(&buffer) {\n", "file_path": "src/services/filler/compiler.rs", "rank": 24, "score": 24705.899631742992 }, { "content": " Err(ExportCompilerError::GenericError(\n\n \"Error getting the compiled PDF file.\".to_string(),\n\n ))\n\n }\n\n } else {\n\n let documents_objects = processor::get_documents_containers(file_type, documents, compiled);\n\n if documents_objects.pages.is_empty() || documents_objects.objects.is_empty() {\n\n Err(ExportCompilerError::GenericError(\n\n \"Cannot extract PDFs documents\".into(),\n\n ))\n\n } else if let Some(mut document) = processor::process_documents(documents_objects) {\n\n get_document_buffer(&mut document)\n\n } else {\n\n Err(ExportCompilerError::GenericError(\n\n \"Error decoding the PDFs files.\".to_string(),\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/services/filler/compiler.rs", "rank": 25, "score": 24705.31100315412 }, { "content": " for document in documents.iter() {\n\n if let Err(e) = compile_document(file_type.clone(), map, &document).await {\n\n return Err(e);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn compile_document<F: FileProvider + ?Sized>(\n\n file_type: Arc<Box<F>>,\n\n map: &PDFillerMap,\n\n document: &Document,\n\n) -> HandlerCompilerResult<()> {\n\n match form::fields_filler(map, document).await {\n\n Ok(mut form) => {\n\n if let Some(compiled_filename) =\n\n file_type.generate_compiled_filepath(document.file.as_str())\n\n {\n\n let mut buf = Vec::new();\n", "file_path": "src/services/filler/compiler.rs", "rank": 26, "score": 24705.290713277984 }, { "content": " Ok(mut document) => get_document_buffer(&mut document),\n\n Err(e) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(ExportCompilerError::GenericError(format!(\n\n \"Error loading the PDF: {:#?}\",\n\n e\n\n )))\n\n }\n\n },\n\n Err(e) => {\n\n 
sentry::capture_error(&e);\n\n\n\n Err(ExportCompilerError::GenericError(format!(\n\n \"Error loading the PDF: {:#?}\",\n\n e\n\n )))\n\n }\n\n }\n\n } else {\n", "file_path": "src/services/filler/compiler.rs", "rank": 27, "score": 24704.797460068316 }, { "content": " match form.save_to(&mut buf) {\n\n Ok(_) => save_compiled_file(file_type, compiled_filename, buf).await,\n\n Err(e) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(HandlerCompilerError::Error(format!(\n\n \"Error {:#?} saving a PDF file, aborted.\",\n\n e\n\n )))\n\n }\n\n }\n\n } else {\n\n Err(HandlerCompilerError::Error(\n\n \"Error saving a PDF file, aborted.\".into(),\n\n ))\n\n }\n\n }\n\n Err(e) => match e {\n\n FillingError::Load(e) => match e {\n\n LoadError::LopdfError(Error::DictKey) => {\n", "file_path": "src/services/filler/compiler.rs", "rank": 28, "score": 24704.12740889491 }, { "content": " _ => Err(HandlerCompilerError::Error(\n\n \"Error saving a PDF file, aborted.\".to_string(),\n\n )),\n\n },\n\n _ => Err(HandlerCompilerError::FillingError(e)),\n\n },\n\n }\n\n}\n\n\n\npub async fn zip_documents<F: FileProvider + ?Sized>(\n\n file_type: Arc<Box<F>>,\n\n documents: Vec<Document>,\n\n compiled: bool,\n\n) -> ExportCompilerResult<Vec<u8>> {\n\n let buf = Vec::new();\n\n let w = std::io::Cursor::new(buf);\n\n let mut zip = zip::ZipWriter::new(w);\n\n\n\n for document in documents {\n\n if let Some(ref file_name) = crystalsoft_utils::get_filename(&document.file) {\n", "file_path": "src/services/filler/compiler.rs", "rank": 29, "score": 24703.365859364712 }, { "content": " buf: Vec<u8>,\n\n) -> HandlerCompilerResult<()> {\n\n match file_type.save(file_path.as_str(), buf).await {\n\n Ok(_) => Ok(()),\n\n Err(FileError::IoError(e)) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(HandlerCompilerError::Error(format!(\n\n \"Error {:#?} saving a PDF file, aborted.\",\n\n e\n\n )))\n\n }\n\n Err(FileError::BlockingError(e)) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(HandlerCompilerError::Error(format!(\n\n \"Error {:#?} saving a PDF file, aborted.\",\n\n e\n\n )))\n\n }\n", "file_path": "src/services/filler/compiler.rs", "rank": 30, "score": 24699.99541290722 }, { "content": "pub type PDFillerMap = HashMap<String, Value>;\n\n\n\npub type HandlerCompilerResult<T> = Result<T, HandlerCompilerError>;\n\n\n\npub enum HandlerCompilerError {\n\n FillingError(FillingError),\n\n Error(String),\n\n}\n\n\n\npub type ExportCompilerResult<T> = Result<T, ExportCompilerError>;\n\n\n\npub enum ExportCompilerError {\n\n GenericError(String),\n\n}\n\n\n\npub async fn compile_documents<F: FileProvider + ?Sized>(\n\n file_type: Arc<Box<F>>,\n\n map: &PDFillerMap,\n\n documents: &[Document],\n\n) -> HandlerCompilerResult<()> {\n", "file_path": "src/services/filler/compiler.rs", "rank": 31, "score": 24699.161414121692 }, { "content": " match zip.start_file(file_name, FileOptions::default()) {\n\n Ok(_) => match if compiled {\n\n file_type.generate_compiled_filepath(&document.file)\n\n } else {\n\n Some(document.file)\n\n } {\n\n Some(ref file_path) => match file_type.load(file_path).await {\n\n Ok(buffer) => match zip.write_all(&buffer) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return Err(ExportCompilerError::GenericError(format!(\n\n \"Error making a ZIP file: {:#?}\",\n\n e\n\n )));\n\n }\n\n },\n\n Err(e) => {\n\n return Err(ExportCompilerError::GenericError(format!(\n\n \"Error making a ZIP file: {:#?}\",\n\n e\n", "file_path": "src/services/filler/compiler.rs", "rank": 32, "score": 24699.087671130754 }, { "content": " if let 
Some(compiled_filename) =\n\n file_type.generate_compiled_filepath(&document.file)\n\n {\n\n match crystalsoft_utils::read_file_buf(&document.file) {\n\n Ok(buf) => save_compiled_file(file_type, compiled_filename, buf).await,\n\n Err(e) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(HandlerCompilerError::Error(format!(\n\n \"Error {:#?} saving a PDF file, aborted.\",\n\n e\n\n )))\n\n }\n\n }\n\n } else {\n\n Err(HandlerCompilerError::Error(\n\n \"Error saving a PDF file, aborted.\".to_string(),\n\n ))\n\n }\n\n }\n", "file_path": "src/services/filler/compiler.rs", "rank": 33, "score": 24698.90921283079 }, { "content": "use std::collections::HashMap;\n\nuse std::io::{Cursor, SeekFrom};\n\nuse std::io::{Seek, Write};\n\n\n\nuse async_std::sync::Arc;\n\n\n\nuse serde_json::Value;\n\n\n\nuse pdf_forms::LoadError;\n\n\n\nuse lopdf::{Document as PdfDocument, Error};\n\n\n\nuse zip::write::FileOptions;\n\n\n\nuse crate::file::{FileError, FileProvider};\n\nuse crate::mongo::models::document::Document;\n\nuse crate::services::filler::form;\n\nuse crate::services::filler::form::FillingError;\n\nuse crate::services::filler::processor;\n\n\n", "file_path": "src/services/filler/compiler.rs", "rank": 34, "score": 24698.849995369048 }, { "content": " Err(FileError::S3Error(e)) => {\n\n sentry::capture_error(&e);\n\n\n\n Err(HandlerCompilerError::Error(format!(\n\n \"Error {:#?} saving a PDF file, aborted.\",\n\n e\n\n )))\n\n }\n\n _ => Err(HandlerCompilerError::Error(\n\n \"Error: cannot save the file.\".into(),\n\n )),\n\n }\n\n}\n", "file_path": "src/services/filler/compiler.rs", "rank": 35, "score": 24697.152595050822 }, { "content": " )));\n\n }\n\n },\n\n None => {\n\n return Err(ExportCompilerError::GenericError(\n\n \"Error making a ZIP file.\".into(),\n\n ));\n\n }\n\n },\n\n Err(e) => {\n\n return Err(ExportCompilerError::GenericError(format!(\n\n \"Error making a ZIP file: {:#?}\",\n\n e\n\n )));\n\n }\n\n }\n\n }\n\n }\n\n\n\n let zip_result = zip.finish();\n", "file_path": "src/services/filler/compiler.rs", "rank": 36, "score": 24697.119158147005 }, { "content": "use bson::doc;\n\nuse bson::document::ValueAccessError;\n\nuse chrono::{DateTime, Utc};\n\nuse mongodb::bson::Document as MongoDocument;\n\nuse serde::{Deserialize, Serialize};\n\nuse simple_cache::CacheItem;\n\n\n\nuse crate::mongo::models::Model;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Document {\n\n #[serde(rename = \"_id\", skip_serializing_if = \"Option::is_none\")]\n\n id: Option<String>,\n\n pub token: String,\n\n pub file: String,\n\n pub date: DateTime<Utc>,\n\n}\n\n\n\nimpl Document {\n\n pub fn new(token: String, file: String) -> Self {\n", "file_path": "src/mongo/models/document.rs", "rank": 37, "score": 24449.621943035098 }, { "content": " Self {\n\n id: None,\n\n token,\n\n file,\n\n date: Utc::now(),\n\n }\n\n }\n\n}\n\n\n\nimpl CacheItem for Document {}\n\n\n\nimpl Model for Document {\n\n fn name() -> &'static str {\n\n \"document\"\n\n }\n\n\n\n fn default() -> Self {\n\n Self {\n\n id: None,\n\n token: \"\".into(),\n", "file_path": "src/mongo/models/document.rs", "rank": 38, "score": 24447.137637902226 }, { "content": "\n\n Ok(Self {\n\n id: Some(document.get_object_id(\"_id\")?.to_hex()),\n\n token: document.get_str(\"token\")?.to_owned(),\n\n file: document.get_str(\"file\")?.to_owned(),\n\n date: document.get_datetime(\"date\")?.to_owned(),\n\n })\n\n }\n\n}\n", "file_path": "src/mongo/models/document.rs", "rank": 39, "score": 24444.216840058853 }, { "content": " file: 
\"\".into(),\n\n date: Utc::now(),\n\n }\n\n }\n\n\n\n fn debug(&self) -> String {\n\n format!(\"{:#?}\", self)\n\n }\n\n\n\n fn to_document(&self) -> MongoDocument {\n\n doc! {\n\n \"token\": self.token.clone(),\n\n \"file\": self.file.clone(),\n\n \"date\": self.date,\n\n }\n\n }\n\n\n\n fn from_document(document: MongoDocument) -> Result<Self, ValueAccessError> {\n\n // Currently not working\n\n // bson::from_bson::<Document>(bson::Bson::Document(document)).unwrap_or(Self::default())\n", "file_path": "src/mongo/models/document.rs", "rank": 40, "score": 24442.742382185188 }, { "content": "use std::collections::HashMap;\n\nuse std::str;\n\n\n\nuse serde_json::Value;\n\n\n\nuse pdf_forms::{FieldState, Form, LoadError, ValueError};\n\n\n\nuse lopdf::xobject;\n\n\n\nuse regex::Regex;\n\n\n\nuse crate::client;\n\nuse crate::mongo::models::document::Document;\n\nuse crate::utils;\n\n\n\npub type PDFillerMap = HashMap<String, Value>;\n\n\n\nconst REQUIRED_MARKER: char = '!';\n\nconst IMAGE_REGEX: &str = r\"_af_image$\";\n\n\n", "file_path": "src/services/filler/form.rs", "rank": 43, "score": 11.957048498107879 }, { "content": "use std::str;\n\n\n\nuse serde::Serialize;\n\n\n\nuse reqwest::Client;\n\n\n\nconst USER_AGENT_KEY: &str = \"User-Agent\";\n\nconst UA: &str = \"PDFiller\";\n\n\n\npub async fn get<S: AsRef<str>>(uri: S) -> Option<Vec<u8>> {\n\n let client_request = Client::default().get(uri.as_ref());\n\n let response = client_request.header(USER_AGENT_KEY, UA).send().await;\n\n\n\n match response {\n\n Ok(response) => match response.bytes().await {\n\n Ok(body) => Some(body.to_vec()),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n", "file_path": "src/client.rs", "rank": 46, "score": 11.21393006916041 }, { "content": "pub type FormResult = Result<Form, FillingError>;\n\n\n\n#[derive(Debug)]\n\npub enum FillingError {\n\n Load(LoadError),\n\n Value(ValueError),\n\n RequiredField(String),\n\n InternalError,\n\n}\n\n\n\npub async fn fields_filler(map: &PDFillerMap, document: &Document) -> FormResult {\n\n match Form::load(&document.file) {\n\n Ok(mut form) => {\n\n for (index, name) in form.get_all_names().iter().enumerate() {\n\n if let Some(name) = name {\n\n let name = name.trim_start_matches(REQUIRED_MARKER);\n\n\n\n let mut value = map.get(name);\n\n let result = {\n\n if value.is_some() {\n", "file_path": "src/services/filler/form.rs", "rank": 47, "score": 10.88392982440004 }, { "content": "pub mod local;\n\npub mod s3;\n\n\n\nuse std::fmt;\n\nuse std::io::Error;\n\n\n\nuse ::s3::S3Error;\n\nuse actix_web::error::BlockingError;\n\nuse async_trait::async_trait;\n\nuse serde::de::StdError;\n\nuse uuid::Uuid;\n\n\n\nuse crate::client;\n\n\n\npub const PATH_COMPILED: &str = \"compiled/\";\n\n\n\npub type FileResult<T> = Result<T, FileError>;\n\n\n\n#[derive(Debug)]\n\npub enum FileError {\n", "file_path": "src/file/mod.rs", "rank": 48, "score": 10.32042588444745 }, { "content": " }\n\n }\n\n\n\n pub async fn get_all_documents(&self) -> Option<Vec<Document>> {\n\n if let Some(documents) = self.mongo.get_all::<Document, _>(\"date\").await {\n\n if !documents.is_empty() {\n\n Some(documents)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub async fn get_documents_by_token<S: AsRef<str>>(&self, value: S) -> Option<Vec<Document>> {\n\n if let Some(documents) = self\n\n .mongo\n\n .get_all_by::<Document, _>(\"token\", value.as_ref(), \"date\")\n\n .await\n", "file_path": "src/data/mod.rs", "rank": 49, "score": 10.202805949562782 }, { "content": "pub struct SentryConfig 
{\n\n pub dsn: String,\n\n}\n\n\n\nimpl Config {\n\n pub fn new<S: AsRef<str>>(path: S) -> Self {\n\n match crystalsoft_utils::read_file_string(path.as_ref()) {\n\n Ok(configuration) => {\n\n info!(\"\\\"{}\\\" loaded correctly.\", path.as_ref());\n\n\n\n let configuration =\n\n envsubst::substitute(configuration, &env::vars().collect()).unwrap();\n\n\n\n toml::from_str(&configuration).unwrap_or_else(|e| {\n\n panic!(\n\n \"Error {:#?} loading this configuration: {:#?}\",\n\n e, configuration\n\n )\n\n })\n\n }\n\n Err(e) => panic!(\"Couldn't open \\\"{}\\\", error: {:#?}\", path.as_ref(), e),\n\n }\n\n }\n\n}\n", "file_path": "src/config/mod.rs", "rank": 50, "score": 10.187581935110003 }, { "content": " Credentials::new(\n\n config.s3_access_key.as_deref(),\n\n config.s3_secret_key.as_deref(),\n\n None,\n\n None,\n\n None,\n\n )\n\n .unwrap(),\n\n )\n\n .unwrap(),\n\n config,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl FileProvider for S3 {\n\n async fn load(&self, file_path: &str) -> FileResult<Vec<u8>> {\n\n match self.bucket.get_object(file_path).await {\n\n Ok((data, _code)) => Ok(data),\n", "file_path": "src/file/s3.rs", "rank": 52, "score": 9.753269839951392 }, { "content": " match self\n\n .get_collection(T::name())\n\n .await\n\n .insert_one(model.to_document(), None)\n\n .await\n\n .map_err(|e| {\n\n error!(\n\n \"Error getting {} with model {}: {:#?}\",\n\n T::name(),\n\n model.debug(),\n\n e\n\n );\n\n\n\n sentry::capture_error(&e);\n\n\n\n e\n\n }) {\n\n Ok(result) => {\n\n if let Some(object_id) = result.inserted_id.as_object_id() {\n\n self.cache\n", "file_path": "src/mongo/mod.rs", "rank": 53, "score": 9.618915312160356 }, { "content": " match <compiler::PDFillerMap>::deserialize(value) {\n\n Ok(ref map) => {\n\n if let Some(documents) =\n\n data.get_documents_by_token(token.as_str()).await\n\n {\n\n if documents.is_empty() {\n\n return HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n\n });\n\n }\n\n\n\n match compiler::compile_documents(\n\n data.file.clone(),\n\n map,\n\n &documents,\n\n )\n\n .await\n\n {\n\n Ok(_) => {\n\n if let Some(accept) =\n", "file_path": "src/services/filler/mod.rs", "rank": 54, "score": 9.583819023760173 }, { "content": " Err(e) => Err(FileError::S3Error(e)),\n\n }\n\n }\n\n\n\n async fn save(&self, file_path: &str, data: Vec<u8>) -> FileResult<()> {\n\n match self.bucket.put_object(file_path, &data).await {\n\n Ok((_data, _code)) => Ok(()),\n\n Err(e) => Err(FileError::S3Error(e)),\n\n }\n\n }\n\n\n\n fn base_path(&self) -> &str {\n\n self.config.path.as_str()\n\n }\n\n}\n", "file_path": "src/file/s3.rs", "rank": 55, "score": 9.509926897239392 }, { "content": "pub mod compiler;\n\nmod form;\n\nmod processor;\n\n\n\nuse std::str;\n\n\n\nuse actix_web::{post, web, HttpResponse, Responder};\n\n\n\nuse serde::Deserialize;\n\nuse serde_json::Value;\n\n\n\nuse crate::data::Data;\n\nuse crate::services::{self, WsError};\n\n\n", "file_path": "src/services/filler/mod.rs", "rank": 56, "score": 9.416607887688897 }, { "content": "\n\n#[async_trait]\n\nimpl FileProvider for Local {\n\n async fn load(&self, file_path: &str) -> FileResult<Vec<u8>> {\n\n crystalsoft_utils::read_file_buf(file_path).map_err(FileError::IoError)\n\n }\n\n\n\n async fn save(&self, file_path: &str, data: Vec<u8>) -> FileResult<()> {\n\n match crystalsoft_utils::get_filepath(file_path) {\n\n Some(path) => match fs::create_dir_all(path) {\n\n Ok(_) => {\n\n let file_path: String = file_path.into();\n\n match web::block(|| 
fs::File::create(file_path)).await {\n\n Ok(Ok(mut file)) => match file.write_all(&data) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(FileError::IoError(e)),\n\n },\n\n Ok(Err(e)) => Err(FileError::IoError(e)),\n\n Err(e) => Err(FileError::BlockingError(e)),\n\n }\n", "file_path": "src/file/local.rs", "rank": 57, "score": 9.401597905190197 }, { "content": " services::get_accepted_header(&request)\n\n {\n\n let export_result =\n\n if accept.as_str() == mime::APPLICATION_PDF {\n\n compiler::merge_documents(\n\n data.file.clone(),\n\n documents,\n\n true,\n\n )\n\n .await\n\n } else {\n\n compiler::zip_documents(\n\n data.file.clone(),\n\n documents,\n\n true,\n\n )\n\n .await\n\n };\n\n\n\n services::export_content(accept, export_result)\n", "file_path": "src/services/filler/mod.rs", "rank": 58, "score": 9.26507541786688 }, { "content": " Err(e) => {\n\n error!(\n\n \"Error getting keys with pattern {:#?}: {:#?}\",\n\n if let Some(ref key_value) = key_value {\n\n format!(\"{}, {}\", key_value.0.as_ref(), key_value.1.as_ref())\n\n } else {\n\n \"[empty]\".into()\n\n },\n\n e\n\n );\n\n\n\n sentry::capture_error(&e);\n\n\n\n None\n\n }\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub async fn get_one<T: 'static + Model, S: AsRef<str>>(&self, id: ObjectId) -> Option<Arc<T>> {\n", "file_path": "src/mongo/mod.rs", "rank": 59, "score": 8.719581855523746 }, { "content": " }\n\n Err(e) => Err(FileError::IoError(e)),\n\n },\n\n None => Err(FileError::NotSaved),\n\n }\n\n }\n\n\n\n fn base_path(&self) -> &str {\n\n self.config.path.as_str()\n\n }\n\n}\n", "file_path": "src/file/local.rs", "rank": 60, "score": 8.6792747708759 }, { "content": " {\n\n if !documents.is_empty() {\n\n Some(documents)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub async fn create_document(&self, document: Document) -> DataResult<()> {\n\n self.mongo.create::<Document>(document).await?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/data/mod.rs", "rank": 61, "score": 8.675543486031426 }, { "content": "use crate::mongo::models::Model;\n\nuse crate::mongo::{MongoDB, MongoResult};\n\n\n\n#[derive(Clone)]\n\npub struct MongoWrapper {\n\n mongo: MongoDB,\n\n}\n\n\n\nimpl MongoWrapper {\n\n pub fn new(mongo: MongoDB) -> Self {\n\n Self { mongo }\n\n }\n\n\n\n /// Generic\n\n pub async fn get_all<T: 'static + Model, S: AsRef<str>>(&self, sort_by: S) -> Option<Vec<T>> {\n\n self.mongo.get::<T, _>(None, sort_by).await\n\n }\n\n\n\n pub async fn get_all_by<T: 'static + Model, S: AsRef<str>>(\n\n &self,\n", "file_path": "src/mongo/wrapper.rs", "rank": 62, "score": 8.610457291533603 }, { "content": " Err(_) => None,\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub async fn delete_one<T: Model>(&self, id: ObjectId) -> MongoResult<()> {\n\n self.get_collection(T::name())\n\n .await\n\n .delete_one(\n\n doc! 
{\n\n \"_id\": id.clone(),\n\n },\n\n None,\n\n )\n\n .await\n\n .map_err(|e| {\n\n error!(\"Error getting {} with key {}: {:#?}\", T::name(), &id, e);\n\n\n\n sentry::capture_error(&e);\n\n\n", "file_path": "src/mongo/mod.rs", "rank": 63, "score": 8.592846654562857 }, { "content": " let image_regex = Regex::new(IMAGE_REGEX)\n\n .map_err(|_err| FillingError::InternalError)?;\n\n\n\n value = map.get(image_regex.replace(name, \"\").as_ref());\n\n\n\n if let Some(uri) = value {\n\n let object_id = form.get_object_id(index);\n\n if let Ok(page_id) = form.document.get_object_page(object_id) {\n\n if let Some(image) =\n\n client::get(uri.as_str().unwrap_or(\"\")).await\n\n {\n\n if let Ok(object) = form.document.get_object(object_id) {\n\n if let Ok(dict) = object.as_dict() {\n\n if let Ok(rect) = utils::get_object_rect(dict) {\n\n if let Ok(stream) = xobject::image_from(image) {\n\n let _ = form.document.insert_image(\n\n page_id,\n\n stream,\n\n (rect.0, rect.1),\n\n (rect.3, rect.2),\n", "file_path": "src/services/filler/form.rs", "rank": 64, "score": 8.566098390999379 }, { "content": " match form.get_state(index) {\n\n FieldState::Text { required, .. } => {\n\n if required && value.is_none() {\n\n Err(FillingError::RequiredField(name.to_owned()))\n\n } else if let Some(value) = value {\n\n form.set_text(index, value.as_str().unwrap_or(\"\").into())\n\n .map_err(FillingError::Value)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n FieldState::Radio { required, .. } => {\n\n if required && value.is_none() {\n\n Err(FillingError::RequiredField(name.to_owned()))\n\n } else if let Some(value) = value {\n\n form.set_radio(index, value.as_str().unwrap_or(\"\").into())\n\n .map_err(FillingError::Value)\n\n } else {\n\n Ok(())\n\n }\n", "file_path": "src/services/filler/form.rs", "rank": 65, "score": 8.545992373022266 }, { "content": " .iter()\n\n .map(|value| {\n\n value.as_str().unwrap_or(\"\").to_string()\n\n })\n\n .collect(),\n\n )\n\n .map_err(FillingError::Value),\n\n None => Ok(()),\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n FieldState::ComboBox { required, .. } => {\n\n if required && value.is_none() {\n\n Err(FillingError::RequiredField(name.to_owned()))\n\n } else if let Some(value) = value {\n\n match value.as_array() {\n\n Some(values) => form\n\n .set_combo_box(\n", "file_path": "src/services/filler/form.rs", "rank": 66, "score": 8.508604114682422 }, { "content": "use serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n\nuse mongodb::bson::Document as MongoDocument;\n\n\n\nuse bson::document::ValueAccessError;\n\n\n\nuse simple_cache::CacheItem;\n\n\n\npub mod document;\n\n\n", "file_path": "src/mongo/models/mod.rs", "rank": 67, "score": 8.379963992004264 }, { "content": "\n\n #[allow(dead_code)]\n\n pub async fn update_one<T: 'static + Model>(&self, id: ObjectId, model: T) -> MongoResult<()> {\n\n self.get_collection(T::name())\n\n .await\n\n .update_one(\n\n doc! {\n\n \"_id\": id.clone(),\n\n },\n\n model.to_document(),\n\n None,\n\n )\n\n .await\n\n .map_err(|e| {\n\n error!(\"Error getting {} with key {}: {:#?}\", T::name(), &id, e);\n\n\n\n sentry::capture_error(&e);\n\n\n\n Error::MongoDBError(e)\n\n })?;\n", "file_path": "src/mongo/mod.rs", "rank": 68, "score": 8.298706017355336 }, { "content": " }\n\n FieldState::CheckBox { required, .. 
} => {\n\n if required && value.is_none() {\n\n Err(FillingError::RequiredField(name.to_owned()))\n\n } else if let Some(value) = value {\n\n form.set_check_box(index, value.as_bool().unwrap_or(false))\n\n .map_err(FillingError::Value)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n FieldState::ListBox { required, .. } => {\n\n if required && value.is_none() {\n\n Err(FillingError::RequiredField(name.to_owned()))\n\n } else if let Some(value) = value {\n\n match value.as_array() {\n\n Some(values) => form\n\n .set_list_box(\n\n index,\n\n values\n", "file_path": "src/services/filler/form.rs", "rank": 69, "score": 7.904997836723403 }, { "content": "}\n\n\n\n#[allow(dead_code)]\n\npub async fn post<S: AsRef<str>, D: Serialize>(uri: S, request: D) -> Option<Vec<u8>> {\n\n let client_request = Client::default().post(uri.as_ref());\n\n let response = client_request\n\n .header(USER_AGENT_KEY, UA)\n\n .json(&request)\n\n .send()\n\n .await;\n\n\n\n match response {\n\n Ok(response) => match response.bytes().await {\n\n Ok(body) => Some(body.to_vec()),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/client.rs", "rank": 70, "score": 7.800329868169403 }, { "content": " match self.cache.get::<T, _>(&id) {\n\n Ok(Some(value)) => value,\n\n Ok(None) => {\n\n match self\n\n .get_collection(T::name())\n\n .await\n\n .find_one(\n\n doc! {\n\n \"_id\": id.clone(),\n\n },\n\n None,\n\n )\n\n .await\n\n {\n\n Ok(result) => match result {\n\n Some(document) => {\n\n let _ = self.cache.insert::<T>(\n\n id.clone(),\n\n Some(T::from_document(document).unwrap_or_else(|_| T::default())),\n\n );\n", "file_path": "src/mongo/mod.rs", "rank": 71, "score": 7.243786516519996 }, { "content": "use std::sync::Arc;\n\n\n\nuse async_std::sync::RwLock;\n\nuse bson::{doc, oid::ObjectId};\n\nuse futures_lite::StreamExt;\n\nuse log::error;\n\nuse mongodb::error::Error as MongoDBError;\n\nuse mongodb::options::{ClientOptions, FindOptions};\n\nuse mongodb::{Client, Collection, Database};\n\nuse simple_cache::{Cache, CacheError};\n\n\n\nuse crate::config::MongoConfig;\n\nuse crate::mongo::models::Model;\n\n\n\npub mod models;\n\npub mod wrapper;\n\n\n", "file_path": "src/mongo/mod.rs", "rank": 72, "score": 7.212951872409576 }, { "content": "use async_std::sync::Arc;\n\n\n\nuse crate::file::FileProvider;\n\nuse crate::mongo::models::document::Document;\n\nuse crate::mongo::wrapper::MongoWrapper;\n\nuse crate::mongo::Error;\n\n\n\npub type DataResult<T> = Result<T, Error>;\n\n\n\n#[derive(Clone)]\n\npub struct Data {\n\n pub file: Arc<Box<dyn FileProvider>>,\n\n mongo: MongoWrapper,\n\n}\n\n\n\nimpl Data {\n\n pub fn new(file: Box<dyn FileProvider>, mongo: MongoWrapper) -> Self {\n\n Data {\n\n file: Arc::new(file),\n\n mongo,\n", "file_path": "src/data/mod.rs", "rank": 73, "score": 7.206584326619004 }, { "content": " } else {\n\n HttpResponse::NotAcceptable().json(WsError {\n\n error: \"Only PDF or Streams are accepted\".into(),\n\n })\n\n }\n\n }\n\n Err(compiler::HandlerCompilerError::FillingError(e)) => {\n\n HttpResponse::BadRequest().json(WsError {\n\n error: format!(\n\n \"Error during document filling: {:#?}\",\n\n e\n\n ),\n\n })\n\n }\n\n Err(compiler::HandlerCompilerError::Error(message)) => {\n\n HttpResponse::InternalServerError()\n\n .json(WsError { error: message })\n\n }\n\n }\n\n } else {\n", "file_path": "src/services/filler/mod.rs", "rank": 75, "score": 7.16139385056066 }, { "content": " Ok(_) => {\n\n filepath = Some(remote_file_path.clone());\n\n }\n\n Err(FileError::IoError(e)) => {\n\n 
sentry::capture_error(&e);\n\n }\n\n Err(FileError::S3Error(e)) => {\n\n sentry::capture_error(&e);\n\n }\n\n Err(FileError::BlockingError(e)) => {\n\n sentry::capture_error(&e);\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n filepath\n\n }\n\n\n\n async fn load(&self, file_path: &str) -> FileResult<Vec<u8>>;\n\n\n\n async fn save(&self, file_path: &str, data: Vec<u8>) -> FileResult<()>;\n\n\n\n fn base_path(&self) -> &str;\n\n}\n", "file_path": "src/file/mod.rs", "rank": 76, "score": 6.849364574400303 }, { "content": "\n\n self.cache\n\n .insert::<T>(id, Some(model))\n\n .map_err(Error::CacheError)?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn get<T: Model, S: AsRef<str>>(\n\n &self,\n\n key_value: Option<(S, S)>,\n\n sort_by: S,\n\n ) -> Option<Vec<T>> {\n\n let filter = key_value.as_ref().map(|key_value| {\n\n doc! {\n\n key_value.0.as_ref(): key_value.1.as_ref(),\n\n }\n\n });\n\n\n\n match self\n", "file_path": "src/mongo/mod.rs", "rank": 77, "score": 6.8035387449420295 }, { "content": "# PDFiller\n\nPDF Form filler API with a built-in reverse proxy with Nginx, Amazon S3 integration for storage and MongoDB.\n\n\n\n## Running the web service\n\nThe application uses `docker-compose` in order to run all the needed services. You have to install last Docker CE with Docker compose in order to run it.\n\n\n\n## Compiling\n\nYou must compile the binary before running it, use the command `cargo +stable build` and then `cargo +stable run` in order to compile and execute it.\n\n\n\nFor the standard compile just run `cargo +stable build --release` but if you have to run the application inside a container, you have to build it with `cargo +stable build --release --target x86_64-unknown-linux-musl --locked`.\n\n\n\n## Commands\n\nRunning:\n\n\n\n* `make start_local` starts the local instance (WARNING: you must have MongoDB running locally)\n\n* `make start` for default: starts the dev instance\n\n* `make start_prod` for the production instance\n\n\n\n*Append `_recreate` to each command in order to force the recreation of containers.*\n\n\n\nStopping:\n\n\n\n* `make stop` to shutdown all containers\n\n\n\n## TO DO\n\n\n\nThe following features aren't implemented yet:\n\n- [x] Image fields with a pattern for the field name\n\n- [x] Merge all PDFs into one PDF in addition to ZIP option (default)\n\n- [x] Fix merged PDF order while uploaded or compiled\n\n- [ ] PDF's pages rasterization (flattening)\n\n- [ ] Files caching\n\n- [ ] Temporary files deletion monitor\n", "file_path": "README.md", "rank": 78, "score": 6.765260375857631 }, { "content": " (Some(connection_str), _) => String::from(connection_str),\n\n (None, Some(host)) => {\n\n let connection_str =\n\n format!(\"{}:{}\", host, config.port.unwrap_or(Self::DEFAULT_PORT));\n\n\n\n if let Some(ref user) = config.user {\n\n format!(\n\n \"{}://{}:{}@{}\",\n\n Self::MONGDB_STR,\n\n user,\n\n config.password.as_deref().unwrap_or(\"\"),\n\n connection_str\n\n )\n\n } else {\n\n format!(\"{}://{}\", Self::MONGDB_STR, connection_str)\n\n }\n\n }\n\n (None, None) => {\n\n panic!(\"Missing configuration for MongoDB\")\n\n }\n", "file_path": "src/mongo/mod.rs", "rank": 79, "score": 6.254873585999584 }, { "content": " );\n\n\n\n let _ = form.remove_field(index);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n };\n\n\n\n if let Err(e) = result {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n\n\n Ok(form)\n\n }\n\n Err(e) => Err(FillingError::Load(e)),\n\n }\n\n}\n", "file_path": "src/services/filler/form.rs", "rank": 80, "score": 6.168923446618189 }, { 
"content": "use std::env;\n\n\n\nuse log::info;\n\nuse serde::Deserialize;\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct Config {\n\n pub service: ServiceConfig,\n\n pub server: ServerConfig,\n\n pub mongo: MongoConfig,\n\n pub sentry: Option<SentryConfig>,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct ServiceConfig {\n\n pub filesystem: String,\n\n pub s3_access_key: Option<String>,\n\n pub s3_secret_key: Option<String>,\n\n pub s3_bucket: Option<String>,\n\n pub s3_region: Option<String>,\n", "file_path": "src/config/mod.rs", "rank": 81, "score": 6.16623336256618 }, { "content": " index,\n\n values\n\n .iter()\n\n .map(|value| {\n\n value.as_str().unwrap_or(\"\").to_string()\n\n })\n\n .collect(),\n\n )\n\n .map_err(FillingError::Value),\n\n None => Ok(()),\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n _ => Ok(()),\n\n }\n\n } else {\n\n // This is needed as the current regex is a bit unuseful\n\n #[allow(clippy::trivial_regex)]\n", "file_path": "src/services/filler/form.rs", "rank": 82, "score": 6.10612991187731 }, { "content": "use std::fs;\n\nuse std::io::Write;\n\n\n\nuse async_trait::async_trait;\n\n\n\nuse actix_web::web;\n\n\n\nuse crate::config::ServiceConfig;\n\nuse crate::file::{FileError, FileProvider, FileResult};\n\n\n\n#[derive(Clone)]\n\npub struct Local {\n\n config: ServiceConfig,\n\n}\n\n\n\nimpl Local {\n\n pub fn new(config: ServiceConfig) -> Self {\n\n Self { config }\n\n }\n\n}\n", "file_path": "src/file/local.rs", "rank": 83, "score": 5.791064658388906 }, { "content": "mod document;\n\nmod filler;\n\n\n\nuse actix_web::dev::BodyEncoding;\n\nuse actix_web::http::{header::ACCEPT, ContentEncoding};\n\nuse actix_web::{web, HttpResponse};\n\nuse serde::Serialize;\n\n\n\nuse crate::services::filler::compiler;\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/services/mod.rs", "rank": 84, "score": 5.447994565347174 }, { "content": "use async_trait::async_trait;\n\n\n\nuse s3::creds::Credentials;\n\nuse s3::Bucket;\n\n\n\nuse crate::config::ServiceConfig;\n\nuse crate::file::{FileError, FileProvider, FileResult};\n\n\n\n#[derive(Clone)]\n\npub struct S3 {\n\n config: ServiceConfig,\n\n bucket: Bucket,\n\n}\n\n\n\nimpl S3 {\n\n pub fn new(config: ServiceConfig) -> Self {\n\n Self {\n\n bucket: Bucket::new(\n\n config.s3_bucket.as_deref().unwrap(),\n\n config.s3_region.as_deref().unwrap().parse().unwrap(),\n", "file_path": "src/file/s3.rs", "rank": 85, "score": 5.311657483506886 }, { "content": " }\n\n None => {\n\n let _ = self.cache.insert::<T>(id.clone(), None);\n\n }\n\n },\n\n Err(e) => {\n\n error!(\"Error getting {} with key {}: {:#?}\", T::name(), &id, e);\n\n\n\n sentry::capture_error(&e);\n\n\n\n let _ = self.cache.insert::<T>(id.clone(), None);\n\n }\n\n }\n\n\n\n if let Ok(Some(value)) = self.cache.get::<T, _>(&id) {\n\n value\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": "src/mongo/mod.rs", "rank": 86, "score": 5.216217651215554 }, { "content": " .get_collection(T::name())\n\n .await\n\n .find(\n\n filter,\n\n FindOptions::builder()\n\n .sort(doc! 
{ sort_by.as_ref(): 1 })\n\n .build(),\n\n )\n\n .await\n\n {\n\n Ok(mut cursor) => {\n\n let mut results = Vec::new();\n\n while let Some(document) = cursor.next().await {\n\n if let Ok(document) = document {\n\n results.push(T::from_document(document).unwrap_or_else(|_| T::default()));\n\n }\n\n }\n\n\n\n Some(results)\n\n }\n", "file_path": "src/mongo/mod.rs", "rank": 87, "score": 4.438196997835206 }, { "content": "use crate::data::Data;\n\nuse crate::file::local::Local;\n\nuse crate::file::s3::S3;\n\nuse crate::mongo::wrapper::MongoWrapper;\n\nuse crate::mongo::MongoDB;\n\n\n\nconst API_VERSION: &str = \"v1\";\n\n\n\n#[actix_rt::main]\n\nasync fn main() -> std::io::Result<()> {\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\"))\n\n .format(|buf, record| {\n\n writeln!(\n\n buf,\n\n \"{} [{}] - {} - {}\",\n\n ChronoLocal::now().format(\"%Y-%m-%dT%H:%M:%S\"),\n\n record.level(),\n\n record.module_path().unwrap_or(\"main\"),\n\n record.args()\n\n )\n", "file_path": "src/main.rs", "rank": 88, "score": 4.377921422767415 }, { "content": " };\n\n\n\n let options = ClientOptions::parse(connection_str.as_str())\n\n .await\n\n .unwrap_or_else(|e| panic!(\"Error {:#?} creating connection to MongoDB\", e));\n\n\n\n let client = Client::with_options(options)\n\n .unwrap_or_else(|e| panic!(\"Error {:#?} connecting to MongoDB\", e));\n\n\n\n MongoDB {\n\n database: Arc::new(RwLock::new(client.database(config.db_name.as_str()))),\n\n cache: Cache::new(),\n\n }\n\n }\n\n\n\n async fn get_collection<S: AsRef<str>>(&self, name: S) -> Collection {\n\n self.database.write().await.collection(name.as_ref())\n\n }\n\n\n\n pub async fn insert<T: 'static + Model>(&self, model: T) -> MongoResult<()> {\n", "file_path": "src/mongo/mod.rs", "rank": 89, "score": 4.025764878340031 }, { "content": " })\n\n .init();\n\n\n\n let name = \"PDFIller\";\n\n\n\n let matches = ClapApp::new(env!(\"CARGO_PKG_NAME\"))\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .name(name)\n\n .author(\"Dario Cancelliere <dario.cancelliere@facile.it>\")\n\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .arg(\n\n Arg::with_name(\"path\")\n\n .short(\"p\")\n\n .long(\"path\")\n\n .required(false)\n\n .takes_value(true)\n\n .default_value(\"config\")\n\n .help(\"Base config file path\"),\n\n )\n\n .get_matches();\n", "file_path": "src/main.rs", "rank": 90, "score": 3.9636252236879104 }, { "content": " pub path: String,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct ServerConfig {\n\n pub bind_address: String,\n\n pub bind_port: u32,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct MongoConfig {\n\n pub string: Option<String>,\n\n pub host: Option<String>,\n\n pub port: Option<u16>,\n\n pub db_name: String,\n\n pub user: Option<String>,\n\n pub password: Option<String>,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n", "file_path": "src/config/mod.rs", "rank": 91, "score": 3.626936586254136 }, { "content": "mod client;\n\nmod config;\n\nmod data;\n\nmod file;\n\nmod mongo;\n\nmod services;\n\nmod utils;\n\n\n\nuse std::io::Write;\n\n\n\nuse actix_web::{\n\n middleware::{Logger, NormalizePath, TrailingSlash},\n\n web, App, HttpServer,\n\n};\n\nuse chrono::Local as ChronoLocal;\n\nuse clap::{App as ClapApp, Arg};\n\nuse env_logger::Env;\n\nuse log::info;\n\n\n\nuse crate::config::Config;\n", "file_path": "src/main.rs", "rank": 92, "score": 3.4769333074577684 }, { "content": " NotSaved,\n\n BlockingError(BlockingError),\n\n IoError(Error),\n\n S3Error(S3Error),\n\n}\n\n\n\nimpl fmt::Display for FileError {\n\n fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::NotSaved => {\n\n write!(f, \"File couldn't be saved\")\n\n }\n\n FileError::BlockingError(e) => {\n\n write!(f, \"{:#?}\", e)\n\n }\n\n FileError::IoError(e) => {\n\n write!(f, \"{:#?}\", e)\n\n }\n\n FileError::S3Error(e) => {\n\n write!(f, \"{:#?}\", e)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl StdError for FileError {}\n\n\n\n#[async_trait]\n", "file_path": "src/file/mod.rs", "rank": 93, "score": 3.225864390588145 }, { "content": "use lopdf::Dictionary;\n\n\n", "file_path": "src/utils.rs", "rank": 94, "score": 3.1314725462571658 }, { "content": " .insert::<T>(object_id.to_owned(), Some(model))\n\n .map_err(Error::CacheError)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n Err(e) => {\n\n error!(\n\n \"Error getting {} with model {}: {:#?}\",\n\n T::name(),\n\n model.debug(),\n\n e\n\n );\n\n\n\n sentry::capture_error(&e);\n\n\n\n Err(Error::MongoDBError(e))\n\n }\n\n }\n\n }\n", "file_path": "src/mongo/mod.rs", "rank": 95, "score": 3.0054994508924606 }, { "content": " e\n\n })\n\n .map_err(Error::MongoDBError)?;\n\n\n\n self.cache.remove(&id).map_err(Error::CacheError)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/mongo/mod.rs", "rank": 96, "score": 3.0014448492602765 }, { "content": " HttpResponse::NotFound().json(WsError {\n\n error: \"No documents found for this token!\".into(),\n\n })\n\n }\n\n }\n\n Err(e) => HttpResponse::BadRequest().json(WsError {\n\n error: format!(\"Not a valid PDFiller request: {:#?}\", e),\n\n }),\n\n }\n\n } else {\n\n HttpResponse::BadRequest().json(WsError {\n\n error: \"Not a valid PDFiller request.\".into(),\n\n })\n\n }\n\n }\n\n Err(e) => HttpResponse::BadRequest().json(WsError {\n\n error: format!(\"Couldn't decode the body as JSON: {:#?}\", e),\n\n }),\n\n },\n\n Err(e) => HttpResponse::InternalServerError().json(WsError {\n\n error: format!(\"Error decoding the body: {:#?}\", e),\n\n }),\n\n }\n\n}\n", "file_path": "src/services/filler/mod.rs", "rank": 97, "score": 2.434726111096732 }, { "content": " key: S,\n\n value: S,\n\n sort_by: S,\n\n ) -> Option<Vec<T>> {\n\n self.mongo.get::<T, _>(Some((key, value)), sort_by).await\n\n }\n\n\n\n pub async fn create<T: 'static + Model>(&self, model: T) -> MongoResult<()> {\n\n self.mongo.insert::<T>(model).await\n\n }\n\n}\n", "file_path": "src/mongo/wrapper.rs", "rank": 98, "score": 1.9667478896835795 }, { "content": "\n\n info!(\n\n \"{} v{} by Dario Cancelliere\",\n\n name,\n\n env!(\"CARGO_PKG_VERSION\")\n\n );\n\n info!(\"{}\", env!(\"CARGO_PKG_DESCRIPTION\"));\n\n info!(\"\");\n\n\n\n let config = Config::new(&format!(\n\n \"{}/config.toml\",\n\n matches.value_of(\"path\").unwrap()\n\n ));\n\n\n\n if let Some(sentry) = config.sentry {\n\n let _guard = sentry::init(sentry.dsn);\n\n }\n\n\n\n let data = Data::new(\n\n if config.service.filesystem == \"local\" {\n", "file_path": "src/main.rs", "rank": 99, "score": 1.8149751985012705 } ]
Rust
openethereum/rpc/src/v1/helpers/subscribers.rs
snuspl/fluffy
d17e1fb3cda259d6f2c244ef67ee3a4bdf83261b
use std::{ops, str};
use std::collections::HashMap;

use jsonrpc_pubsub::{typed::{Subscriber, Sink}, SubscriptionId};
use ethereum_types::H64;

#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct Id(H64);

impl str::FromStr for Id {
	type Err = String;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		if s.starts_with("0x") {
			Ok(Id(s[2..].parse().map_err(|e| format!("{}", e))?))
		} else {
			Err("The id must start with 0x".into())
		}
	}
}

impl Id {
	pub fn as_string(&self) -> String {
		format!("{:?}", self.0)
	}
}

#[cfg(not(test))]
mod random {
	use rand::rngs::OsRng;

	pub type Rng = rand::rngs::OsRng;

	pub fn new() -> Rng {
		OsRng
	}
}

#[cfg(test)]
mod random {
	use rand::SeedableRng;
	use rand_xorshift::XorShiftRng;

	const RNG_SEED: [u8; 16] = [0u8; 16];

	pub type Rng = XorShiftRng;

	pub fn new() -> Rng {
		Rng::from_seed(RNG_SEED)
	}
}

pub struct Subscribers<T> {
	rand: random::Rng,
	subscriptions: HashMap<Id, T>,
}

impl<T> Default for Subscribers<T> {
	fn default() -> Self {
		Subscribers {
			rand: random::new(),
			subscriptions: HashMap::new(),
		}
	}
}

impl<T> Subscribers<T> {
	fn next_id(&mut self) -> Id {
		let data = H64::random_using(&mut self.rand);
		Id(data)
	}

	pub fn insert(&mut self, val: T) -> SubscriptionId {
		let id = self.next_id();
		debug!(target: "pubsub", "Adding subscription id={:?}", id);
		let s = id.as_string();
		self.subscriptions.insert(id, val);
		SubscriptionId::String(s)
	}

	pub fn remove(&mut self, id: &SubscriptionId) -> Option<T> {
		trace!(target: "pubsub", "Removing subscription id={:?}", id);
		match *id {
			SubscriptionId::String(ref id) => match id.parse() {
				Ok(id) => self.subscriptions.remove(&id),
				Err(_) => None,
			},
			_ => None,
		}
	}
}

impl<T> Subscribers<Sink<T>> {
	pub fn push(&mut self, sub: Subscriber<T>) {
		let id = self.next_id();
		if let Ok(sink) = sub.assign_id(SubscriptionId::String(id.as_string())) {
			debug!(target: "pubsub", "Adding subscription id={:?}", id);
			self.subscriptions.insert(id, sink);
		}
	}
}

impl<T, V> Subscribers<(Sink<T>, V)> {
	pub fn push(&mut self, sub: Subscriber<T>, val: V) {
		let id = self.next_id();
		if let Ok(sink) = sub.assign_id(SubscriptionId::String(id.as_string())) {
			debug!(target: "pubsub", "Adding subscription id={:?}", id);
			self.subscriptions.insert(id, (sink, val));
		}
	}
}

impl<T> ops::Deref for Subscribers<T> {
	type Target = HashMap<Id, T>;

	fn deref(&self) -> &Self::Target {
		&self.subscriptions
	}
}
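The `Subscribers` map above keys its entries by a random `H64`-backed `Id` and hands callers `SubscriptionId::String` handles. A minimal usage sketch, not part of this dataset record: the `demo_usage` function is hypothetical, it assumes it lives alongside the module above (so `Subscribers` and `SubscriptionId` are in scope), and it stores a plain `u64` payload instead of the `jsonrpc_pubsub::typed::Sink` normally kept here.

// Hypothetical driver; names below are illustrative, not from the source file.
fn demo_usage() {
	let mut subs: Subscribers<u64> = Subscribers::default();

	// `insert` draws a fresh random id and returns it as SubscriptionId::String("0x...").
	let id: SubscriptionId = subs.insert(42);
	assert_eq!(subs.len(), 1); // read access goes through the Deref impl to HashMap<Id, T>

	// `remove` parses the "0x..." string back into an `Id` and takes the stored value out.
	assert_eq!(subs.remove(&id), Some(42));
	assert!(subs.is_empty());
}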
use std::{ops, str}; use std::collections::HashMap; use jsonrpc_pubsub::{typed::{Subscriber, Sink}, SubscriptionId}; use ethereum_types::H64; #[derive(Debug, Clone, Hash, Eq, PartialEq)] pub struct Id(H64); impl str::FromStr for Id { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.starts_with("0x") { Ok(Id(s[2..].parse().map_err(|e| format!("{}", e))?)) } else { Err("The id must start with 0x".into()) } } } impl Id { pub fn as_string(&self) -> String { format!("{:?}", self.0) } } #[cfg(not(test))] mod random { use rand::rngs::OsRng; pub type Rng = rand::rngs::OsRng; pub fn new() -> Rng { OsRng } } #[cfg(test)] mod random { use rand::SeedableRng; use rand_xorshift::XorShiftRng; const RNG_SEED: [u8; 16] = [0u8; 16]; pub type Rng = XorShiftRng; pub fn new() -> Rng { Rng::from_seed(RNG_SEED) } } pub struct Subscribers<T> { rand: random::Rng, subscriptions: HashMap<Id, T>, } impl<T> Default for Subscribers<T> { fn default() -> Self { Subscribers { rand: random::new(), subscriptions: HashMap::new(), } } } impl<T> Subscribers<T> { fn next_id(&mut self) -> Id { let data = H64::random_using(&mut self.rand); Id(data) } pub fn insert(&mut self, val: T) -> SubscriptionId { let id = self.next_id(); debug!(target: "pubsub", "Adding subscription id={:?}", id); let s = id.as_string(); self.subscriptions.insert(id, val); SubscriptionId::String(s) }
} impl<T> Subscribers<Sink<T>> { pub fn push(&mut self, sub: Subscriber<T>) { let id = self.next_id(); if let Ok(sink) = sub.assign_id(SubscriptionId::String(id.as_string())) { debug!(target: "pubsub", "Adding subscription id={:?}", id); self.subscriptions.insert(id, sink); } } } impl<T, V> Subscribers<(Sink<T>, V)> { pub fn push(&mut self, sub: Subscriber<T>, val: V) { let id = self.next_id(); if let Ok(sink) = sub.assign_id(SubscriptionId::String(id.as_string())) { debug!(target: "pubsub", "Adding subscription id={:?}", id); self.subscriptions.insert(id, (sink, val)); } } } impl<T> ops::Deref for Subscribers<T> { type Target = HashMap<Id, T>; fn deref(&self) -> &Self::Target { &self.subscriptions } }
pub fn remove(&mut self, id: &SubscriptionId) -> Option<T> {
	trace!(target: "pubsub", "Removing subscription id={:?}", id);
	match *id {
		SubscriptionId::String(ref id) => match id.parse() {
			Ok(id) => self.subscriptions.remove(&id),
			Err(_) => None,
		},
		_ => None,
	}
}
function_block-full_function
[ { "content": "#[allow(dead_code)]\n\npub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8], start_stop_hook: &mut H) -> Vec<String> {\n\n\tlet _ = ::env_logger::try_init();\n\n\tlet tests = ethjson::test_helpers::state::Test::load(json_data)\n\n\t\t.expect(&format!(\"Could not parse JSON state test data from {}\", path.display()));\n\n\tlet mut failed = Vec::new();\n\n\n\n\tfor (name, test) in tests.into_iter() {\n\n\t\tstart_stop_hook(&name, HookType::OnStart);\n\n\n\n\t\t{\n\n\t\t\tlet multitransaction = test.transaction;\n\n\t\t\tlet env: EnvInfo = test.env.into();\n\n\t\t\tlet pre: PodState = test.pre_state.into();\n\n\n\n\t\t\tfor (spec_name, states) in test.post_states {\n\n\t\t\t\tlet total = states.len();\n\n\t\t\t\tlet spec = match EvmTestClient::fork_spec_from_json(&spec_name) {\n\n\t\t\t\t\tSome(spec) => spec,\n\n\t\t\t\t\tNone => {\n\n\t\t\t\t\t\tprintln!(\" - {} | {:?} Ignoring tests because of missing chainspec\", name, spec_name);\n", "file_path": "openethereum/ethcore/src/json_tests/state.rs", "rank": 0, "score": 512134.0065655274 }, { "content": "#[allow(dead_code)]\n\npub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8], start_stop_hook: &mut H) -> Vec<String> {\n\n\tlet _ = ::env_logger::try_init();\n\n\tlet tests = blockchain::Test::load(json_data)\n\n\t\t.expect(&format!(\"Could not parse JSON chain test data from {}\", path.display()));\n\n\tlet mut failed = Vec::new();\n\n\n\n\tfor (name, blockchain) in tests.into_iter() {\n\n\t\tif skip_test(&name) {\n\n\t\t\tprintln!(\" - {} | {:?}: SKIPPED\", name, blockchain.network);\n\n\t\t\tcontinue;\n\n\t\t}\n\n\n\n\t\tstart_stop_hook(&name, HookType::OnStart);\n\n\n\n\t\tlet mut fail = false;\n\n\t\t{\n\n\t\t\tlet mut fail_unless = |cond: bool| {\n\n\t\t\t\tif !cond && !fail {\n\n\t\t\t\t\tfailed.push(name.clone());\n\n\t\t\t\t\tflushed_writeln!(\"FAIL\");\n", "file_path": "openethereum/ethcore/src/json_tests/chain.rs", "rank": 1, "score": 512134.0065655275 }, { "content": "#[allow(dead_code)]\n\nfn do_json_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8], start_stop_hook: &mut H) -> Vec<String> {\n\n\t// Block number used to run the tests.\n\n\t// Make sure that all the specified features are activated.\n\n\tconst BLOCK_NUMBER: u64 = 0x6ffffffffffffe;\n\n\n\n\tlet tests = ethjson::test_helpers::transaction::Test::load(json_data)\n\n\t\t.expect(&format!(\"Could not parse JSON transaction test data from {}\", path.display()));\n\n\tlet mut failed = Vec::new();\n\n\tfor (name, test) in tests.into_iter() {\n\n\t\tstart_stop_hook(&name, HookType::OnStart);\n\n\n\n\t\tfor (spec_name, result) in test.post_state {\n\n\t\t\tlet spec = match EvmTestClient::fork_spec_from_json(&spec_name) {\n\n\t\t\t\tSome(spec) => spec,\n\n\t\t\t\tNone => {\n\n\t\t\t\t\tprintln!(\" - {} | {:?} Ignoring tests because of missing spec\", name, spec_name);\n\n\t\t\t\t\tcontinue;\n\n\t\t\t\t}\n\n\t\t\t};\n\n\n", "file_path": "openethereum/ethcore/src/json_tests/transaction.rs", "rank": 2, "score": 456665.0495468712 }, { "content": "/// Default data path\n\npub fn default_data_path() -> String {\n\n\tdefault_data_pathbuf().to_string_lossy().into_owned()\n\n}\n\n\n", "file_path": "openethereum/util/dir/src/lib.rs", "rank": 3, "score": 431197.69753783767 }, { "content": "pub fn to_block_id(s: &str) -> Result<BlockId, String> {\n\n\tif s == \"latest\" {\n\n\t\tOk(BlockId::Latest)\n\n\t} else if let Ok(num) = s.parse() {\n\n\t\tOk(BlockId::Number(num))\n\n\t} else if let Ok(hash) = s.parse() 
{\n\n\t\tOk(BlockId::Hash(hash))\n\n\t} else {\n\n\t\tErr(\"Invalid block.\".into())\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 4, "score": 425906.94440666644 }, { "content": "/// Find a unique filename that does not exist using four-letter random suffix.\n\npub fn find_unique_filename_using_random_suffix(parent_path: &Path, original_filename: &str) -> io::Result<String> {\n\n\tlet mut path = parent_path.join(original_filename);\n\n\tlet mut deduped_filename = original_filename.to_string();\n\n\n\n\tif path.exists() {\n\n\t\tconst MAX_RETRIES: usize = 500;\n\n\t\tlet mut retries = 0;\n\n\n\n\t\twhile path.exists() {\n\n\t\t\tif retries >= MAX_RETRIES {\n\n\t\t\t\treturn Err(io::Error::new(io::ErrorKind::Other, \"Exceeded maximum retries when deduplicating filename.\"));\n\n\t\t\t}\n\n\n\n\t\t\tlet suffix = ::random::random_string(4);\n\n\t\t\tdeduped_filename = format!(\"{}-{}\", original_filename, suffix);\n\n\t\t\tpath.set_file_name(&deduped_filename);\n\n\t\t\tretries += 1;\n\n\t\t}\n\n\t}\n\n\n\n\tOk(deduped_filename)\n\n}\n\n\n\n/// Create a new file and restrict permissions to owner only. It errors if the file already exists.\n", "file_path": "openethereum/accounts/ethstore/src/accounts_dir/disk.rs", "rank": 5, "score": 413201.4522335024 }, { "content": "/// Default SystemOrCodeCall implementation.\n\npub fn default_system_or_code_call<'a>(machine: &'a Machine, block: &'a mut ExecutedBlock) -> impl FnMut(SystemOrCodeCallKind, Vec<u8>) -> Result<Vec<u8>, String> + 'a {\n\n\tmove |to, data| {\n\n\t\tlet result = match to {\n\n\t\t\tSystemOrCodeCallKind::Address(address) => {\n\n\t\t\t\tmachine.execute_as_system(\n\n\t\t\t\t\tblock,\n\n\t\t\t\t\taddress,\n\n\t\t\t\t\tU256::max_value(),\n\n\t\t\t\t\tSome(data),\n\n\t\t\t\t)\n\n\t\t\t},\n\n\t\t\tSystemOrCodeCallKind::Code(code, code_hash) => {\n\n\t\t\t\tmachine.execute_code_as_system(\n\n\t\t\t\t\tblock,\n\n\t\t\t\t\tNone,\n\n\t\t\t\t\tSome(code),\n\n\t\t\t\t\tSome(code_hash),\n\n\t\t\t\t\tSome(ActionValue::Apparent(U256::zero())),\n\n\t\t\t\t\tU256::max_value(),\n\n\t\t\t\t\tSome(data),\n\n\t\t\t\t\tSome(ActionType::StaticCall),\n\n\t\t\t\t)\n\n\t\t\t},\n\n\t\t};\n\n\n\n\t\tresult.map_err(|e| format!(\"{}\", e))\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/ethcore/engine/src/engine.rs", "rank": 6, "score": 411559.30359111336 }, { "content": "#[doc(hidden)]\n\n#[export_name = \"LLVMFuzzerTestOneInput\"]\n\npub fn test_input_wrap(data: *const u8, size: usize) -> i32 {\n\n let test_input = ::std::panic::catch_unwind(|| unsafe {\n\n let data_slice = ::std::slice::from_raw_parts(data, size);\n\n rust_fuzzer_test_input(data_slice);\n\n });\n\n if test_input.err().is_some() {\n\n // hopefully the custom panic hook will be called before and abort the\n\n // process before the stack frames are unwinded.\n\n ::std::process::abort();\n\n }\n\n 0\n\n}\n\n\n\n/*\n", "file_path": "custom-libfuzzer/src/lib.rs", "rank": 7, "score": 409945.756456285 }, { "content": "/// Formats duration into human readable format.\n\npub fn format_time(time: &Duration) -> String {\n\n\tformat!(\"{}.{:09}s\", time.as_secs(), time.subsec_nanos())\n\n}\n\n\n", "file_path": "openethereum/evmbin/src/display/mod.rs", "rank": 8, "score": 405423.5415022202 }, { "content": "pub fn format_ether(i: U256) -> String {\n\n\tlet mut string = format!(\"{}\", i);\n\n\tlet idx = string.len() as isize - 18;\n\n\tif idx <= 0 {\n\n\t\tlet mut prefix = String::from(\"0.\");\n\n\t\tfor _ in 0..idx.abs() {\n\n\t\t\tprefix.push('0');\n\n\t\t}\n\n\t\tstring = 
prefix + &string;\n\n\t} else {\n\n\t\tstring.insert(idx as usize, '.');\n\n\t}\n\n\tString::from(string.trim_end_matches('0').trim_end_matches('.'))\n\n}\n\n\n\nimpl fmt::Display for TransactionRequest {\n\n\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n\t\tlet eth = self.value.unwrap_or_default();\n\n\t\tmatch self.to {\n\n\t\t\tSome(ref to) => write!(\n", "file_path": "openethereum/rpc/src/v1/types/transaction_request.rs", "rank": 9, "score": 400982.3540586786 }, { "content": "pub fn preset_config_string(arg: &str) -> Result<&'static str, Error> {\n\n match arg.to_lowercase().as_ref() {\n\n \"dev\" => Ok(include_str!(\"./config.dev.toml\")),\n\n \"mining\" => Ok(include_str!(\"./config.mining.toml\")),\n\n \"non-standard-ports\" => Ok(include_str!(\"./config.non-standard-ports.toml\")),\n\n \"insecure\" => Ok(include_str!(\"./config.insecure.toml\")),\n\n \"dev-insecure\" => Ok(include_str!(\"./config.dev-insecure.toml\")),\n\n _ => Err(Error::new(ErrorKind::InvalidInput, \"Config doesn't match any presets [dev, mining, non-standard-ports, insecure, dev-insecure]\"))\n\n }\n\n}\n", "file_path": "openethereum/parity/cli/presets/mod.rs", "rank": 10, "score": 398725.91382397973 }, { "content": "/// Returns a eth_sign-compatible hash of data to sign.\n\n/// The data is prepended with special message to prevent\n\n/// malicious DApps from using the function to sign forged transactions.\n\npub fn eth_data_hash(mut data: Bytes) -> H256 {\n\n\tlet mut message_data =\n\n\t\tformat!(\"\\x19Ethereum Signed Message:\\n{}\", data.len())\n\n\t\t.into_bytes();\n\n\tmessage_data.append(&mut data);\n\n\tkeccak(message_data)\n\n}\n\n\n", "file_path": "openethereum/rpc/src/v1/helpers/dispatch/mod.rs", "rank": 11, "score": 397172.394282639 }, { "content": "#[allow(dead_code)]\n\nfn test_trie<H: FnMut(&str, HookType)>(path: &Path, json: &[u8], trie: TrieSpec, start_stop_hook: &mut H) -> Vec<String> {\n\n\tlet tests = ethjson::test_helpers::trie::Test::load(json)\n\n\t\t.expect(&format!(\"Could not parse JSON trie test data from {}\", path.display()));\n\n\tlet factory = TrieFactory::new(trie, ethtrie::Layout);\n\n\tlet mut result = vec![];\n\n\n\n\tfor (name, test) in tests.into_iter() {\n\n\t\tstart_stop_hook(&name, HookType::OnStart);\n\n\n\n\t\tlet mut memdb = journaldb::new_memory_db();\n\n\t\tlet mut root = H256::zero();\n\n\t\tlet mut t = factory.create(&mut memdb, &mut root);\n\n\n\n\t\tfor (key, value) in test.input.data.into_iter() {\n\n\t\t\tlet key: Vec<u8> = key.into();\n\n\t\t\tlet value: Vec<u8> = value.map_or_else(Vec::new, Into::into);\n\n\t\t\tt.insert(&key, &value)\n\n\t\t\t\t.expect(&format!(\"Trie test '{:?}' failed due to internal error\", name));\n\n\t\t}\n\n\n", "file_path": "openethereum/ethcore/src/json_tests/trie.rs", "rank": 12, "score": 388789.50294941355 }, { "content": "fn rng(gas: U256, c: &mut Criterion, bench_id: &str) {\n\n\tlet code = black_box(\n\n\t\thex!(\"6060604052600360056007600b60005b62004240811215607f5767ffe7649d5eca84179490940267f47ed85c4b9a6379019367f8e5dd9a5c994bba9390930267f91d87e4b8b74e55019267ff97f6f3b29cda529290920267f393ada8dd75c938019167fe8d437c45bb3735830267f47d9a7b5428ffec019150600101600f565b838518831882186000555050505050600680609a6000396000f3606060405200\").to_vec()\n\n\t);\n\n\n\n\tc.bench_function(bench_id, move |b| {\n\n\t\tb.iter(|| {\n\n\t\t\tlet mut params = ActionParams::default();\n\n\t\t\tparams.gas = gas;\n\n\t\t\tparams.code = Some(Arc::new(code.clone()));\n\n\n\n\t\t\tlet mut ext = FakeExt::new();\n\n\t\t\tlet evm = 
Factory::default().create(params, ext.schedule(), ext.depth());\n\n\t\t\tlet _ = evm.exec(&mut ext);\n\n\t\t})\n\n\t});\n\n}\n", "file_path": "openethereum/evmbin/benches/mod.rs", "rank": 13, "score": 383112.4369225807 }, { "content": "/// encodes and hashes the given EIP712 struct\n\npub fn hash_structured_data(typed_data: EIP712) -> Result<H256> {\n\n\t// validate input\n\n\ttyped_data.validate()?;\n\n\t// EIP-191 compliant\n\n\tlet prefix = (b\"\\x19\\x01\").to_vec();\n\n\tlet domain = to_value(&typed_data.domain).unwrap();\n\n\tlet (domain_hash, data_hash) = (\n\n\t\tencode_data(\n\n\t\t\t&Type::Custom(\"EIP712Domain\".into()),\n\n\t\t\t&typed_data.types,\n\n\t\t\t&domain,\n\n\t\t\tNone\n\n\t\t)?,\n\n\t\tencode_data(\n\n\t\t\t&Type::Custom(typed_data.primary_type),\n\n\t\t\t&typed_data.types,\n\n\t\t\t&typed_data.message,\n\n\t\t\tNone\n\n\t\t)?\n\n\t);\n", "file_path": "openethereum/util/EIP-712/src/encode.rs", "rank": 14, "score": 379558.727909842 }, { "content": "fn type_hash(message_type: &str, typed_data: &MessageTypes) -> Result<H256> {\n\n\tOk(keccak(encode_type(message_type, typed_data)?))\n\n}\n\n\n", "file_path": "openethereum/util/EIP-712/src/encode.rs", "rank": 15, "score": 379282.3861976054 }, { "content": "fn map_serde_err<T: Display>(struct_name: &'static str) -> impl Fn(T) -> Error {\n\n\tmove |error: T| {\n\n\t\terrors::invalid_call_data(format!(\"Error deserializing '{}': {}\", struct_name, error))\n\n\t}\n\n}\n", "file_path": "openethereum/rpc/src/v1/helpers/eip191.rs", "rank": 16, "score": 375745.08291739697 }, { "content": "pub fn to_hex(bytes: &[u8]) -> String {\n\n\tlet mut v = Vec::with_capacity(bytes.len() * 2);\n\n\tfor &byte in bytes.iter() {\n\n\t\tv.push(CHARS[(byte >> 4) as usize]);\n\n\t\tv.push(CHARS[(byte & 0xf) as usize]);\n\n\t}\n\n\n\n\tunsafe { String::from_utf8_unchecked(v) }\n\n}\n\n\n", "file_path": "openethereum/ethash/src/shared.rs", "rank": 17, "score": 368991.1470417774 }, { "content": "/// Generate a random string of given length.\n\npub fn random_string(length: usize) -> String {\n\n\tlet rng = OsRng;\n\n\trng.sample_iter(&Alphanumeric).take(length).collect()\n\n}\n", "file_path": "openethereum/accounts/ethstore/src/random.rs", "rank": 18, "score": 367677.07838018594 }, { "content": "/// the type string is being validated before it's parsed.\n\npub fn parse_type(field_type: &str) -> Result<Type> {\n\n\t#[derive(PartialEq)]\n\n\tenum State { Open, Close }\n\n\n\n\tlet mut lexer = Lexer::new(field_type);\n\n\tlet mut token = None;\n\n\tlet mut state = State::Close;\n\n\tlet mut array_depth = 0;\n\n\tlet mut current_array_length: Option<u64> = None;\n\n\n\n\twhile lexer.token != Token::EndOfProgram {\n\n\t\tlet type_ = match lexer.token {\n\n\t\t\tToken::Identifier => Type::Custom(lexer.slice().to_owned()),\n\n\t\t\tToken::TypeByte => Type::Byte(lexer.extras.0),\n\n\t\t\tToken::TypeBytes => Type::Bytes,\n\n\t\t\tToken::TypeBool => Type::Bool,\n\n\t\t\tToken::TypeUint => Type::Uint,\n\n\t\t\tToken::TypeInt => Type::Int,\n\n\t\t\tToken::TypeString => Type::String,\n\n\t\t\tToken::TypeAddress => Type::Address,\n", "file_path": "openethereum/util/EIP-712/src/parser.rs", "rank": 19, "score": 366825.6383129547 }, { "content": "/// Replaces `$HOME` str with home directory path.\n\npub fn replace_home(base: &str, arg: &str) -> String {\n\n\t// the $HOME directory on mac os should be `~/Library` or `~/Library/Application Support`\n\n\t// We use an `if` so that we don't need to call `home_dir()` if not necessary.\n\n\tlet r = if arg.contains(\"$HOME\") 
{\n\n\t\targ.replace(\"$HOME\", home_dir().expect(\"$HOME isn't defined\").to_str().unwrap())\n\n\t} else {\n\n\t\targ.to_owned()\n\n\t};\n\n\tlet r = r.replace(\"$BASE\", base);\n\n\tr.replace(\"/\", &::std::path::MAIN_SEPARATOR.to_string())\n\n}\n\n\n", "file_path": "openethereum/util/dir/src/helpers.rs", "rank": 20, "score": 365702.1917661585 }, { "content": "fn create_file(filename: &str, data: String) {\n\n\tlet out_dir = env::var(\"OUT_DIR\").expect(ERROR_MSG);\n\n\tlet dest_path = Path::new(&out_dir).join(filename);\n\n\tlet mut f = File::create(&dest_path).expect(ERROR_MSG);\n\n\tf.write_all(data.as_bytes()).expect(ERROR_MSG);\n\n}\n", "file_path": "openethereum/util/version/build.rs", "rank": 21, "score": 365508.87389428355 }, { "content": "/// Default local path\n\npub fn default_local_path() -> String {\n\n\tdefault_path(AppDataType::UserCache).unwrap_or_else(fallback_path).to_string_lossy().into_owned()\n\n}\n\n\n", "file_path": "openethereum/util/dir/src/lib.rs", "rank": 22, "score": 363297.3893313329 }, { "content": "type Client = Sink<pubsub::Result>;\n\n\n\n/// Eth PubSub implementation.\n\npub struct EthPubSubClient<C> {\n\n\thandler: Arc<ChainNotificationHandler<C>>,\n\n\theads_subscribers: Arc<RwLock<Subscribers<Client>>>,\n\n\tlogs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,\n\n\ttransactions_subscribers: Arc<RwLock<Subscribers<Client>>>,\n\n\tsync_subscribers: Arc<RwLock<Subscribers<Client>>>,\n\n}\n\n\n\nimpl<C> EthPubSubClient<C>\n\n\twhere\n\n\t\tC: 'static + Send + Sync\n\n{\n\n\t/// adds a sync notification channel to the pubsub client\n\n\tpub fn add_sync_notifier<F>(&mut self, receiver: Notification<SyncState>, f: F)\n\n\t\twhere\n\n\t\t\tF: 'static + Fn(SyncState) -> Option<pubsub::PubSubSyncStatus> + Send\n\n\t{\n", "file_path": "openethereum/rpc/src/v1/impls/eth_pubsub.rs", "rank": 23, "score": 362491.3476557081 }, { "content": "#[doc(hidden)]\n\n#[export_name = \"LLVMFuzzerInitialize\"]\n\npub fn initialize(_argc: *const isize, _argv: *const *const *const u8) -> isize {\n\n // Registers a panic hook that aborts the process before unwinding.\n\n // It is useful to abort before unwinding so that the fuzzer will then be\n\n // able to analyse the process stack frames to tell different bugs appart.\n\n //\n\n // HACK / FIXME: it would be better to use `-C panic=abort` but it's currently\n\n // impossible to build code using compiler plugins with this flag.\n\n // We will be able to remove this code when\n\n // https://github.com/rust-lang/cargo/issues/5423 is fixed.\n\n let default_hook = ::std::panic::take_hook();\n\n ::std::panic::set_hook(Box::new(move |panic_info| {\n\n default_hook(panic_info);\n\n ::std::process::abort();\n\n }));\n\n 0\n\n}\n\n\n\n/// Define a fuzz target.\n\n///\n\n/// ## Example\n", "file_path": "custom-libfuzzer/src/lib.rs", "rank": 24, "score": 362232.54974590894 }, { "content": "/// Replaces `$HOME` str with home directory path and `$LOCAL` with local path.\n\npub fn replace_home_and_local(base: &str, local: &str, arg: &str) -> String {\n\n\tlet r = replace_home(base, arg);\n\n\tr.replace(\"$LOCAL\", local)\n\n}\n", "file_path": "openethereum/util/dir/src/helpers.rs", "rank": 25, "score": 359326.8884412367 }, { "content": "/// Tries to parse string as a price.\n\npub fn to_price(s: &str) -> Result<f32, String> {\n\n\ts.parse::<f32>().map_err(|_| format!(\"Invalid transaction price {:?} given. 
Must be a decimal number.\", s))\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 26, "score": 351232.215720003 }, { "content": "pub fn to_duration(s: &str) -> Result<Duration, String> {\n\n\tto_seconds(s).map(Duration::from_secs)\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 27, "score": 351225.06306972523 }, { "content": "pub fn to_u256(s: &str) -> Result<U256, String> {\n\n\tif let Ok(decimal) = U256::from_dec_str(s) {\n\n\t\tOk(decimal)\n\n\t} else if let Ok(hex) = clean_0x(s).parse() {\n\n\t\tOk(hex)\n\n\t} else {\n\n\t\tErr(format!(\"Invalid numeric value: {}\", s))\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 28, "score": 351225.06306972523 }, { "content": "/// Formats and returns parity ipc path.\n\npub fn parity_ipc_path(base: &str, path: &str, shift: u16) -> String {\n\n\tlet mut path = path.to_owned();\n\n\tif shift != 0 {\n\n\t\tpath = path.replace(\"jsonrpc.ipc\", &format!(\"jsonrpc-{}.ipc\", shift));\n\n\t}\n\n\treplace_home(base, &path)\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 29, "score": 349210.489964131 }, { "content": "/// Format byte counts to standard denominations.\n\npub fn format_bytes(b: u64) -> String {\n\n\tmatch binary_prefix(b as f64) {\n\n\t\tStandalone(bytes) => format!(\"{} bytes\", bytes),\n\n\t\tPrefixed(prefix, n) => format!(\"{:.0} {}B\", n, prefix),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/informant.rs", "rank": 30, "score": 349081.08042299614 }, { "content": "pub fn to_pending_set(s: &str) -> Result<PendingSet, String> {\n\n\tmatch s {\n\n\t\t\"cheap\" => Ok(PendingSet::AlwaysQueue),\n\n\t\t\"strict\" => Ok(PendingSet::AlwaysSealing),\n\n\t\t\"lenient\" => Ok(PendingSet::SealingOrElseQueue),\n\n\t\tother => Err(format!(\"Invalid pending set value: {:?}\", other)),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 31, "score": 344249.12858500506 }, { "content": "pub fn to_queue_strategy(s: &str) -> Result<PrioritizationStrategy, String> {\n\n\tmatch s {\n\n\t\t\"gas_price\" => Ok(PrioritizationStrategy::GasPriceOnly),\n\n\t\tother => Err(format!(\"Invalid queue strategy: {}\", other)),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 32, "score": 344249.12858500506 }, { "content": "fn assert_set_contains<T : Debug + Eq + PartialEq + Hash>(set: &HashSet<T>, val: &T) {\n\n\tlet contains = set.contains(val);\n\n\tif !contains {\n\n\t\tprintln!(\"Set: {:?}\", set);\n\n\t\tprintln!(\"Elem: {:?}\", val);\n\n\t}\n\n\tassert!(contains, \"Element not found in HashSet\");\n\n}\n\n\n", "file_path": "openethereum/ethcore/evm/src/tests.rs", "rank": 33, "score": 342147.4716945647 }, { "content": "pub fn join_set(set: Option<&HashSet<String>>) -> Option<String> {\n\n\tmatch set {\n\n\t\tSome(s) => Some(s.iter().map(|s| s.as_str()).collect::<Vec<&str>>().join(\",\")),\n\n\t\tNone => None\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 34, "score": 340824.22754228377 }, { "content": "fn encode_type(message_type: &str, message_types: &MessageTypes) -> Result<String> {\n\n\tlet deps = {\n\n\t\tlet mut temp = build_dependencies(message_type, message_types)\n\n\t\t\t.ok_or(ErrorKind::NonExistentType)?;\n\n\t\ttemp.remove(message_type);\n\n\t\tlet mut temp = temp.into_iter().collect::<Vec<_>>();\n\n\t\t(&mut temp[..]).sort_unstable();\n\n\t\ttemp.insert(0, message_type);\n\n\t\ttemp\n\n\t};\n\n\n\n\tlet encoded = deps\n\n\t\t.into_iter()\n\n\t\t.filter_map(|dep| 
{\n\n\t\t\tmessage_types.get(dep).map(|field_types| {\n\n\t\t\t\tlet types = field_types\n\n\t\t\t\t\t.iter()\n\n\t\t\t\t\t.map(|value| format!(\"{} {}\", value.type_, value.name))\n\n\t\t\t\t\t.join(\",\");\n\n\t\t\t\treturn format!(\"{}({})\", dep, types);\n\n\t\t\t})\n\n\t\t})\n\n\t\t.collect::<Vec<_>>()\n\n\t\t.concat();\n\n\tOk(encoded)\n\n}\n\n\n", "file_path": "openethereum/util/EIP-712/src/encode.rs", "rank": 35, "score": 335403.136878453 }, { "content": "pub fn well_formed_create_tx_data(fuzzed_code: Vec<u8>, constructor_length: u8) -> Vec<u8> {\n\n\tlet valid_instruction_list = VALID_INSTRUCTION_LIST.clone();\n\n\n\n\tlet mut valid_code: Vec<u8> = Vec::new();\n\n\n\n\tlet mut position = 0;\n\n\twhile position < fuzzed_code.len() {\n\n\t\tlet fuzzed_instruction = fuzzed_code[position];\n\n\t\tlet valid_instruction: Instruction = match is_valid(fuzzed_instruction) {\n\n\t\t\ttrue => Instruction::from_u8(fuzzed_instruction).unwrap(),\n\n\t\t\tfalse => valid_instruction_list[fuzzed_instruction as usize % valid_instruction_list.len()],\n\n\t\t};\n\n\n\n\t\tvalid_code.push(valid_instruction as u8);\n\n\t\tposition = position + 1;\n\n\n\n\n\n\t\tif valid_instruction.is_push() {\n\n\t\t\tpanic!(); \n\n\t\t}\n", "file_path": "openethereum/evmfuzz/src/lib.rs", "rank": 36, "score": 335366.875233796 }, { "content": "pub fn json_difficulty_test<H: FnMut(&str, HookType)>(\n\n\tpath: &Path,\n\n\tjson_data: &[u8],\n\n\tspec: Spec,\n\n\tstart_stop_hook: &mut H\n\n) -> Vec<String> {\n\n\tlet _ = env_logger::try_init();\n\n\tlet tests = DifficultyTest::load(json_data)\n\n\t\t.expect(&format!(\"Could not parse JSON difficulty test data from {}\", path.display()));\n\n\tlet engine = &spec.engine;\n\n\n\n\tfor (name, test) in tests.into_iter() {\n\n\t\tstart_stop_hook(&name, HookType::OnStart);\n\n\n\n\t\tflushed_writeln!(\" - {}...\", name);\n\n\n\n\t\tlet mut parent_header = Header::new();\n\n\t\tlet block_number: u64 = test.current_block_number.into();\n\n\t\tparent_header.set_number(block_number - 1);\n\n\t\tparent_header.set_gas_limit(0x20000.into());\n", "file_path": "openethereum/ethcore/src/json_tests/difficulty.rs", "rank": 37, "score": 330326.7533738626 }, { "content": "/// Default data path\n\npub fn default_data_pathbuf() -> PathBuf {\n\n\tdefault_path(AppDataType::UserData).unwrap_or_else(fallback_path)\n\n}\n\n\n", "file_path": "openethereum/util/dir/src/lib.rs", "rank": 38, "score": 329163.1071825616 }, { "content": "pub fn timeout_new_peer(err: &OnDemandError) -> Error {\n\n\tError {\n\n\t\tcode: ErrorCode::ServerError(codes::NO_LIGHT_PEERS),\n\n\t\tmessage: err.to_string(),\n\n\t\tdata: None,\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/rpc/src/v1/helpers/errors.rs", "rank": 39, "score": 328513.78471520176 }, { "content": "/// Run all tests under the given path (except for the test files named in the skip list) using the\n\n/// provided runner function.\n\npub fn run_test_path<H: FnMut(&str, HookType)>(\n\n\tpath: &Path,\n\n\tskip: &[&'static str],\n\n\trunner: fn(path: &Path, json_data: &[u8], start_stop_hook: &mut H) -> Vec<String>,\n\n\tstart_stop_hook: &mut H\n\n) {\n\n\tif !skip.is_empty() {\n\n\t\t// todo[dvdplm] it's really annoying to have to use flushln here. Should be `info!(target:\n\n\t\t// \"json-tests\", …)`. 
Issue https://github.com/openethereum/openethereum/issues/11084\n\n\t\tflushed_writeln!(\"[run_test_path] Skipping tests in {}: {:?}\", path.display(), skip);\n\n\t}\n\n\tlet mut errors = Vec::new();\n\n\trun_test_path_inner(path, skip, runner, start_stop_hook, &mut errors);\n\n\tlet empty: [String; 0] = [];\n\n\tassert_eq!(errors, empty, \"\\nThere were {} tests in '{}' that failed.\", errors.len(), path.display());\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/json_tests/test_common.rs", "rank": 40, "score": 327528.7472776795 }, { "content": "pub fn run_test_file<H: FnMut(&str, HookType)>(\n\n\tpath: &Path,\n\n\trunner: fn(path: &Path, json_data: &[u8], start_stop_hook: &mut H) -> Vec<String>,\n\n\tstart_stop_hook: &mut H\n\n) {\n\n\tlet mut data = Vec::new();\n\n\tlet mut file = match File::open(&path) {\n\n\t\tOk(file) => file,\n\n\t\tErr(_) => panic!(\"Error opening test file at: {:?}\", path),\n\n\t};\n\n\tfile.read_to_end(&mut data).expect(\"Error reading test file\");\n\n\tlet results = runner(&path, &data, start_stop_hook);\n\n\tlet empty: [String; 0] = [];\n\n\tassert_eq!(results, empty);\n\n}\n", "file_path": "openethereum/ethcore/src/json_tests/test_common.rs", "rank": 41, "score": 327522.3646177915 }, { "content": "/// Generates dummy client (not test client) with corresponding amount of blocks, txs per block and spec\n\npub fn generate_dummy_client_with_spec_and_data<F>(\n\n\ttest_spec: F, block_number: u32, txs_per_block: usize, tx_gas_prices: &[U256], force_sealing: bool,\n\n) -> Arc<Client> where\n\n\tF: Fn() -> Spec\n\n{\n\n\tlet test_spec = test_spec();\n\n\tlet client_db = new_db();\n\n\n\n\tlet miner = Miner::new_for_tests_force_sealing(&test_spec, None, force_sealing);\n\n\n\n\tlet client = Client::new(\n\n\t\tClientConfig::default(),\n\n\t\t&test_spec,\n\n\t\tclient_db,\n\n\t\tArc::new(miner),\n\n\t\tIoChannel::disconnected(),\n\n\t).unwrap();\n\n\tlet test_engine = &*test_spec.engine;\n\n\n\n\tlet mut db = test_spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap();\n", "file_path": "openethereum/ethcore/src/test_helpers/mod.rs", "rank": 42, "score": 322378.5535537548 }, { "content": "/// given a type and HashMap<String, Vec<FieldType>>\n\n/// returns a HashSet of dependent types of the given type\n\nfn build_dependencies<'a>(message_type: &'a str, message_types: &'a MessageTypes) -> Option<HashSet<&'a str>>\n\n{\n\n\tif message_types.get(message_type).is_none() {\n\n\t\treturn None;\n\n\t}\n\n\n\n\tlet mut types = IndexSet::new();\n\n\ttypes.insert(message_type);\n\n\tlet mut deps = HashSet::new();\n\n\n\n\twhile let Some(item) = types.pop() {\n\n\t\tif let Some(fields) = message_types.get(item) {\n\n\t\t\tdeps.insert(item);\n\n\n\n\t\t\tfor field in fields {\n\n\t\t\t\t// check if this field is an array type\n\n\t\t\t\tlet field_type = if let Some(index) = field.type_.find('[') {\n\n\t\t\t\t\t&field.type_[..index]\n\n\t\t\t\t} else {\n\n\t\t\t\t\t&field.type_\n", "file_path": "openethereum/util/EIP-712/src/encode.rs", "rank": 43, "score": 320662.52406035166 }, { "content": "/// Returns sequence of hashes of the dummy blocks beginning from corresponding parent\n\npub fn get_good_dummy_block_fork_seq(start_number: usize, count: usize, parent_hash: &H256) -> Vec<Bytes> {\n\n\tlet test_spec = spec::new_test();\n\n\tlet genesis_gas = test_spec.genesis_header().gas_limit().clone();\n\n\tlet mut rolling_timestamp = start_number as u64 * 10;\n\n\tlet mut parent = *parent_hash;\n\n\tlet mut r = Vec::new();\n\n\tfor i in start_number .. 
start_number + count + 1 {\n\n\t\tlet mut block_header = Header::new();\n\n\t\tblock_header.set_gas_limit(genesis_gas);\n\n\t\tblock_header.set_difficulty(U256::from(i) * U256([0, 1, 0, 0]));\n\n\t\tblock_header.set_timestamp(rolling_timestamp);\n\n\t\tblock_header.set_number(i as u64);\n\n\t\tblock_header.set_parent_hash(parent);\n\n\t\tblock_header.set_state_root(test_spec.genesis_header().state_root().clone());\n\n\n\n\t\tparent = block_header.hash();\n\n\t\trolling_timestamp = rolling_timestamp + 10;\n\n\n\n\t\tr.push(create_test_block(&block_header));\n\n\t}\n\n\tr\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/test_helpers/mod.rs", "rank": 44, "score": 320295.6224815322 }, { "content": "/// Insert vault name to the JSON meta field\n\npub fn insert_vault_name_to_json_meta(meta: &str, vault_name: &str) -> Result<String, error::Error> {\n\n\tlet mut meta = if meta.is_empty() {\n\n\t\tValue::Object(serde_json::Map::new())\n\n\t} else {\n\n\t\tserde_json::from_str(meta)?\n\n\t};\n\n\n\n\tif let Some(meta_obj) = meta.as_object_mut() {\n\n\t\tmeta_obj.insert(VAULT_NAME_META_KEY.to_owned(), Value::String(vault_name.to_owned()));\n\n\t\tserde_json::to_string(meta_obj)\n\n\t} else {\n\n\t\tErr(error::Error::custom(\"Meta is expected to be a serialized JSON object\"))\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/accounts/ethstore/src/json/vault_key_file.rs", "rank": 45, "score": 319629.42017052806 }, { "content": "pub fn convert_to_proto(fuzz_data: &[u8]) -> Option<fuzzer::Fuzzed> {\n\n\tmatch protobuf::parse_from_bytes::<fuzzer::Fuzzed>(fuzz_data) {\n\n\t\tOk(fuzzed) => Some(fuzzed),\n\n\t\tErr(_) => None,\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/evmfuzz/src/lib.rs", "rank": 46, "score": 318520.7616515563 }, { "content": "pub fn to_url(address: &Option<::Host>) -> Option<String> {\n\n\taddress.as_ref().map(|host| (**host).to_owned())\n\n}\n", "file_path": "openethereum/rpc/src/v1/helpers/mod.rs", "rank": 47, "score": 318375.982625274 }, { "content": "fn copy_into(from: &str, into: &mut [u8]) -> Result<(), Error> {\n\n\tlet from: Vec<u8> = from.from_hex().map_err(|_| Error::InvalidUuid)?;\n\n\n\n\tif from.len() != into.len() {\n\n\t\treturn Err(Error::InvalidUuid);\n\n\t}\n\n\n\n\tinto.copy_from_slice(&from);\n\n\tOk(())\n\n}\n\n\n\nimpl str::FromStr for Uuid {\n\n\ttype Err = Error;\n\n\n\n\tfn from_str(s: &str) -> Result<Self, Self::Err> {\n\n\t\tlet parts: Vec<&str> = s.split(\"-\").collect();\n\n\n\n\t\tif parts.len() != 5 {\n\n\t\t\treturn Err(Error::InvalidUuid);\n\n\t\t}\n", "file_path": "openethereum/accounts/ethstore/src/json/id.rs", "rank": 48, "score": 316995.92386295146 }, { "content": "pub fn to_mode(s: &str, timeout: u64, alarm: u64) -> Result<Mode, String> {\n\n\tmatch s {\n\n\t\t\"active\" => Ok(Mode::Active),\n\n\t\t\"passive\" => Ok(Mode::Passive(Duration::from_secs(timeout), Duration::from_secs(alarm))),\n\n\t\t\"dark\" => Ok(Mode::Dark(Duration::from_secs(timeout))),\n\n\t\t\"offline\" => Ok(Mode::Off),\n\n\t\t_ => Err(format!(\"{}: Invalid value for --mode. 
Must be one of active, passive, dark or offline.\", s)),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 49, "score": 316711.5810421582 }, { "content": "pub fn tracing_switch_to_bool(switch: Switch, user_defaults: &UserDefaults) -> Result<bool, String> {\n\n\tmatch (user_defaults.is_first_launch, switch, user_defaults.tracing) {\n\n\t\t(false, Switch::On, false) => Err(\"TraceDB resync required\".into()),\n\n\t\t(_, Switch::On, _) => Ok(true),\n\n\t\t(_, Switch::Off, _) => Ok(false),\n\n\t\t(_, Switch::Auto, def) => Ok(def),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/params.rs", "rank": 50, "score": 315653.9989769028 }, { "content": "/// Converts `BlockNumber` to `BlockId`, panics on `BlockNumber::Pending`\n\npub fn block_number_to_id(number: BlockNumber) -> BlockId {\n\n\tmatch number {\n\n\t\tBlockNumber::Hash { hash, .. } => BlockId::Hash(hash),\n\n\t\tBlockNumber::Num(num) => BlockId::Number(num),\n\n\t\tBlockNumber::Earliest => BlockId::Earliest,\n\n\t\tBlockNumber::Latest => BlockId::Latest,\n\n\t\tBlockNumber::Pending => panic!(\"`BlockNumber::Pending` should be handled manually\")\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse types::ids::BlockId;\n\n\tuse super::*;\n\n\tuse std::str::FromStr;\n\n\tuse serde_json;\n\n\n\n\t#[test]\n\n\tfn block_number_deserialization() {\n\n\t\tlet s = r#\"[\n", "file_path": "openethereum/rpc/src/v1/types/block_number.rs", "rank": 51, "score": 314154.4818624389 }, { "content": "#[test]\n\nfn should_subscribe_to_new_heads() {\n\n\t// given\n\n\tlet el = Runtime::with_thread_count(1);\n\n\tlet mut client = TestBlockChainClient::new();\n\n\t// Insert some blocks\n\n\tclient.add_blocks(3, EachBlockWith::Nothing);\n\n\tlet h3 = client.block_hash_delta_minus(1);\n\n\tlet h2 = client.block_hash_delta_minus(2);\n\n\tlet h1 = client.block_hash_delta_minus(3);\n\n\n\n\tlet (_, pool_receiver) = mpsc::unbounded();\n\n\n\n\tlet pubsub = EthPubSubClient::new(Arc::new(client), el.executor(), pool_receiver);\n\n\tlet handler = pubsub.handler().upgrade().unwrap();\n\n\tlet pubsub = pubsub.to_delegate();\n\n\n\n\tlet mut io = MetaIoHandler::default();\n\n\tio.extend_with(pubsub);\n\n\n\n\tlet mut metadata = Metadata::default();\n", "file_path": "openethereum/rpc/src/v1/tests/mocked/eth_pubsub.rs", "rank": 52, "score": 311852.7699228787 }, { "content": "/// Internal error signifying a logic error in code.\n\n/// Should not be used when function can just fail\n\n/// because of invalid parameters or incomplete node state.\n\npub fn internal<T: fmt::Debug>(error: &str, data: T) -> Error {\n\n\tError {\n\n\t\tcode: ErrorCode::InternalError,\n\n\t\tmessage: format!(\"Internal error occurred: {}\", error),\n\n\t\tdata: Some(Value::String(format!(\"{:?}\", data))),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/rpc/src/v1/helpers/errors.rs", "rank": 53, "score": 311427.60141126986 }, { "content": "/// Run executive jsontests on a given folder.\n\npub fn run_test_path<H: FnMut(&str, HookType)>(p: &Path, skip: &[&'static str], h: &mut H) {\n\n\t::json_tests::test_common::run_test_path(p, skip, do_json_test, h)\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/json_tests/executive.rs", "rank": 54, "score": 311242.8191503046 }, { "content": "/// Returns hash and header of the correct dummy block\n\npub fn get_good_dummy_block_hash() -> (H256, Bytes) {\n\n\tlet mut block_header = Header::new();\n\n\tlet test_spec = spec::new_test();\n\n\tlet genesis_gas = 
test_spec.genesis_header().gas_limit().clone();\n\n\tblock_header.set_gas_limit(genesis_gas);\n\n\tblock_header.set_difficulty(U256::from(0x20000));\n\n\tblock_header.set_timestamp(40);\n\n\tblock_header.set_number(1);\n\n\tblock_header.set_parent_hash(test_spec.genesis_header().hash());\n\n\tblock_header.set_state_root(test_spec.genesis_header().state_root().clone());\n\n\n\n\t(block_header.hash(), create_test_block(&block_header))\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/test_helpers/mod.rs", "rank": 55, "score": 311082.1430606415 }, { "content": "/// Remove vault name from the JSON meta field\n\npub fn remove_vault_name_from_json_meta(meta: &str) -> Result<String, error::Error> {\n\n\tlet mut meta = if meta.is_empty() {\n\n\t\tValue::Object(serde_json::Map::new())\n\n\t} else {\n\n\t\tserde_json::from_str(meta)?\n\n\t};\n\n\n\n\tif let Some(meta_obj) = meta.as_object_mut() {\n\n\t\tmeta_obj.remove(VAULT_NAME_META_KEY);\n\n\t\tserde_json::to_string(meta_obj)\n\n\t} else {\n\n\t\tErr(error::Error::custom(\"Meta is expected to be a serialized JSON object\"))\n\n\t}\n\n}\n\n\n\nimpl VaultKeyFile {\n\n\tpub fn load<R>(reader: R) -> Result<Self, serde_json::Error> where R: Read {\n\n\t\tserde_json::from_reader(reader)\n\n\t}\n\n\n", "file_path": "openethereum/accounts/ethstore/src/json/vault_key_file.rs", "rank": 56, "score": 310208.9095759872 }, { "content": "pub fn mode_switch_to_bool(switch: Option<Mode>, user_defaults: &UserDefaults) -> Result<Mode, String> {\n\n\tOk(switch.unwrap_or(user_defaults.mode().clone()))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse journaldb::Algorithm;\n\n\tuse user_defaults::UserDefaults;\n\n\tuse super::{SpecType, Pruning, ResealPolicy, Switch, tracing_switch_to_bool};\n\n\n\n\t#[test]\n\n\tfn test_spec_type_parsing() {\n\n\t\tassert_eq!(SpecType::Foundation, \"eth\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Foundation, \"ethereum\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Foundation, \"foundation\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Foundation, \"mainnet\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Classic, \"etc\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Classic, \"classic\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Poanet, \"poanet\".parse().unwrap());\n\n\t\tassert_eq!(SpecType::Poanet, \"poacore\".parse().unwrap());\n", "file_path": "openethereum/parity/params.rs", "rank": 57, "score": 309308.6178916282 }, { "content": "fn guess_mime_type(url: &str) -> Option<Mime> {\n\n\tconst CONTENT_TYPE: &'static str = \"content-type=\";\n\n\n\n\tlet mut it = url.split('#');\n\n\t// skip url\n\n\tlet url = it.next();\n\n\t// get meta headers\n\n\tlet metas = it.next();\n\n\tif let Some(metas) = metas {\n\n\t\tfor meta in metas.split('&') {\n\n\t\t\tlet meta = meta.to_lowercase();\n\n\t\t\tif meta.starts_with(CONTENT_TYPE) {\n\n\t\t\t\treturn meta[CONTENT_TYPE.len()..].parse().ok();\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\turl.and_then(|url| {\n\n\t\turl.split('.').last()\n\n\t}).and_then(|extension| {\n\n\t\tmime_guess::from_ext(extension).first()\n", "file_path": "openethereum/updater/hash-fetch/src/urlhint.rs", "rank": 58, "score": 307966.241280801 }, { "content": "pub fn upgrade_data_paths(base_path: &str, dirs: &DatabaseDirectories, pruning: Algorithm) {\n\n\tif home_dir().is_none() {\n\n\t\treturn;\n\n\t}\n\n\n\n\tlet legacy_root_path = replace_home(\"\", \"$HOME/.parity\");\n\n\tlet default_path = default_data_path();\n\n\tif legacy_root_path != base_path && base_path == default_path 
{\n\n\t\tupgrade_dir_location(&PathBuf::from(legacy_root_path), &PathBuf::from(&base_path));\n\n\t}\n\n\tupgrade_dir_location(&dirs.legacy_version_path(pruning), &dirs.db_path(pruning));\n\n\tupgrade_dir_location(&dirs.legacy_snapshot_path(), &dirs.snapshot_path());\n\n\tupgrade_dir_location(&dirs.legacy_network_path(), &dirs.network_path());\n\n\tupgrade_user_defaults(&dirs);\n\n}\n", "file_path": "openethereum/parity/upgrade.rs", "rank": 59, "score": 307269.5724091959 }, { "content": "/// Creates new test instance of `BlockChainDB`\n\npub fn new_db() -> Arc<dyn BlockChainDB> {\n\n\tlet blooms_dir = TempDir::new().unwrap();\n\n\tlet trace_blooms_dir = TempDir::new().unwrap();\n\n\n\n\tlet db = TestBlockChainDB {\n\n\t\tblooms: blooms_db::Database::open(blooms_dir.path()).unwrap(),\n\n\t\ttrace_blooms: blooms_db::Database::open(trace_blooms_dir.path()).unwrap(),\n\n\t\t_blooms_dir: blooms_dir,\n\n\t\t_trace_blooms_dir: trace_blooms_dir,\n\n\t\tkey_value: Arc::new(::kvdb_memorydb::create(::db::NUM_COLUMNS))\n\n\t};\n\n\n\n\tArc::new(db)\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/test_helpers/mod.rs", "rank": 60, "score": 304890.7482287205 }, { "content": "pub fn fatdb_switch_to_bool(switch: Switch, user_defaults: &UserDefaults, _algorithm: Algorithm) -> Result<bool, String> {\n\n\tlet result = match (user_defaults.is_first_launch, switch, user_defaults.fat_db) {\n\n\t\t(false, Switch::On, false) => Err(\"FatDB resync required\".into()),\n\n\t\t(_, Switch::On, _) => Ok(true),\n\n\t\t(_, Switch::Off, _) => Ok(false),\n\n\t\t(_, Switch::Auto, def) => Ok(def),\n\n\t};\n\n\tresult\n\n}\n\n\n", "file_path": "openethereum/parity/params.rs", "rank": 61, "score": 303508.66373875336 }, { "content": "/// Start secret store-related functionality\n\npub fn start(conf: Configuration, deps: Dependencies, executor: Executor) -> Result<Option<KeyServer>, String> {\n\n\tif !conf.enabled {\n\n\t\treturn Ok(None);\n\n\t}\n\n\n\n\tKeyServer::new(conf, deps, executor)\n\n\t\t.map(|s| Some(s))\n\n}\n", "file_path": "openethereum/parity/secretstore/server.rs", "rank": 62, "score": 300840.76994624006 }, { "content": "/// Get the standard version string for this software.\n\npub fn version() -> String {\n\n\tlet commit_date = format!(\"{}\", env!(\"VERGEN_COMMIT_DATE\")).replace(\"-\", \"\");\n\n\tformat!(\n\n\t\t\"OpenEthereum/v{}-{}-{}-{}/{}/rustc{}\",\n\n\t\tenv!(\"CARGO_PKG_VERSION\"),\n\n\t\tTHIS_TRACK,\n\n\t\tenv!(\"VERGEN_SHA_SHORT\"),\n\n\t\tcommit_date,\n\n\t\tplatform(),\n\n\t\tgenerated::rustc_version(),\n\n\t)\n\n}\n\n\n", "file_path": "openethereum/util/version/src/lib.rs", "rank": 63, "score": 300714.83759337023 }, { "content": "/// Get the platform identifier.\n\npub fn platform() -> String {\n\n\tformat!(\"{}\", env!(\"VERGEN_TARGET_TRIPLE\"))\n\n}\n\n\n", "file_path": "openethereum/util/version/src/lib.rs", "rank": 64, "score": 300707.76086203184 }, { "content": "/// Run executive jsontests on a given file.\n\npub fn run_test_file<H: FnMut(&str, HookType)>(p: &Path, h: &mut H) {\n\n\t::json_tests::test_common::run_test_file(p, do_json_test, h)\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/json_tests/executive.rs", "rank": 65, "score": 300459.7200312549 }, { "content": "/// Parse known parity formats. 
Recognizes either a short format with four fields\n\n/// or a long format which includes the same fields and an identity one.\n\nfn parse_parity_format(client_version: &str) -> Result<ParityClientData, ()> {\n\n\tconst PARITY_ID_STRING_MINIMUM_TOKENS: usize = 4;\n\n\n\n\tlet tokens: Vec<&str> = client_version.split(\"/\").collect();\n\n\n\n\tif tokens.len() < PARITY_ID_STRING_MINIMUM_TOKENS {\n\n\t\treturn Err(())\n\n\t}\n\n\n\n\tlet name = tokens[0];\n\n\n\n\tlet identity = if tokens.len() - 3 > 1 {\n\n\t\tSome(tokens[1..(tokens.len() - 3)].join(\"/\"))\n\n\t} else {\n\n\t\tNone\n\n\t};\n\n\n\n\tlet compiler = tokens[tokens.len() - 1];\n\n\tlet os = tokens[tokens.len() - 2];\n\n\n", "file_path": "openethereum/util/network/src/client_version.rs", "rank": 66, "score": 299059.42006644484 }, { "content": "/// Given the hash of the parent block and a step number, returns an RLP encoded partial empty step\n\n/// ready to be signed.\n\npub fn empty_step_rlp(step: u64, parent_hash: &H256) -> Vec<u8> {\n\n\tlet mut s = RlpStream::new_list(2);\n\n\ts.append(&step).append(parent_hash);\n\n\ts.out()\n\n}\n\n\n", "file_path": "openethereum/ethcore/engines/authority-round/src/lib.rs", "rank": 67, "score": 298322.2596348675 }, { "content": "pub fn token(e: String) -> Error {\n\n\tError {\n\n\t\tcode: ErrorCode::ServerError(codes::UNKNOWN_ERROR),\n\n\t\tmessage: \"There was an error when saving your authorization tokens.\".into(),\n\n\t\tdata: Some(Value::String(e)),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/rpc/src/v1/helpers/errors.rs", "rank": 68, "score": 297499.4665563919 }, { "content": "fn run_geth(data: &[u8]) -> Vec<u8> {\n\n\tunsafe {\n\n\t\tlet mut writeTo_file = OpenOptions::new().write(true).open(WRITE_TO.clone()).unwrap();\n\n\t\twriteTo_file.write_all(data).unwrap();\n\n\t}\n\n\n\n\tlet mut response = Vec::new();\n\n\tunsafe {\n\n\t\tlet mut readfrom_file = OpenOptions::new().read(true).open(READ_FROM.clone()).unwrap();\n\n\t\treadfrom_file.read_to_end(&mut response).unwrap();\n\n\t}\n\n\n\n\treturn response;\n\n}\n\n\n", "file_path": "openethereum/evmfuzz/fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 69, "score": 296227.99904983473 }, { "content": "fn simple_loop(gas: U256, c: &mut Criterion, bench_id: &str) {\n\n\tlet code = black_box(\n\n\t\thex!(\"606060405260005b620042408112156019575b6001016007565b600081905550600680602b6000396000f3606060405200\").to_vec()\n\n\t);\n\n\n\n\tc.bench_function(bench_id, move |b| {\n\n\t\tb.iter(|| {\n\n\t\t\tlet mut params = ActionParams::default();\n\n\t\t\tparams.gas = gas;\n\n\t\t\tparams.code = Some(Arc::new(code.clone()));\n\n\n\n\t\t\tlet mut ext = FakeExt::new();\n\n\t\t\tlet evm = Factory::default().create(params, ext.schedule(), ext.depth());\n\n\t\t\tlet _ = evm.exec(&mut ext);\n\n\t\t})\n\n\t});\n\n}\n\n\n", "file_path": "openethereum/evmbin/benches/mod.rs", "rank": 70, "score": 296117.7047604966 }, { "content": "#[doc(hidden)]\n\n#[export_name = \"LLVMFuzzerCustomMutator\"]\n\npub fn mutate_input_wrap(data: *mut u8, size: usize, max_size: usize, seed: u32) -> usize {\n\n let test_input = ::std::panic::catch_unwind(|| unsafe {\n\n let data_slice = ::std::slice::from_raw_parts_mut(data, size);\n\n\n\n rust_fuzzer_mutate_input( data_slice, max_size, seed);\n\n });\n\n\n\n if test_input.err().is_some() {\n\n // hopefully the custom panic hook will be called before and abort the\n\n // process before the stack frames are unwinded.\n\n ::std::process::abort();\n\n }\n\n 0\n\n}\n\n*/\n\n\n\n\n", "file_path": "custom-libfuzzer/src/lib.rs", "rank": 
71, "score": 295099.2399448393 }, { "content": "fn fuzz_main(data: &[u8]) {\n\n\tunsafe {\n\n\t\tif (FIRST_TIME) {\n\n\n\n\t\t\tWRITE_TO = get_absolute_path_string(format!(\"fifos/{}\", rand::thread_rng().next_u64().to_string()));\n\n\t\t\tREAD_FROM = get_absolute_path_string(format!(\"fifos/{}\", rand::thread_rng().next_u64().to_string()));\n\n\n\n\t\t\tlibc::mkfifo(CString::new(WRITE_TO.clone()).unwrap().as_ptr(), 0o644);\n\n\t\t\tlibc::mkfifo(CString::new(READ_FROM.clone()).unwrap().as_ptr(), 0o644);\n\n\n\n\t\t\tCommand::new(get_absolute_path_string(\"geth/src/github.com/ethereum/go-ethereum/build/bin/evm\".into()))\n\n\t\t\t\t.arg(WRITE_TO.as_str())\n\n\t\t\t\t.arg(READ_FROM.as_str())\n\n\t\t\t\t.spawn() \n\n\t\t\t\t.unwrap();\n\n\n\n\t\t\tFIRST_TIME = false;\n\n\t\t}\n\n\n\n\t}\n", "file_path": "openethereum/evmfuzz/fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 72, "score": 292895.2507993722 }, { "content": "#[derive(Debug)]\n\nstruct DiskMap<K: hash::Hash + Eq, V> {\n\n\tpath: PathBuf,\n\n\tcache: HashMap<K, V>,\n\n\ttransient: bool,\n\n}\n\n\n\nimpl<K: hash::Hash + Eq, V> ops::Deref for DiskMap<K, V> {\n\n\ttype Target = HashMap<K, V>;\n\n\tfn deref(&self) -> &Self::Target {\n\n\t\t&self.cache\n\n\t}\n\n}\n\n\n\nimpl<K: hash::Hash + Eq, V> ops::DerefMut for DiskMap<K, V> {\n\n\tfn deref_mut(&mut self) -> &mut Self::Target {\n\n\t\t&mut self.cache\n\n\t}\n\n}\n\n\n\nimpl<K: hash::Hash + Eq, V> DiskMap<K, V> {\n", "file_path": "openethereum/accounts/src/stores.rs", "rank": 73, "score": 292841.1305608586 }, { "content": "/// Starts the parity client.\n\n///\n\n/// `on_client_rq` is the action to perform when the client receives an RPC request to be restarted\n\n/// with a different chain.\n\n///\n\n/// `on_updater_rq` is the action to perform when the updater has a new binary to execute.\n\n///\n\n/// The first parameter is the command line arguments that you would pass when running the parity\n\n/// binary.\n\n///\n\n/// On error, returns what to print on stderr.\n\n// FIXME: totally independent logging capability, see https://github.com/openethereum/openethereum/issues/10252\n\npub fn start<Cr, Rr>(\n\n\tconf: Configuration,\n\n\tlogger: Arc<RotatingLogger>,\n\n\ton_client_rq: Cr,\n\n\ton_updater_rq: Rr\n\n) -> Result<ExecutionAction, String>\n\n\twhere\n\n\t\tCr: Fn(String) + 'static + Send,\n\n\t\tRr: Fn() + 'static + Send\n\n{\n\n\tlet deprecated = find_deprecated(&conf.args);\n\n\tfor d in deprecated {\n\n\t\tprintln!(\"{}\", d);\n\n\t}\n\n\n\n\texecute(conf.into_command()?, logger, on_client_rq, on_updater_rq)\n\n}\n", "file_path": "openethereum/parity/lib.rs", "rank": 74, "score": 291457.19328657247 }, { "content": "/// Validates and formats bootnodes option.\n\npub fn to_bootnodes(bootnodes: &Option<String>) -> Result<Vec<String>, String> {\n\n\tmatch *bootnodes {\n\n\t\tSome(ref x) if !x.is_empty() => x.split(',').map(|s| {\n\n\t\t\tmatch validate_node_url(s).map(Into::into) {\n\n\t\t\t\tNone => Ok(s.to_owned()),\n\n\t\t\t\tSome(sync::Error::AddressResolve(_)) => Err(format!(\"Failed to resolve hostname of a boot node: {}\", s)),\n\n\t\t\t\tSome(_) => Err(format!(\"Invalid node address format given for a boot node: {}\", s)),\n\n\t\t\t}\n\n\t\t}).collect(),\n\n\t\tSome(_) => Ok(vec![]),\n\n\t\tNone => Ok(vec![])\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 75, "score": 288352.44042478694 }, { "content": "/// Provide raw information on the package.\n\npub fn raw_package_info() -> (&'static str, &'static str, &'static str) {\n\n\t(THIS_TRACK, 
env![\"CARGO_PKG_VERSION\"], env![\"VERGEN_SHA\"])\n\n}\n", "file_path": "openethereum/util/version/src/lib.rs", "rank": 76, "score": 287312.2166258146 }, { "content": "// helper for making a packet out of `Requests`.\n\nfn make_packet(req_id: usize, requests: &NetworkRequests) -> Vec<u8> {\n\n\tlet mut stream = RlpStream::new_list(2);\n\n\tstream.append(&req_id).append_list(&requests.requests());\n\n\tstream.out()\n\n}\n\n\n\n// expected result from a call.\n", "file_path": "openethereum/ethcore/light/src/net/tests/mod.rs", "rank": 77, "score": 287092.4848002197 }, { "content": "/// Creates a new temporary `BlockChainDB` on FS\n\npub fn new_temp_db(tempdir: &Path) -> Arc<dyn BlockChainDB> {\n\n\tlet blooms_dir = TempDir::new().unwrap();\n\n\tlet trace_blooms_dir = TempDir::new().unwrap();\n\n\tlet key_value_dir = tempdir.join(\"key_value\");\n\n\n\n\tlet db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);\n\n\tlet key_value_db = Database::open(&db_config, key_value_dir.to_str().unwrap()).unwrap();\n\n\n\n\tlet db = TestBlockChainDB {\n\n\t\tblooms: blooms_db::Database::open(blooms_dir.path()).unwrap(),\n\n\t\ttrace_blooms: blooms_db::Database::open(trace_blooms_dir.path()).unwrap(),\n\n\t\t_blooms_dir: blooms_dir,\n\n\t\t_trace_blooms_dir: trace_blooms_dir,\n\n\t\tkey_value: Arc::new(key_value_db)\n\n\t};\n\n\n\n\tArc::new(db)\n\n}\n\n\n", "file_path": "openethereum/ethcore/src/test_helpers/mod.rs", "rank": 78, "score": 286905.02712785214 }, { "content": "/// Create a factory for building snapshot chunks and restoring from them.\n\n/// `None` indicates that the engine doesn't support snapshot creation.\n\npub fn chunker(snapshot_type: Snapshotting) -> Option<Box<dyn SnapshotComponents>> {\n\n\tmatch snapshot_type {\n\n\t\tPoA => Some(Box::new(PoaSnapshot)),\n\n\t\tPoW { blocks, max_restore_blocks } => Some(Box::new(PowSnapshot::new(blocks, max_restore_blocks))),\n\n\t\tUnsupported => None,\n\n\t}\n\n}\n", "file_path": "openethereum/ethcore/snapshot/src/consensus/mod.rs", "rank": 79, "score": 286328.7676292999 }, { "content": "fn kill_color(s: &str) -> String {\n\n\tlazy_static! {\n\n\t\tstatic ref RE: Regex = Regex::new(\"\\x1b\\\\[[^m]+m\").unwrap();\n\n\t}\n\n\tRE.replace_all(s, \"\").to_string()\n\n}\n\n\n", "file_path": "openethereum/parity/logger/src/lib.rs", "rank": 80, "score": 286012.18194883247 }, { "content": "#[cfg(not(feature = \"accounts\"))]\n\npub fn execute(_cmd: AccountCmd) -> Result<String, String> {\n\n\t\tErr(\"Account management is deprecated. 
Please see #9997 for alternatives:\\nhttps://github.com/openethereum/openethereum/issues/9997\".into())\n\n}\n\n\n\n#[cfg(feature = \"accounts\")]\n\nmod command {\n\n\tuse super::*;\n\n\tuse std::path::PathBuf;\n\n\tuse accounts::{AccountProvider, AccountProviderSettings};\n\n\tuse ethstore::{EthStore, SecretStore, SecretVaultRef, import_account, import_accounts, read_geth_accounts};\n\n\tuse ethstore::accounts_dir::RootDiskDirectory;\n\n\tuse helpers::{password_prompt, password_from_file};\n\n\n\n\tpub fn execute(cmd: AccountCmd) -> Result<String, String> {\n\n\t\tmatch cmd {\n\n\t\t\tAccountCmd::New(new_cmd) => new(new_cmd),\n\n\t\t\tAccountCmd::List(list_cmd) => list(list_cmd),\n\n\t\t\tAccountCmd::Import(import_cmd) => import(import_cmd),\n\n\t\t\tAccountCmd::ImportFromGeth(import_geth_cmd) => import_geth(import_geth_cmd)\n\n\t\t}\n", "file_path": "openethereum/parity/account.rs", "rank": 81, "score": 285706.8967518617 }, { "content": "/// Read a password from password file.\n\npub fn password_from_file(path: String) -> Result<Password, String> {\n\n\tlet passwords = passwords_from_files(&[path])?;\n\n\t// use only first password from the file\n\n\tpasswords.get(0).map(Password::clone)\n\n\t\t.ok_or_else(|| \"Password file seems to be empty.\".to_owned())\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 82, "score": 285706.89675186167 }, { "content": "pub fn execute(cmd: ImportWallet) -> Result<String, String> {\n\n\tlet password = match cmd.password_file.clone() {\n\n\t\tSome(file) => password_from_file(file)?,\n\n\t\tNone => password_prompt()?,\n\n\t};\n\n\n\n\tlet wallet = PresaleWallet::open(cmd.wallet_path.clone()).map_err(|_| \"Unable to open presale wallet.\")?;\n\n\tlet kp = wallet.decrypt(&password).map_err(|_| \"Invalid password.\")?;\n\n\tlet address = kp.address();\n\n\timport_account(&cmd, kp, password);\n\n\tOk(format!(\"{:?}\", address))\n\n}\n\n\n", "file_path": "openethereum/parity/presale.rs", "rank": 83, "score": 285706.89675186167 }, { "content": "pub fn generate_token_and_url(ws_conf: &rpc::WsConfiguration, logger_config: &LogConfig) -> Result<NewToken, String> {\n\n\tlet code = generate_new_token(&ws_conf.signer_path, logger_config.color).map_err(|err| format!(\"Error generating token: {:?}\", err))?;\n\n\tlet colored = |s: String| match logger_config.color {\n\n\t\ttrue => format!(\"{}\", White.bold().paint(s)),\n\n\t\tfalse => s,\n\n\t};\n\n\n\n\tOk(NewToken {\n\n\t\ttoken: code.clone(),\n\n\t\tmessage: format!(\n\n\t\t\tr#\"\n\nGenerated token:\n\n{}\n\n\"#,\n\n\t\t\tcolored(code)\n\n\t\t),\n\n\t})\n\n}\n\n\n", "file_path": "openethereum/parity/signer.rs", "rank": 84, "score": 284965.4365889256 }, { "content": "fn default_path(t: AppDataType) -> Option<PathBuf> {\n\n\tlet app_info = AppInfo { name: PRODUCT, author: AUTHOR };\n\n\tlet old_root = get_app_root(t, &app_info).ok()?;\n\n\tif old_root.exists() {\n\n\t\treturn Some(old_root);\n\n\t}\n\n\n\n\tlet mut root = data_root(t).ok()?;\n\n\troot.push(if LOWERCASE { \"openethereum\" } else { \"OpenEthereum\" });\n\n\tSome(root)\n\n}\n\n\n", "file_path": "openethereum/util/dir/src/lib.rs", "rank": 85, "score": 284467.9400509996 }, { "content": "fn format_vaults(vaults: &[String]) -> String {\n\n\tvaults.join(\"\\n\")\n\n}\n\n\n", "file_path": "openethereum/accounts/ethstore/cli/src/main.rs", "rank": 86, "score": 284309.6458216437 }, { "content": "/// Execute this snapshot command.\n\npub fn execute(cmd: SnapshotCommand) -> Result<String, String> {\n\n\tmatch cmd.kind {\n\n\t\tKind::Take => 
cmd.take_snapshot()?,\n\n\t\tKind::Restore => cmd.restore()?,\n\n\t}\n\n\n\n\tOk(String::new())\n\n}\n", "file_path": "openethereum/parity/snapshot_cmd.rs", "rank": 87, "score": 283404.4799878573 }, { "content": "fn run(args: &[&str]) -> String {\n\n\tlet output = Command::new(\"cargo\")\n\n\t\t.args(&[\"run\", \"--\"])\n\n\t\t.args(args)\n\n\t\t.output()\n\n\t\t.unwrap();\n\n\tassert!(output.status.success());\n\n\tString::from_utf8(output.stdout).unwrap()\n\n}\n\n\n", "file_path": "openethereum/accounts/ethstore/cli/tests/cli.rs", "rank": 88, "score": 283362.77088565764 }, { "content": "fn check_hex(string: &str) -> Result<()> {\n\n\tif string.len() >= 2 && &string[..2] == \"0x\" {\n\n\t\treturn Ok(())\n\n\t}\n\n\n\n\treturn Err(ErrorKind::HexParseError(\n\n\t\tformat!(\"Expected a 0x-prefixed string of even length, found {} length string\", string.len()))\n\n\t)?\n\n}\n", "file_path": "openethereum/util/EIP-712/src/encode.rs", "rank": 89, "score": 283362.77088565764 }, { "content": "#[doc(hidden)]\n\npub fn write_and_flush(s: String) {\n\n\tif let Err(err) = std::io::Write::write_all(&mut std::io::stdout(), s.as_bytes()) {\n\n\t\terror!(target: \"json_tests\", \"io::Write::write_all to stdout failed because of: {:?}\", err);\n\n\t}\n\n\tif let Err(err) = std::io::Write::flush(&mut std::io::stdout()) {\n\n\t\terror!(target: \"json_tests\", \"io::Write::flush stdout failed because of: {:?}\", err);\n\n\t}\n\n}\n", "file_path": "openethereum/ethcore/src/json_tests/macros.rs", "rank": 90, "score": 283236.36139839736 }, { "content": "pub fn to_address(s: Option<String>) -> Result<Address, String> {\n\n\tmatch s {\n\n\t\tSome(ref a) => clean_0x(a).parse().map_err(|_| format!(\"Invalid address: {:?}\", a)),\n\n\t\tNone => Ok(Address::zero())\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 91, "score": 282410.69919174403 }, { "content": "fn validate_phrase(phrase: &str) -> String {\n\n\tmatch Brain::validate_phrase(phrase, BRAIN_WORDS) {\n\n\t\tOk(()) => format!(\"The recovery phrase looks correct.\\n\"),\n\n\t\tErr(err) => format!(\"The recover phrase was not generated by Parity: {}\", err)\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/accounts/ethkey/cli/src/main.rs", "rank": 92, "score": 280780.5731726105 }, { "content": "fn print_hash_of(maybe_file: Option<String>) -> Result<String, String> {\n\n\tif let Some(file) = maybe_file {\n\n\t\tlet mut f = BufReader::new(File::open(&file).map_err(|_| \"Unable to open file\".to_owned())?);\n\n\t\tlet hash = keccak_buffer(&mut f).map_err(|_| \"Unable to read from file\".to_owned())?;\n\n\t\tOk(format!(\"{:x}\", hash))\n\n\t} else {\n\n\t\tErr(\"Streaming from standard input not yet supported. 
Specify a file.\".to_owned())\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/parity/lib.rs", "rank": 93, "score": 280319.91777965974 }, { "content": "pub fn filter_block_not_found(id: BlockId) -> Error {\n\n\tError {\n\n\t\tcode: ErrorCode::ServerError(codes::UNSUPPORTED_REQUEST), // Specified in EIP-234.\n\n\t\tmessage: \"One of the blocks specified in filter (fromBlock, toBlock or blockHash) cannot be found\".into(),\n\n\t\tdata: Some(Value::String(match id {\n\n\t\t\tBlockId::Hash(hash) => format!(\"0x{:x}\", hash),\n\n\t\t\tBlockId::Number(number) => format!(\"0x{:x}\", number),\n\n\t\t\tBlockId::Earliest => \"earliest\".to_string(),\n\n\t\t\tBlockId::Latest => \"latest\".to_string(),\n\n\t\t})),\n\n\t}\n\n}\n\n\n", "file_path": "openethereum/rpc/src/v1/helpers/errors.rs", "rank": 94, "score": 279230.89016104 }, { "content": "pub fn open_database(client_path: &str, config: &DatabaseConfig) -> io::Result<Arc<dyn BlockChainDB>> {\n\n\tlet path = Path::new(client_path);\n\n\n\n\tlet blooms_path = path.join(\"blooms\");\n\n\tlet trace_blooms_path = path.join(\"trace_blooms\");\n\n\tfs::create_dir_all(&blooms_path)?;\n\n\tfs::create_dir_all(&trace_blooms_path)?;\n\n\n\n\tlet db = AppDB {\n\n\t\tkey_value: Arc::new(Database::open(&config, client_path)?),\n\n\t\tblooms: blooms_db::Database::open(blooms_path)?,\n\n\t\ttrace_blooms: blooms_db::Database::open(trace_blooms_path)?,\n\n\t};\n\n\n\n\tOk(Arc::new(db))\n\n}\n", "file_path": "openethereum/parity/db/rocksdb/mod.rs", "rank": 95, "score": 279225.67294324195 }, { "content": "pub fn execute(cmd: ExportHsyncCmd) -> Result<String, String> {\n\n\tuse light::client as light_client;\n\n\tuse parking_lot::Mutex;\n\n\n\n\t// load spec\n\n\tlet spec = cmd.spec.spec(SpecParams::new(cmd.dirs.cache.as_ref(), OptimizeFor::Memory))?;\n\n\n\n\t// load genesis hash\n\n\tlet genesis_hash = spec.genesis_header().hash();\n\n\n\n\t// database paths\n\n\tlet db_dirs = cmd.dirs.database(genesis_hash, cmd.spec.legacy_fork_name(), spec.data_dir.clone());\n\n\n\n\t// user defaults path\n\n\tlet user_defaults_path = db_dirs.user_defaults_path();\n\n\n\n\t// load user defaults\n\n\tlet user_defaults = UserDefaults::load(&user_defaults_path)?;\n\n\n\n\t// select pruning algorithm\n", "file_path": "openethereum/parity/export_hardcoded_sync.rs", "rank": 96, "score": 278950.91625418235 }, { "content": "/// Open a new light client DB.\n\npub fn open_db_light(\n\n\tclient_path: &str,\n\n\tcache_config: &CacheConfig,\n\n\tcompaction: &DatabaseCompactionProfile\n\n) -> io::Result<Arc<dyn BlockChainDB>> {\n\n\tlet path = Path::new(client_path);\n\n\n\n\tlet db_config = DatabaseConfig {\n\n\t\tmemory_budget: helpers::memory_per_column_light(cache_config.blockchain() as usize),\n\n\t\tcompaction: helpers::compaction_profile(&compaction, path),\n\n\t\t.. DatabaseConfig::with_columns(NUM_COLUMNS)\n\n\t};\n\n\n\n\topen_database(client_path, &db_config)\n\n}\n\n\n", "file_path": "openethereum/parity/db/rocksdb/mod.rs", "rank": 97, "score": 278153.4109399085 }, { "content": "/// Reads passwords from files. Treats each line as a separate password.\n\npub fn passwords_from_files(files: &[String]) -> Result<Vec<Password>, String> {\n\n\tlet passwords = files.iter().map(|filename| {\n\n\t\tlet file = File::open(filename).map_err(|_| format!(\"{} Unable to read password file. 
Ensure it exists and permissions are correct.\", filename))?;\n\n\t\tlet reader = BufReader::new(&file);\n\n\t\tlet lines = reader.lines()\n\n\t\t\t.filter_map(|l| l.ok())\n\n\t\t\t.map(|pwd| pwd.trim().to_owned().into())\n\n\t\t\t.collect::<Vec<Password>>();\n\n\t\tOk(lines)\n\n\t}).collect::<Result<Vec<Vec<Password>>, String>>();\n\n\tOk(passwords?.into_iter().flat_map(|x| x).collect())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse std::time::Duration;\n\n\tuse std::fs::File;\n\n\tuse std::io::Write;\n\n\tuse std::collections::HashSet;\n\n\tuse tempfile::TempDir;\n", "file_path": "openethereum/parity/helpers.rs", "rank": 98, "score": 277753.0424934847 }, { "content": "/// Prompts user asking for password.\n\npub fn password_prompt() -> Result<Password, String> {\n\n\tuse rpassword::read_password;\n\n\tconst STDIN_ERROR: &'static str = \"Unable to ask for password on non-interactive terminal.\";\n\n\n\n\tprintln!(\"Please note that password is NOT RECOVERABLE.\");\n\n\tprint!(\"Type password: \");\n\n\tflush_stdout();\n\n\n\n\tlet password = read_password().map_err(|_| STDIN_ERROR.to_owned())?.into();\n\n\n\n\tprint!(\"Repeat password: \");\n\n\tflush_stdout();\n\n\n\n\tlet password_repeat = read_password().map_err(|_| STDIN_ERROR.to_owned())?.into();\n\n\n\n\tif password != password_repeat {\n\n\t\treturn Err(\"Passwords do not match!\".into());\n\n\t}\n\n\n\n\tOk(password)\n\n}\n\n\n", "file_path": "openethereum/parity/helpers.rs", "rank": 99, "score": 276598.7602492584 } ]
Rust
src/refmanager.rs
WilsonGramer/ref_thread_local.rs
52cd3d9641d1b776ebd1586bdc914dd889a9f9de
extern crate std; use super::RefThreadLocal; use std::cell::Cell; use std::fmt::{Debug, Display, Formatter}; use std::ops::{Deref, DerefMut}; use std::ptr::{null, null_mut}; use std::thread::LocalKey; struct RefManagerInnerData<T> { borrow_count: Cell<isize>, value: T, } pub struct RefManagerPeekData<T> { ptr_inner_data: *mut RefManagerInnerData<T>, ptr_borrow_count: *const Cell<isize>, ptr_value: *mut T, } impl<T> Clone for RefManagerPeekData<T> { fn clone(&self) -> Self { return Self { ptr_inner_data: self.ptr_inner_data, ptr_borrow_count: self.ptr_borrow_count, ptr_value: self.ptr_value, }; } } impl<T> Copy for RefManagerPeekData<T> {} pub struct RefManagerDataGuard<T> { peek_data: Cell<RefManagerPeekData<T>>, } pub struct Ref<'a, T: ?Sized + 'a> { borrow_count: &'a Cell<isize>, value: &'a T, } pub struct RefMut<'a, T: ?Sized + 'a> { borrow_count: &'a Cell<isize>, value: &'a mut T, } #[derive(Debug)] pub struct RefManager<T: 'static> { local_key: &'static LocalKey<RefManagerDataGuard<T>>, init_func: fn() -> T, } #[derive(Debug)] pub struct BorrowError { _private: (), } #[derive(Debug)] pub struct BorrowMutError { _private: (), } #[macro_export] #[doc(hidden)] macro_rules! _create_refmanager_data { ($NAME:ident, $T:ty) => { thread_local! { static $NAME: $crate::RefManagerDataGuard<$T> = $crate::RefManagerDataGuard::INIT_SELF; } }; } impl<T> RefManager<T> { pub fn new(local_key: &'static LocalKey<RefManagerDataGuard<T>>, init_func: fn() -> T) -> Self { RefManager { local_key, init_func, } } fn get_initialized_peek(&self) -> RefManagerPeekData<T> { self.local_key.with(|guard| { if guard.peek_data.get().ptr_inner_data.is_null() { self.initialize().expect("failed to initialize"); } guard.peek_data.get() }) } } impl<T> RefThreadLocal<T> for RefManager<T> { fn initialize(&self) -> Result<(), ()> { self.local_key.with(|guard| { if guard.peek_data.get().ptr_inner_data.is_null() { let mut box_inner_data = Box::new(RefManagerInnerData { borrow_count: Cell::new(0), value: (self.init_func)(), }); let ptr_borrow_count = &box_inner_data.borrow_count as *const Cell<isize>; let ptr_value = &mut box_inner_data.value as *mut T; let ptr_inner_data = Box::into_raw(box_inner_data); guard.peek_data.set(RefManagerPeekData { ptr_inner_data, ptr_borrow_count, ptr_value, }); Ok(()) } else { Err(()) } }) } fn destroy(&self) -> Result<(), ()> { self.local_key.with(|guard| guard.destroy()) } fn is_initialized(&self) -> bool { self.local_key .with(|guard| !guard.peek_data.get().ptr_inner_data.is_null()) } fn borrow<'a>(&self) -> Ref<'a, T> { self.try_borrow().expect("already mutably borrowed") } fn borrow_mut<'a>(&self) -> RefMut<'a, T> { self.try_borrow_mut().expect("already borrowed") } fn try_borrow<'a>(&self) -> Result<Ref<'a, T>, BorrowError> { let peek_data = self.get_initialized_peek(); let (ptr_borrow_count, ptr_value) = (peek_data.ptr_borrow_count, peek_data.ptr_value); let cell_borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap(); let borrow_count = cell_borrow_count.get(); if borrow_count < 0 { return Err(BorrowError { _private: () }); } cell_borrow_count.set(borrow_count + 1); Ok(Ref { borrow_count: cell_borrow_count, value: unsafe { ptr_value.as_ref() }.unwrap(), }) } fn try_borrow_mut<'a>(&self) -> Result<RefMut<'a, T>, BorrowMutError> { let peek_data = self.get_initialized_peek(); let (ptr_borrow_count, ptr_value) = (peek_data.ptr_borrow_count, peek_data.ptr_value); let cell_borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap(); let borrow_count = cell_borrow_count.get(); if borrow_count 
!= 0 { return Err(BorrowMutError { _private: () }); } cell_borrow_count.set(-1); Ok(RefMut { borrow_count: cell_borrow_count, value: unsafe { ptr_value.as_mut() }.unwrap(), }) } } impl<'a, T: ?Sized> Drop for Ref<'a, T> { fn drop(&mut self) { self.borrow_count.set(self.borrow_count.get() - 1); } } impl<'a, T: ?Sized> Deref for Ref<'a, T> { type Target = T; fn deref(&self) -> &T { self.value } } impl<'a, T: ?Sized> Ref<'a, T> { pub fn map<U, F>(orig: Ref<'a, T>, f: F) -> Ref<'a, U> where F: FnOnce(&T) -> &U, { let borrow_count = orig.borrow_count; let value = orig.value; std::mem::forget(orig); Ref { borrow_count: borrow_count, value: f(value), } } pub fn map_split<U: ?Sized, V: ?Sized, F>(orig: Ref<'a, T>, f: F) -> (Ref<'a, U>, Ref<'a, V>) where F: FnOnce(&T) -> (&U, &V), { let borrow_count = orig.borrow_count; let value = orig.value; std::mem::forget(orig); let (a, b) = f(value); borrow_count.set(borrow_count.get() + 1); ( Ref { borrow_count: borrow_count, value: a, }, Ref { borrow_count: borrow_count, value: b, }, ) } } impl<'a, T: Debug> Debug for Ref<'a, T> { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { Debug::fmt(&**self, f) } } impl<'a, T: Display> Display for Ref<'a, T> { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { self.value.fmt(f) } } impl<'a, T: ?Sized> Drop for RefMut<'a, T> { fn drop(&mut self) { self.borrow_count.set(self.borrow_count.get() + 1); } } impl<'a, T: ?Sized> Deref for RefMut<'a, T> { type Target = T; fn deref(&self) -> &T { self.value } } impl<'a, T: ?Sized> DerefMut for RefMut<'a, T> { fn deref_mut(&mut self) -> &mut T { self.value } } impl<'a, T: Debug> Debug for RefMut<'a, T> { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { Debug::fmt(&**self, f) } } impl<'a, T: Display> Display for RefMut<'a, T> { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { self.value.fmt(f) } } impl<'a, T: ?Sized> RefMut<'a, T> { pub fn map<U, F>(orig: RefMut<'a, T>, f: F) -> RefMut<'a, U> where F: FnOnce(&mut T) -> &mut U, { let borrow_count = orig.borrow_count; let value = orig.value as *mut T; std::mem::forget(orig); RefMut { borrow_count: borrow_count, value: f(unsafe { value.as_mut().unwrap() }), } } pub fn map_split<U: ?Sized, V: ?Sized, F>(orig: RefMut<'a, T>, f: F) -> (RefMut<'a, U>, RefMut<'a, V>) where F: FnOnce(&mut T) -> (&mut U, &mut V), { let borrow_count = orig.borrow_count; let value = orig.value as *mut T; std::mem::forget(orig); let (a, b) = f(unsafe { value.as_mut().unwrap() }); borrow_count.set(borrow_count.get() - 1); ( RefMut { borrow_count: borrow_count, value: a, }, RefMut { borrow_count: borrow_count, value: b, }, ) } } impl<T> RefManagerDataGuard<T> { pub const INIT_PEEK_DATA: RefManagerPeekData<T> = RefManagerPeekData { ptr_inner_data: null_mut(), ptr_borrow_count: null(), ptr_value: null_mut(), }; pub const INIT_SELF: Self = RefManagerDataGuard { peek_data: Cell::new(Self::INIT_PEEK_DATA), }; pub fn destroy(&self) -> Result<(), ()> { let peek_data = self.peek_data.get(); let (ptr_inner_data, ptr_borrow_count) = (peek_data.ptr_inner_data, peek_data.ptr_borrow_count); if ptr_inner_data.is_null() { Err(()) } else { let borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap().get(); if borrow_count != 0 { panic!("cannot destroy before all references are dropped"); } unsafe { Box::from_raw(ptr_inner_data) }; self.peek_data.set(Self::INIT_PEEK_DATA); Ok(()) } } } impl<T> Drop for RefManagerDataGuard<T> { fn drop(&mut self) { let _ = self.destroy(); } }
extern crate std; use super::RefThreadLocal; use std::cell::Cell; use std::fmt::{Debug, Display, Formatter}; use std::ops::{Deref, DerefMut}; use std::ptr::{null, null_mut}; use std::thread::LocalKey; struct RefManagerInnerData<T> { borrow_count: Cell<isize>, value: T, } pub struct RefManagerPeekData<T> { ptr_inner_data: *mut RefManagerInnerData<T>, ptr_borrow_count: *const Cell<isize>, ptr_value: *mut T, } impl<T> Clone for RefManagerPeekData<T> { fn clone(&self) -> Self { return Self { ptr_inner_data: self.ptr_inner_data, ptr_borrow_count: self.ptr_borrow_count, ptr_value: self.ptr_value, }; } } impl<T> Copy for RefManagerPeekData<T> {} pub struct RefManagerDataGuard<T> { peek_data: Cell<RefManagerPeekData<T>>, } pub struct Ref<'a, T: ?Sized + 'a> { borrow_count: &'a Cell<isize>, value: &'a T, } pub struct RefMut<'a, T: ?Sized + 'a> { borrow_count: &'a Cell<isize>, value: &'a mut T, } #[derive(Debug)] pub struct RefManager<T: 'static> { local_key: &'static LocalKey<RefManagerDataGuard<T>>, init_func: fn() -> T, } #[derive(Debug)] pub struct BorrowError { _private: (), } #[derive(Debug)] pub struct BorrowMutError { _private: (), } #[macro_export] #[doc(hidden)] macro_rules! _create_refmanager_data { ($NAME:ident, $T:ty) => { thread_local! { static $NAME: $crate::RefManagerDataGuard<$T> = $crate::RefManagerDataGuard::INIT_SELF; } }; } impl<T> RefManager<T> { pub fn new(local_key: &'static LocalKey<RefManagerDataGuard<T>>, init_func: fn() -> T) -> Self { RefManager { local_key, init_func, } } fn get_initialized_peek(&self) -> RefManagerPeekData<T> { self.local_key.with(|guard| { if guard.peek_data.get().ptr_inner_data.is_null() { self.initialize().expect("failed to initialize"); } guard.peek_data.get() }) } } impl<T> RefThreadLocal<T> for RefManager<T> { fn initialize(&self) -> Result<(), ()> { self.local_key.with(|guard| { if guard.peek_data.get().ptr_inner_data.is_null() { let mut box_inner_data = Box::new(RefManagerInnerData { borrow_count: Cell::new(0), value: (self.init_func)(), }); let ptr_borrow_count = &box_inner_data.borrow_count as *const Cell<isize>; let ptr_value = &mut box_inner_data.value as *mut T; let ptr_inner_data = Box::into_raw(box_inner_data); guard.peek_data.set(RefManagerPeekData { ptr_inner_data, ptr_borrow_count, ptr_value, }); Ok(()) } else { Err(()) } }) } fn destroy(&self) -> Result<(), ()> { self.local_key.with(|guard| guard.destroy()) } fn is_initialized(&self) -> bool { self.local_key .with(|guard| !guard.peek_data.get().ptr_inner_data.is_null()) } fn borrow<'a>(&self) -> Ref<'a, T> { self.try_borrow().expect("already mutably borrowed") } fn borrow_mut<'a>(&self) -> RefMut<'a, T> { self.try_borrow_mut().expect("already borrowed") } fn try_borrow<'a>(&self) -> Result<Ref<'a, T>, BorrowError> { let peek_data = self.get_initialized_peek(); let (ptr_borrow_count, ptr_value) = (peek_data.ptr_borrow_count, peek_data.ptr_value); let cell_borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap(); let borrow_count = cell_borrow_count.get(); if borrow_count < 0 { return Err(BorrowError { _private: () }); } cell_borrow_count.set(borrow_count + 1); Ok(Ref { borrow_count: cell_borrow_count, value: unsafe { ptr_value.as_ref() }.unwrap(), }) } fn try_borrow_mut<'a>(&self) -> Result<RefMut<'a, T>, BorrowMutError> { let peek_data = self.get_initialized_peek(); let (ptr_borrow_count, ptr_value) = (peek_data.ptr_borrow_count, peek_data.ptr_value); let cell_borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap(); let borrow_count = cell_borrow_count.get(); if borrow_count 
!= 0 { return Err(BorrowMutError { _private: () }); } cell_borrow_count.set(-1); Ok(RefMut { borrow_count: cell_borrow_count, value: unsafe { ptr_value.as_mut() }.unwrap(), }) } } impl<'a, T: ?Sized> Drop for Ref<'a, T> { fn drop(&mut self) { self.borrow_count.set(self.borrow_count.get() - 1); } } impl<'a, T: ?Sized> Deref for Ref<'a, T> { type Target = T; fn deref(&self) -> &T { self.value } } impl<'a, T: ?Sized> Ref<'a, T> { pub fn map<U, F>(orig: Ref<'a, T>, f: F) -> Ref<'a, U> where F: FnOnce(&T) -> &U, { let borrow_count = orig.borrow_count; let value = orig.value; std::mem::forget(orig); Ref { borrow_count: borrow_count, value: f(value), } }
}

impl<'a, T: Debug> Debug for Ref<'a, T> {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        Debug::fmt(&**self, f)
    }
}

impl<'a, T: Display> Display for Ref<'a, T> {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        self.value.fmt(f)
    }
}

impl<'a, T: ?Sized> Drop for RefMut<'a, T> {
    fn drop(&mut self) {
        self.borrow_count.set(self.borrow_count.get() + 1);
    }
}

impl<'a, T: ?Sized> Deref for RefMut<'a, T> {
    type Target = T;
    fn deref(&self) -> &T {
        self.value
    }
}

impl<'a, T: ?Sized> DerefMut for RefMut<'a, T> {
    fn deref_mut(&mut self) -> &mut T {
        self.value
    }
}

impl<'a, T: Debug> Debug for RefMut<'a, T> {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        Debug::fmt(&**self, f)
    }
}

impl<'a, T: Display> Display for RefMut<'a, T> {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        self.value.fmt(f)
    }
}

impl<'a, T: ?Sized> RefMut<'a, T> {
    pub fn map<U, F>(orig: RefMut<'a, T>, f: F) -> RefMut<'a, U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        let borrow_count = orig.borrow_count;
        let value = orig.value as *mut T;
        std::mem::forget(orig);
        RefMut {
            borrow_count: borrow_count,
            value: f(unsafe { value.as_mut().unwrap() }),
        }
    }

    pub fn map_split<U: ?Sized, V: ?Sized, F>(orig: RefMut<'a, T>, f: F) -> (RefMut<'a, U>, RefMut<'a, V>)
    where
        F: FnOnce(&mut T) -> (&mut U, &mut V),
    {
        let borrow_count = orig.borrow_count;
        let value = orig.value as *mut T;
        std::mem::forget(orig);
        let (a, b) = f(unsafe { value.as_mut().unwrap() });
        borrow_count.set(borrow_count.get() - 1);
        (
            RefMut {
                borrow_count: borrow_count,
                value: a,
            },
            RefMut {
                borrow_count: borrow_count,
                value: b,
            },
        )
    }
}

impl<T> RefManagerDataGuard<T> {
    pub const INIT_PEEK_DATA: RefManagerPeekData<T> = RefManagerPeekData {
        ptr_inner_data: null_mut(),
        ptr_borrow_count: null(),
        ptr_value: null_mut(),
    };

    pub const INIT_SELF: Self = RefManagerDataGuard {
        peek_data: Cell::new(Self::INIT_PEEK_DATA),
    };

    pub fn destroy(&self) -> Result<(), ()> {
        let peek_data = self.peek_data.get();
        let (ptr_inner_data, ptr_borrow_count) =
            (peek_data.ptr_inner_data, peek_data.ptr_borrow_count);
        if ptr_inner_data.is_null() {
            Err(())
        } else {
            let borrow_count = unsafe { ptr_borrow_count.as_ref() }.unwrap().get();
            if borrow_count != 0 {
                panic!("cannot destroy before all references are dropped");
            }
            unsafe { Box::from_raw(ptr_inner_data) };
            self.peek_data.set(Self::INIT_PEEK_DATA);
            Ok(())
        }
    }
}

impl<T> Drop for RefManagerDataGuard<T> {
    fn drop(&mut self) {
        let _ = self.destroy();
    }
}
pub fn map_split<U: ?Sized, V: ?Sized, F>(orig: Ref<'a, T>, f: F) -> (Ref<'a, U>, Ref<'a, V>)
where
    F: FnOnce(&T) -> (&U, &V),
{
    let borrow_count = orig.borrow_count;
    let value = orig.value;
    std::mem::forget(orig);
    let (a, b) = f(value);
    borrow_count.set(borrow_count.get() + 1);
    (
        Ref {
            borrow_count: borrow_count,
            value: a,
        },
        Ref {
            borrow_count: borrow_count,
            value: b,
        },
    )
}
function_block-full_function
[ { "content": "fn __static_ref_initialize() -> X {\n\n X\n\n}\n", "file_path": "tests/test.rs", "rank": 0, "score": 76498.62253932712 }, { "content": "#[test]\n\nfn test_borrow_mut_after_borrow() {\n\n let _a = NUMBER.try_borrow();\n\n let _b = NUMBER.try_borrow_mut();\n\n _a.expect(\"failed\");\n\n _b.expect_err(\"failed\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 1, "score": 69429.13215094148 }, { "content": "#[test]\n\nfn test_borrow_after_borrow_mut() {\n\n let _a = NUMBER.try_borrow_mut();\n\n let _b = NUMBER.try_borrow();\n\n _a.expect(\"failed\");\n\n _b.expect_err(\"failed\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 2, "score": 69429.13215094148 }, { "content": "fn main() {\n\n}\n", "file_path": "compiletest/tests/compile-fail/static_is_sized.rs", "rank": 4, "score": 59422.307794985834 }, { "content": "#[test]\n\nfn lifetime_name() {\n\n let _ = LIFETIME_NAME.borrow();\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 5, "score": 49474.111665683784 }, { "content": "#[test]\n\nfn ref_map() {\n\n ref_thread_local! {\n\n static managed ARR: (u32, u8) = (39, b'b');\n\n }\n\n let r = ARR.borrow();\n\n let s = ref_thread_local::Ref::map(r, |x| &x.0);\n\n assert_eq!(*s, 39);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 6, "score": 49147.09491582841 }, { "content": "#[test]\n\nfn item_name_shadowing() {\n\n assert_eq!(*ITEM_NAME_TEST.borrow(), X);\n\n}\n\n\n\nuse std::sync::atomic::AtomicBool;\n\nuse std::sync::atomic::Ordering::SeqCst;\n\nuse std::sync::atomic::ATOMIC_BOOL_INIT;\n\n\n\nstatic PRE_INIT_FLAG: AtomicBool = ATOMIC_BOOL_INIT;\n\n\n\nref_thread_local! {\n\n static managed PRE_INIT: () = {\n\n PRE_INIT_FLAG.store(true, SeqCst);\n\n ()\n\n };\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 7, "score": 47260.804559818425 }, { "content": "#[test]\n\nfn ref_map_split() {\n\n ref_thread_local! {\n\n static managed ARR: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8];\n\n }\n\n let r = ARR.borrow();\n\n let (a, b) = ref_thread_local::Ref::map_split(r, |x| x.split_at(4));\n\n assert_eq!(&*a, &[1, 2, 3, 4]);\n\n assert_eq!(&*b, &[5, 6, 7, 8]);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 8, "score": 46953.769779566566 }, { "content": "fn run_mode(mode: &'static str) {\n\n let mut config = compiletest::Config::default();\n\n config.mode = mode.parse().expect(\"Invalid mode\");\n\n config.src_base = [\"tests\", mode].iter().collect();\n\n\n\n config.verbose = true;\n\n\n\n config.target_rustcflags = Some(\"-L target/debug/ -L target/debug/deps/\".to_owned());\n\n config.clean_rmeta();\n\n\n\n compiletest::run_tests(&config);\n\n}\n\n\n", "file_path": "compiletest/tests/compile_tests.rs", "rank": 9, "score": 44765.840413639526 }, { "content": "pub trait RefThreadLocal<T> {\n\n fn initialize(&self) -> Result<(), ()>;\n\n fn destroy(&self) -> Result<(), ()>;\n\n fn is_initialized(&self) -> bool;\n\n fn borrow<'a>(&self) -> Ref<'a, T>;\n\n fn borrow_mut<'a>(&self) -> RefMut<'a, T>;\n\n fn try_borrow<'a>(&self) -> Result<Ref<'a, T>, BorrowError>;\n\n fn try_borrow_mut<'a>(&self) -> Result<RefMut<'a, T>, BorrowMutError>;\n\n}\n\n\n\n#[macro_export(local_inner_macros)]\n\n#[doc(hidden)]\n\nmacro_rules! 
_ref_thread_local_internal {\n\n ($(#[$attr:meta])* ($($vis:tt)*) static $N:ident : $T:ty = $e:expr; $($t:tt)*) => {\n\n $crate::_ref_thread_local_internal!(@MAKE TY, $(#[$attr])*, ($($vis)*), $N);\n\n $crate::_ref_thread_local_internal!(@TAIL, $N : $T = $e);\n\n $crate::ref_thread_local!($($t)*);\n\n };\n\n (@TAIL, $N:ident : $T:ty = $e:expr) => {\n\n impl $N {\n", "file_path": "src/lib.rs", "rank": 10, "score": 43371.14612053985 }, { "content": "fn main() {\n\n assert_eq!(*outer::inner::FOO.borrow(), ()); //~ ERROR static `FOO` is private\n\n}\n", "file_path": "compiletest/tests/compile-fail/static_is_private.rs", "rank": 11, "score": 43283.674206126074 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nstruct X;\n", "file_path": "tests/test.rs", "rank": 12, "score": 36740.602408787905 }, { "content": "// error-pattern: the size for values of type `str` cannot be known at compilation time\n\n#[macro_use]\n\nextern crate ref_thread_local_compiletest as ref_thread_local;\n\n\n\nref_thread_local! {\n\n pub static managed FOO: str = panic!();\n\n}\n\n\n", "file_path": "compiletest/tests/compile-fail/static_is_sized.rs", "rank": 13, "score": 33860.641560102486 }, { "content": "struct Once(X);\n\nconst ONCE_INIT: Once = Once(X);\n\nstatic DATA: X = X;\n\nstatic ONCE: X = X;\n", "file_path": "tests/test.rs", "rank": 14, "score": 33757.84565588985 }, { "content": "#[test]\n\nfn s3() {\n\n assert_eq!(&*S3.borrow(), \"ab\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 15, "score": 31363.233855320454 }, { "content": "fn main() {\n\n let x = NUMBER.borrow(); // a Ref<'a, i32>\n\n println!(\"The number is {}.\", x);\n\n}\n\n```\n\n\n\n# Additional Runtime Resource Usage Compared to `thread_local!`\n\nIn current version:\n\n* For each `static` variable in `ref_thread_local!`: 3 pointer variables, 1 `Cell<isize>`, 1 heap allocation.\n\n* For each reference: 1 reference\n\n* For each borrow: some borrow count operations, some function call (may be inlined)\n\n\n\n*/\n\n\n\n#![doc(html_root_url = \"https://docs.rs/ref_thread_local/0.1.0\")]\n\n\n\n#[doc(hidden)]\n\npub use std::ops::Deref as __Deref;\n\n#[doc(hidden)]\n\npub mod refmanager;\n\n#[doc(hidden)]\n\npub use self::refmanager::*;\n\npub use self::refmanager::{Ref, RefMut};\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 31363.233855320454 }, { "content": "#[test]\n\nfn refmut_map() {\n\n ref_thread_local! {\n\n static managed ARR: (u32, u8) = (39, b'b');\n\n }\n\n {\n\n let r = ARR.borrow_mut();\n\n let mut s = ref_thread_local::RefMut::map(r, |x| &mut x.0);\n\n *s = 42;\n\n }\n\n let r = ARR.borrow();\n\n assert_eq!(*r, (42, b'b'));\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 17, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn test_visibility() {\n\n assert_eq!(*visibility::FOO.borrow(), Box::new(0));\n\n assert_eq!(*visibility::inner::BAG.borrow(), Box::new(37));\n\n}\n\n\n\n// This should not cause a warning about a missing Copy implementation\n\nref_thread_local! 
{\n\n pub static managed VAR: i32 = { 0 };\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 18, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn test_basic() {\n\n assert_eq!(&**STRING.borrow(), \"hello\");\n\n assert_eq!(*NUMBER.borrow(), 6);\n\n assert!(HASHMAP.borrow().get(&1).is_some());\n\n assert!(HASHMAP.borrow().get(&3).is_none());\n\n assert_eq!(\n\n &*ARRAY_BOXES.borrow(),\n\n &[Box::new(1), Box::new(2), Box::new(3)]\n\n );\n\n assert_eq!(*UNSAFE.borrow(), std::u32::MAX);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 19, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn pre_init() {\n\n assert_eq!(PRE_INIT_FLAG.load(SeqCst), false);\n\n let _ = PRE_INIT.initialize();\n\n assert_eq!(PRE_INIT_FLAG.load(SeqCst), true);\n\n}\n\n\n\nref_thread_local! {\n\n static managed LIFETIME_NAME: for<'a> fn(&'a u8) = { fn f(_: &u8) {} f };\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 20, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn test_repeat() {\n\n assert_eq!(*NUMBER.borrow(), 6);\n\n assert_eq!(*NUMBER.borrow(), 6);\n\n assert_eq!(*NUMBER.borrow(), 6);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 21, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn test_meta() {\n\n // this would not compile if STRING were not marked #[derive(Copy, Clone)]\n\n let copy_of_string = STRING;\n\n // just to make sure it was copied\n\n assert!(&STRING as *const _ != &copy_of_string as *const _);\n\n // this would not compile if STRING were not marked #[derive(Debug)]\n\n assert_eq!(\n\n format!(\"{:?}\", STRING),\n\n \"STRING { _private_field: () }\".to_string()\n\n );\n\n}\n\n\n\nmod visibility {\n\n use ref_thread_local::RefThreadLocal;\n\n ref_thread_local! {\n\n pub static managed FOO: Box<u32> = Box::new(0);\n\n static managed BAR: Box<u32> = Box::new(98);\n\n }\n\n\n\n pub mod inner {\n", "file_path": "tests/test.rs", "rank": 22, "score": 30184.548215877985 }, { "content": "#[test]\n\nfn refmut_map_split() {\n\n ref_thread_local! {\n\n static managed ARR: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8];\n\n }\n\n {\n\n let r = ARR.borrow_mut();\n\n let (mut a, mut b) = ref_thread_local::RefMut::map_split(r, |x| x.split_at_mut(4));\n\n a.iter_mut().for_each(|v| *v += 1);\n\n b.iter_mut().for_each(|v| *v *= 2);\n\n }\n\n assert_eq!(&*ARR.borrow(), &[2, 3, 4, 5, 10, 12, 14, 16]);\n\n}\n", "file_path": "tests/test.rs", "rank": 23, "score": 29149.90694498762 }, { "content": "#[test]\n\nfn compile_test() {\n\n run_mode(\"compile-fail\");\n\n}\n", "file_path": "compiletest/tests/compile_tests.rs", "rank": 24, "score": 28234.425597555834 }, { "content": "fn transmute() -> X {\n\n X\n\n}\n", "file_path": "tests/test.rs", "rank": 25, "score": 27747.39291783025 }, { "content": "fn require_sync() -> X {\n\n X\n\n}\n", "file_path": "tests/test.rs", "rank": 26, "score": 26712.75164693988 }, { "content": "fn main() { }\n", "file_path": "compiletest/tests/compile-fail/incorrect_visibility_restriction.rs", "rank": 27, "score": 26687.10454170777 }, { "content": "fn main() {\n\n let _ = (|| &*VALUE.borrow())(); //~ ERROR borrowed value does not live long enough\n\n}", "file_path": "compiletest/tests/compile-fail/lifetime_not_long_enough.rs", "rank": 28, "score": 26687.10454170777 }, { "content": "fn times_two(n: u32) -> u32 {\n\n n * 2\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 29, "score": 22889.068179148613 }, { "content": "fn test(_: Vec<X>) -> X {\n\n X\n\n}\n\n\n\n// All these names should not be shadowed\n\nref_thread_local! 
{\n\n static managed ITEM_NAME_TEST: X = {\n\n test(vec![X, Once(X).0, ONCE_INIT.0, DATA, ONCE,\n\n require_sync(), transmute(),\n\n // Except this, which will sadly be shadowed by internals:\n\n // __static_ref_initialize()\n\n ])\n\n };\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 30, "score": 21951.980829255004 }, { "content": "#[macro_use]\n\nextern crate ref_thread_local_compiletest as ref_thread_local;\n\nuse ref_thread_local::RefThreadLocal;\n\n\n\nmod outer {\n\n pub mod inner {\n\n ref_thread_local! {\n\n pub(in outer) static managed FOO: () = ();\n\n }\n\n }\n\n}\n\n\n", "file_path": "compiletest/tests/compile-fail/static_is_private.rs", "rank": 47, "score": 16790.367483462585 }, { "content": "ref_thread_local.rs\n\n==============\n\n\n\nA macro for declaring thread-local `static`s like using both of `lazy_static!` and `RefCell`\n\n\n\nUsing this macro, you can have thread-local `static`s be referenced by `borrow()` function \n\nlike using a `RefCell`.\n\n\n\nYou may also initialize or destroy a `static` variable at any time you like.\n\n\n\n[![Travis-CI Status](https://travis-ci.org/Xeiron/ref_thread_local.rs.svg?branch=master)](https://travis-ci.org/Xeiron/ref_thread_local.rs)\n\n[![Latest version](https://img.shields.io/crates/v/ref_thread_local.svg)](https://crates.io/crates/ref_thread_local)\n\n[![Documentation](https://docs.rs/ref_thread_local/badge.svg)](https://docs.rs/ref_thread_local)\n\n[![License](https://img.shields.io/crates/l/ref_thread_local.svg)](https://github.com/Xeiron/ref_thread_local.rs#license)\n\n\n\n## Minimum supported `rustc`\n\n\n\n`1.30.0+`\n\n\n\n# Getting Started\n\n\n\n[ref_thread_local.rs is available on crates.io](https://crates.io/crates/ref_thread_local).\n\nIt is recommended to look there for the newest released version, as well as links to the newest builds of the docs.\n\n\n\nAt the point of the last update of this README, the latest published version could be used like this:\n\n\n\nAdd the following dependency to your Cargo manifest...\n\n\n\n```toml\n\n[dependencies]\n\nref_thread_local = \"0.0\"\n\n```\n\n\n\n...and see the [docs](https://docs.rs/ref_thread_local) for how to use it.\n\n\n\n# Example\n\n\n\n```rust\n\n#[macro_use]\n\nextern crate ref_thread_local;\n\nuse ref_thread_local::RefThreadLocal;\n\n\n\nref_thread_local! 
{\n\n static managed NUMBER: i32 = 233;\n\n}\n\n\n\nfn main() {\n\n let x = NUMBER.borrow(); // a Ref<'a, i32>\n\n println!(\"The number is {}.\", x);\n\n}\n\n```\n\n\n\n## License\n\n\n\nLicensed under of\n\n * MIT license ([LICENSE](LICENSE) or http://opensource.org/licenses/MIT)\n", "file_path": "README.md", "rank": 48, "score": 10657.741890080282 }, { "content": " fn get_refmanager(&self) -> $crate::RefManager<$T> {\n\n fn init_value() -> $T { $e }\n\n $crate::_create_refmanager_data!(GUARDED_REF_MANAGER_DATA, $T);\n\n $crate::RefManager::new(&GUARDED_REF_MANAGER_DATA, init_value)\n\n }\n\n }\n\n\n\n impl $crate::RefThreadLocal<$T> for $N {\n\n fn initialize(&self) -> ::std::result::Result<(), ()> { self.get_refmanager().initialize() }\n\n fn destroy(&self) -> ::std::result::Result<(), ()> { self.get_refmanager().destroy() }\n\n fn is_initialized(&self) -> bool { self.get_refmanager().is_initialized() }\n\n fn borrow<'_lifetime>(&self) -> $crate::Ref<'_lifetime, $T> { self.get_refmanager().borrow() }\n\n fn borrow_mut<'_lifetime>(&self) -> $crate::RefMut<'_lifetime, $T> { self.get_refmanager().borrow_mut() }\n\n fn try_borrow<'_lifetime>(&self) -> ::std::result::Result<$crate::Ref<'_lifetime, $T>, $crate::BorrowError> { self.get_refmanager().try_borrow() }\n\n fn try_borrow_mut<'_lifetime>(&self) -> ::std::result::Result<$crate::RefMut<'_lifetime, $T>, $crate::BorrowMutError> { self.get_refmanager().try_borrow_mut() }\n\n }\n\n };\n\n (@MAKE TY, $(#[$attr:meta])*, ($($vis:tt)*), $N:ident) => {\n\n #[allow(missing_copy_implementations)]\n\n #[allow(non_camel_case_types)]\n", "file_path": "src/lib.rs", "rank": 49, "score": 21.624899695955087 }, { "content": "\n\nFor a given `static managed NAME: TYPE = EXPR;`, the macro generates a unique type that\n\nimplements `RefThreadLocal<T>` trait and stores it in a static with name `NAME`. (Attributes end up\n\nattaching to this type.)\n\n\n\nWhen calling any method of this unique type, it generated a `RefManager<T>` internally,\n\nwhich manage the reference count of borrowing, and initialize a internal\n\nthread-local `static` variable on calling `initialize()`, `borrow()`, `borrow_mut()`,\n\n`borrow_mut()`, `try_borrow_mut()` only if when uninitialized or destroyed.\n\n\n\nLike `RefCell`, `borrow()` and `borrow_mut()` don't return reference but instead\n\n`Ref<'a, T>` or `RefMut<'a, T>`, which manage a borrow count internally.\n\n\n\nLike `thread_local!`, variables in `ref_thread_local!` will be dropped normally\n\nwhen thread is exiting or `destroy()` is called.\n\n\n\n# Example\n\n\n\nUsing the macro:\n\n\n\n```rust\n\n#[macro_use]\n\nextern crate ref_thread_local;\n\nuse ref_thread_local::RefThreadLocal;\n\n\n\nref_thread_local! {\n\n static managed NUMBER: i32 = 233;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 50, "score": 16.82554860361008 }, { "content": "// Copyright 2018 tuxzz and lazy-static.rs Developers\n\n//\n\n// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,\n\n// at your option. This file may not be copied, modified,\n\n// or distributed except according to those terms.\n\n\n\n/*!\n\nA macro for declaring thread-local `static`s like using both of `lazy_static!` and `RefCell`\n\n\n\nUsing this macro, you can have thread-local `static`s be referenced by `borrow()` function\n\nlike using a `RefCell`.\n\n\n\nYou may also initialize or destroy a `static` variable at any time you like.\n\n\n\n# Syntax\n\n\n\n```ignore\n\nref_thread_local! 
{\n\n [pub] static managed NAME_1: TYPE_1 = EXPR_1;\n\n [pub] static managed NAME_2: TYPE_2 = EXPR_2;\n", "file_path": "src/lib.rs", "rank": 51, "score": 14.333139335431905 }, { "content": "#[macro_use]\n\nextern crate ref_thread_local;\n\nuse ref_thread_local::RefThreadLocal;\n\nuse std::collections::HashMap;\n\n\n\nref_thread_local! {\n\n /// Documentation!\n\n pub static managed NUMBER: u32 = times_two(3);\n\n\n\n static managed ARRAY_BOXES: [Box<u32>; 3] = [Box::new(1), Box::new(2), Box::new(3)];\n\n\n\n /// More documentation!\n\n #[allow(unused_variables)]\n\n #[derive(Copy, Clone, Debug)]\n\n pub static managed STRING: String = \"hello\".to_string();\n\n\n\n static managed HASHMAP: HashMap<u32, &'static str> = {\n\n let mut m = HashMap::new();\n\n m.insert(0, \"abc\");\n\n m.insert(1, \"def\");\n", "file_path": "tests/test.rs", "rank": 52, "score": 14.258031941881084 }, { "content": " ...\n\n [pub] static managed NAME_N: TYPE_N = EXPR_N;\n\n}\n\n```\n\n\n\nAttributes (including doc comments) are supported as well:\n\n\n\n```rust\n\n# #[macro_use]\n\n# extern crate ref_thread_local;\n\n# use ref_thread_local::RefThreadLocal;\n\n# fn main() {\n\nref_thread_local! {\n\n /// This is an example for using doc comment attributes\n\n static managed EXAMPLE: u8 = 42;\n\n}\n\n# }\n\n```\n\n\n\n# Semantics\n", "file_path": "src/lib.rs", "rank": 53, "score": 13.805479818622251 }, { "content": "#[macro_use]\n\nextern crate ref_thread_local_compiletest as ref_thread_local;\n\nuse ref_thread_local::RefThreadLocal;\n\n\n\nref_thread_local! {\n\n static managed VALUE: i32 = 233i32;\n\n}\n\n\n", "file_path": "compiletest/tests/compile-fail/lifetime_not_long_enough.rs", "rank": 54, "score": 11.817896814193777 }, { "content": "extern crate ref_thread_local;\n\npub use self::ref_thread_local::*;\n", "file_path": "compiletest/src/lib.rs", "rank": 55, "score": 11.385233896140672 }, { "content": "// incorrect visibility restriction\n\n#[macro_use]\n\nextern crate ref_thread_local_compiletest as ref_thread_local;\n\n\n\nref_thread_local! {\n\n pub(nonsense) static managed WRONG: () = ();\n\n //~^ ERROR incorrect visibility restriction\n\n}\n\n\n", "file_path": "compiletest/tests/compile-fail/incorrect_visibility_restriction.rs", "rank": 56, "score": 10.888871937713265 }, { "content": " m.insert(2, \"ghi\");\n\n m\n\n };\n\n\n\n // This should not compile if the unsafe is removed.\n\n static managed UNSAFE: u32 = unsafe {\n\n std::mem::transmute::<i32, u32>(-1)\n\n };\n\n}\n\n\n\nref_thread_local! {\n\n static managed S1: &'static str = \"a\";\n\n static managed S2: &'static str = \"b\";\n\n}\n\nref_thread_local! {\n\n static managed S3: String = [*S1.borrow(), *S2.borrow()].join(\"\");\n\n}\n\n\n\n#[test]\n", "file_path": "tests/test.rs", "rank": 57, "score": 10.49024592269263 }, { "content": " ref_thread_local! {\n\n pub(in super) static managed BAZ: Box<u32> = Box::new(42);\n\n pub(crate) static managed BAG: Box<u32> = Box::new(37);\n\n }\n\n }\n\n\n\n #[test]\n\n fn sub_test() {\n\n assert_eq!(**FOO.borrow(), 0);\n\n assert_eq!(**BAR.borrow(), 98);\n\n assert_eq!(**inner::BAZ.borrow(), 42);\n\n assert_eq!(**inner::BAG.borrow(), 37);\n\n }\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 58, "score": 9.039162526160615 }, { "content": " #[allow(dead_code)]\n\n $(#[$attr])*\n\n $($vis)* struct $N { _private_field: () }\n\n #[doc(hidden)]\n\n $($vis)* static $N: $N = $N { _private_field: () };\n\n };\n\n () => ()\n\n}\n\n\n\n#[macro_export(local_inner_macros)]\n\nmacro_rules! 
ref_thread_local {\n\n ($(#[$attr:meta])* static managed $N:ident : $T:ty = $e:expr; $($t:tt)*) => {\n\n $crate::_ref_thread_local_internal!($(#[$attr])* () static $N : $T = $e; $($t)*);\n\n };\n\n ($(#[$attr:meta])* pub static managed $N:ident : $T:ty = $e:expr; $($t:tt)*) => {\n\n $crate::_ref_thread_local_internal!($(#[$attr])* (pub) static $N : $T = $e; $($t)*);\n\n };\n\n ($(#[$attr:meta])* pub ($($vis:tt)+) static managed $N:ident : $T:ty = $e:expr; $($t:tt)*) => {\n\n $crate::_ref_thread_local_internal!($(#[$attr])* (pub ($($vis)+)) static $N : $T = $e; $($t)*);\n\n };\n\n () => ()\n\n}\n", "file_path": "src/lib.rs", "rank": 59, "score": 8.015707840952558 }, { "content": "extern crate compiletest_rs as compiletest;\n\n\n", "file_path": "compiletest/tests/compile_tests.rs", "rank": 60, "score": 5.012983032258669 } ]
Rust
src/parser/combinators.rs
ireina7/abyss.rs
2c374996d68a6940a6100ff41e1912d3daca9db9
use super::core::*; #[allow(dead_code)] pub fn wrap<P: Parser>(p: P) -> Wrapper<P> { Wrapper::new(p) } #[allow(dead_code)] pub fn pure<A: Clone>(x: A) -> Pure<A> { Pure::new(x) } #[allow(dead_code)] pub fn satisfy<F>(f: F) -> Satisfy<F> where F: Fn(&char) -> bool { Satisfy::new(f) } #[allow(dead_code)] pub fn fix<'a, A, F>(fix: F) -> Fix<'a, A> where F: for<'f> Fn(&'f Fix<'a, A>) -> Box<dyn Parser<Output=A> + 'f> + 'a { Fix::new(fix) } #[allow(dead_code)] pub fn many<P: Parser>(p: P) -> Many<P> { Many::new(p) } #[allow(dead_code)] pub fn at_least_1<P: Parser>(p: P) -> Many1<P> { Many1::new(p) } #[allow(dead_code)] pub fn many1<P: Parser>(p: P) -> Many1<P> { at_least_1(p) } #[allow(dead_code)] pub fn char(ch: char) -> Char { Char { ch } } #[allow(dead_code)] pub fn digit() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|&c| c.is_digit(10)).info("Parsing single digit") } #[allow(dead_code)] pub fn digits() -> Wrapper<impl Parser<Output=Vec<char>> + Clone> { many(digit()).info("Parsing many digits") } #[allow(dead_code)] pub fn letter() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|&c| c.is_alphabetic()).info("Parsing single letter") } #[allow(dead_code)] pub fn letters() -> Wrapper<impl Parser<Output=Vec<char>> + Clone> { many(letter()).info("Parsing many letters") } #[allow(dead_code)] pub fn blank() -> Wrapper<impl Parser<Output=String> + Clone> { many(char(' ') | char('\t') | char('\n')).map(|xs| xs.into_iter().collect()) .info("Parsing blanks") } #[allow(dead_code)] pub fn identifier() -> Wrapper<impl Parser<Output=String> + Clone> { (letter() >> move |x_| many(letter().or(digit()).or(char('_'))) >> move |xs| pure(vec![x_].into_iter().chain(xs.into_iter()).collect::<String>())) .info("Parsing identifier") } #[allow(dead_code)] pub fn any() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|_| true).info("Parsing any char") } #[allow(dead_code)] pub fn one_of(cs: &str) -> Wrapper<impl Parser<Output=char> + Clone> { let ss = cs.to_string(); satisfy(move |&c| ss.chars().any(|x| x == c)) .info(&format!("Parsing char of one of {}", cs)) } #[allow(dead_code)] pub fn except(cs: &str) -> Wrapper<impl Parser<Output=char> + Clone> { let ss = cs.to_string(); satisfy(move |&c| !ss.chars().any(|x| x == c)) .info(&format!("Parsing char except one of {}", cs)) } #[allow(dead_code)] pub fn identifiers_sep_by_blank() -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { many( identifier() >> move |x| blank() >> move |_| pure(x.clone())) .info("Parsing identifiers") } #[allow(dead_code)] pub fn list_of_identifiers_sep_by_blank() -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { (char('(') >> move |_| identifiers_sep_by_blank() >> move |s| char(')') >> move |_| pure(s.clone())) .info("Parsing list of identifiers") } /* pub fn list<P: Parser<Output=Vec<String>> + Clone>(p: Wrapper<P>) -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { wrap( char('(') >> move |__| many(identifier()) >> move |xs| char(')') >> move |__| pure(xs.clone()) ) } */ #[cfg(test)] mod tests { use super::*; #[test] fn test_parser_monad() { let mut src = ParseState::new("a0bcdefghijklmn"); let ans = char('a') .and_then(|_| digit()) .and_then(|_| char('b')); assert_eq!(ans.parse(&mut src), Ok('b')); } /* #[test] fn test_parser_monad_do_notation() { let mut src = ParseState::new("a0bcdefghijklmn"); let parser = do_parse! 
{ a =o char('a'), _ =o digit() , b =o char('b'), =o satisfy(move |&c| c == a || c == b || c == 'c') }; assert_eq!(parser.parse(&mut src), Ok('c')); }*/ #[test] fn test_parser_many() { let mut src = ParseState::new("aa0bcdefghijklmn"); let parser = many(char('a')); assert_eq!(parser.parse(&mut src), Ok(vec!['a', 'a'])); } #[test] fn test_parser_many1() { let mut src = ParseState::new("aa01bcdefghijklmn"); let parser0 = at_least_1(char('a')); let parser1 = digits(); let parser2 = many1(char('a')); assert_eq!(parser0.parse(&mut src), Ok(vec!['a', 'a'])); assert_eq!(parser1.parse(&mut src), Ok(vec!['0', '1'])); assert_eq!(parser2.parse(&mut src).ok(), None); } #[test] fn test_parser_identifier() { let mut src = ParseState::new("hello0)"); let parser = identifier(); assert_eq!(parser.parse(&mut src), Ok("hello0".into())); } /* #[test] fn test_parser_string() { let mut src = ParseState::new("hello0%"); let parser = string("hell"); assert_eq!(parser.parse(&mut src).ok(), Some("hell".into())); //assert_eq!(src.next(), Some('o')); }*/ #[test] fn test_parser_map() { let mut src = ParseState::new("hello0%"); let parser = letters().map(|cs| cs.into_iter().map(|c| if c == 'l' { 'x' } else { c }).collect::<String>()); assert_eq!(parser.parse(&mut src).ok(), Some("hexxo".into())); } #[test] fn test_parse_list() { let mut src = ParseState::new("(Hello world)"); let parser = list_of_identifiers_sep_by_blank(); assert_eq!(parser.parse(&mut src), Ok(vec!["Hello", "world"].into_iter().map(|s| s.into()).collect())); } #[test] fn test_parser_blank() { let mut src = ParseState::new("( )"); let parser = char('(').and(blank()); assert_eq!(parser.parse(&mut src).ok(), Some(" ".into())); } #[test] fn test_parser_fix() { let mut src = ParseState::new("....@"); let parser = fix(|f| Box::new( char('.') >> move |_| f.clone().or(char('@')) >> move |xs| pure(xs.clone()) )); assert_eq!(parser.parse(&mut src).ok(), Some('@')); } }
use super::core::*; #[allow(dead_code)] pub fn wrap<P: Parser>(p: P) -> Wrapper<P> { Wrapper::new(p) } #[allow(dead_code)] pub fn pure<A: Clone>(x: A) -> Pure<A> { Pure::new(x) } #[allow(dead_code)] pub fn satisfy<F>(f: F) -> Satisfy<F> where F: Fn(&char) -> bool { Satisfy::new(f) } #[allow(dead_code)] pub fn fix<'a, A, F>(fix: F) -> Fix<'a, A> where F: for<'f> Fn(&'f Fix<'a, A>) -> Box<dyn Parser<Output=A> + 'f> + 'a { Fix::new(fix) } #[allow(dead_code)] pub fn many<P: Parser>(p: P) -> Many<P> { Many::new(p) } #[allow(dead_code)] pub fn at_least_1<P: Parser>(p: P) -> Many1<P> { Many1::new(p) } #[allow(dead_code)] pub fn many1<P: Parser>(p: P) -> Many1<P> { at_least_1(p) } #[allow(dead_code)] pub fn char(ch: char) -> Char { Char { ch } } #[allow(dead_code)] pub fn digit() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|&c| c.is_digit(10)).info("Parsing single digit") } #[allow(dead_code)] pub fn digits() -> Wrapper<impl Parser<Output=Vec<char>> + Clone> { many(digit()).info("Parsing many digits") } #[allow(dead_code)] pub fn letter() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|&c| c.is_alphabetic()).info("Parsing single letter") } #[allow(dead_code)] pub fn letters() -> Wrapper<impl Parser<Output=Vec<char>> + Clone> { many(letter()).info("Parsing many letters") } #[allow(dead_code)] pub fn blank() -> Wrapper<impl Parser<Output=String> + Clone> { many(char(' ') | char('\t') | char('\n')).map(|xs| xs.into_iter().collect()) .info("Parsing blanks") } #[allow(dead_code)] pub fn identifier() -> Wrapper<impl Parser<Output=String> + Clone> { (letter() >> move |x_| many(letter().or(digit()).or(char('_'))) >> move |xs| pure(vec![x_].into_iter().chain(xs.into_iter()).collect::<String>())) .info("Parsing identifier") } #[allow(dead_code)] pub fn any() -> Wrapper<impl Parser<Output=char> + Clone> { satisfy(|_| true).info("Parsing any char") } #[allow(dead_code)] pub fn one_of(cs: &str) -> Wrapper<impl Parser<Output=char> + Clone> { let ss = cs.to_string(); satisfy(move |&c| ss.chars().any(|x| x == c))
.parse(&mut src), Ok('c')); }*/ #[test] fn test_parser_many() { let mut src = ParseState::new("aa0bcdefghijklmn"); let parser = many(char('a')); assert_eq!(parser.parse(&mut src), Ok(vec!['a', 'a'])); } #[test] fn test_parser_many1() { let mut src = ParseState::new("aa01bcdefghijklmn"); let parser0 = at_least_1(char('a')); let parser1 = digits(); let parser2 = many1(char('a')); assert_eq!(parser0.parse(&mut src), Ok(vec!['a', 'a'])); assert_eq!(parser1.parse(&mut src), Ok(vec!['0', '1'])); assert_eq!(parser2.parse(&mut src).ok(), None); } #[test] fn test_parser_identifier() { let mut src = ParseState::new("hello0)"); let parser = identifier(); assert_eq!(parser.parse(&mut src), Ok("hello0".into())); } /* #[test] fn test_parser_string() { let mut src = ParseState::new("hello0%"); let parser = string("hell"); assert_eq!(parser.parse(&mut src).ok(), Some("hell".into())); //assert_eq!(src.next(), Some('o')); }*/ #[test] fn test_parser_map() { let mut src = ParseState::new("hello0%"); let parser = letters().map(|cs| cs.into_iter().map(|c| if c == 'l' { 'x' } else { c }).collect::<String>()); assert_eq!(parser.parse(&mut src).ok(), Some("hexxo".into())); } #[test] fn test_parse_list() { let mut src = ParseState::new("(Hello world)"); let parser = list_of_identifiers_sep_by_blank(); assert_eq!(parser.parse(&mut src), Ok(vec!["Hello", "world"].into_iter().map(|s| s.into()).collect())); } #[test] fn test_parser_blank() { let mut src = ParseState::new("( )"); let parser = char('(').and(blank()); assert_eq!(parser.parse(&mut src).ok(), Some(" ".into())); } #[test] fn test_parser_fix() { let mut src = ParseState::new("....@"); let parser = fix(|f| Box::new( char('.') >> move |_| f.clone().or(char('@')) >> move |xs| pure(xs.clone()) )); assert_eq!(parser.parse(&mut src).ok(), Some('@')); } }
.info(&format!("Parsing char of one of {}", cs)) } #[allow(dead_code)] pub fn except(cs: &str) -> Wrapper<impl Parser<Output=char> + Clone> { let ss = cs.to_string(); satisfy(move |&c| !ss.chars().any(|x| x == c)) .info(&format!("Parsing char except one of {}", cs)) } #[allow(dead_code)] pub fn identifiers_sep_by_blank() -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { many( identifier() >> move |x| blank() >> move |_| pure(x.clone())) .info("Parsing identifiers") } #[allow(dead_code)] pub fn list_of_identifiers_sep_by_blank() -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { (char('(') >> move |_| identifiers_sep_by_blank() >> move |s| char(')') >> move |_| pure(s.clone())) .info("Parsing list of identifiers") } /* pub fn list<P: Parser<Output=Vec<String>> + Clone>(p: Wrapper<P>) -> Wrapper<impl Parser<Output=Vec<String>> + Clone> { wrap( char('(') >> move |__| many(identifier()) >> move |xs| char(')') >> move |__| pure(xs.clone()) ) } */ #[cfg(test)] mod tests { use super::*; #[test] fn test_parser_monad() { let mut src = ParseState::new("a0bcdefghijklmn"); let ans = char('a') .and_then(|_| digit()) .and_then(|_| char('b')); assert_eq!(ans.parse(&mut src), Ok('b')); } /* #[test] fn test_parser_monad_do_notation() { let mut src = ParseState::new("a0bcdefghijklmn"); let parser = do_parse! { a =o char('a'), _ =o digit() , b =o char('b'), =o satisfy(move |&c| c == a || c == b || c == 'c') }; assert_eq!(parser
random
[ { "content": "/// Check if variable is atom (normal form)\n\nfn is_atom(s: &str) -> bool {\n\n let atoms = [\"True\", \"False\"];\n\n atoms.iter().any(|&x| x == s)\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 18, "score": 143964.6489613122 }, { "content": "/// Test if the arithmetic operator is valid\n\nfn is_arith(op: &str) -> bool {\n\n [\"+\", \"-\", \"*\", \"/\"].iter().any(|&x| x == op)\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 20, "score": 141074.22525750796 }, { "content": "#[inline]\n\npub fn unpack(xs: Object) -> Object {\n\n use Object::*;\n\n match xs {\n\n List(xs) => match &xs[..] {\n\n [] => List(xs),\n\n [x, xs @ ..] => List(vec![Cons(\"::\".into()), x.clone(), List(xs.to_vec())]),\n\n }\n\n others => others,\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n#[cfg(test)]\n", "file_path": "src/abyss/logic.rs", "rank": 22, "score": 133625.79312014533 }, { "content": "pub fn is_decl(obj: &Object) -> bool {\n\n use Object::*;\n\n match obj {\n\n List(xs) => match &xs[..] {\n\n [Var(op), _, _] if op == \"define\" => true,\n\n [Var(op), _, _] if op == \"data\" => todo!(),\n\n _ => false\n\n }\n\n _ => false\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/decl.rs", "rank": 23, "score": 131816.43261114307 }, { "content": "pub fn magma() -> Wrapper<impl Parser<Output=String> + Clone> {\n\n\n\n (satisfy(|&c| \n\n !c.is_digit(10) && \"'( )\\\"\".chars().all(|x| c != x))\n\n >> move |x_|\n\n many(except(\"( )\\\"\")) >> move |xs|\n\n pure(vec![x_].into_iter().chain(xs.into_iter()).collect::<String>()))\n\n .info(\"Parsing magma identifier\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 24, "score": 117938.28082776304 }, { "content": "fn prompt(s: &str) {\n\n print!(\"{} < \", s);\n\n io::stdout().flush().ok().expect(\"Could not flush stdout\");\n\n}\n\n\n", "file_path": "src/abyss/repl.rs", "rank": 25, "score": 96743.46661386677 }, { "content": "#[inline]\n\nfn write_list(f: &mut fmt::Formatter, xs: &Object) -> fmt::Result {\n\n write!(f, \"(\")?;\n\n write_list_tailrec(xs, f)\n\n}\n", "file_path": "src/abyss/object.rs", "rank": 26, "score": 93999.62542633477 }, { "content": "#[inline]\n\nfn write_list_tailrec(xs: &Object, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Object::*;\n\n match xs {\n\n List(xs) => match &xs[..] {\n\n [Cons(_), x, xs] => {\n\n write!(f, \" {}\", x)?;\n\n match xs {\n\n List(_) => write_list_tailrec(xs, f),\n\n Thunk(_, _, _) => write!(f, \" ...)\"),\n\n _ => write!(f, \")\"),\n\n }\n\n },\n\n [] => write!(f, \")\"),\n\n _ => todo!()\n\n }\n\n _ => todo!()\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/object.rs", "rank": 27, "score": 92659.43454160148 }, { "content": "pub fn env() -> Env {\n\n let f = |s: &str| Rc::new(s.parse::<Object>().unwrap()); // You need to make sure no panic!\n\n let env = vec![\n\n //(\"fix\", \"(lambda (f) ((lambda (x) (f (lambda (v) (x x v)))) (lambda (x) (f (lambda (v) (x x v))))))\"),\n\n //(\"fix\", \"(lambda (f) ((lambda (x) (f (x x))) (lambda (x) (f (x x)))))\"),\n\n //(\"lazy\", \"(lambda (x) (lazy x))\"),\n\n //(\"!\", \"(lambda (x) (! 
x))\"),\n\n (\"+\", \"(-> Int Int Int)\"),\n\n (\"-\", \"(-> Int Int Int)\"),\n\n (\"*\", \"(-> Int Int Int)\"),\n\n (\"/\", \"(-> Int Int Int)\"),\n\n (\"<\", \"(-> Int Int Bool)\"),\n\n (\">\", \"(-> Int Int Bool)\"),\n\n (\"==\", \"(-> Int Int Bool)\"),\n\n (\"/=\", \"(-> Int Int Bool)\"),\n\n (\"<=\", \"(-> Int Int Bool)\"),\n\n (\">=\", \"(-> Int Int Bool)\"),\n\n //(\"::\", \"(-> Int Int Bool)\"),\n\n //(\"head\", \"(lambda (xs) (head xs))\"),\n\n //(\"tail\", \"(lambda (xs) (tail xs))\"),\n\n ];\n\n Env::new_from(env.into_iter().map(|(str, src)| (str.to_string(), f(src))).collect())\n\n}\n\n\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/checker.rs", "rank": 28, "score": 90874.15109576863 }, { "content": "#[allow(dead_code)]\n\nfn string(s: &str) -> ParseString {\n\n ParseString { s: s.into() }\n\n}\n", "file_path": "src/parser/core.rs", "rank": 29, "score": 90323.80466313568 }, { "content": "#[inline]\n\nfn eval_cons(x: Object, xs: Object, env: &mut Env) -> Result<Object> {\n\n use Object::*;\n\n //let xs = evaluate(xs, env)?;\n\n let xs = wrap(None, xs, env.clone())?;\n\n let x = wrap(None, x , env.clone())?;\n\n Ok(Object::List(vec![Cons(\"::\".into()), x, xs]))\n\n}\n\n\n\n/// Handle list constructions\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 30, "score": 89224.89915076106 }, { "content": "#[allow(dead_code)]\n\npub fn env() -> Env {\n\n //use Object::*;\n\n let env = Env::new();\n\n let f = |s: &str| Rc::new(s.parse::<Object>().unwrap().eval(&env).unwrap()); // You need to make sure no panic!\n\n let env = vec![\n\n (\"fix\", \"(lambda (f) ((lambda (x) (f (lambda (v) (x x v)))) (lambda (x) (f (lambda (v) (x x v))))))\"),\n\n //(\"fix\", \"(lambda (f) ((lambda (x) (f (x x))) (lambda (x) (f (x x)))))\"),\n\n (\"lazy\", \"(lambda (x) (lazy x))\"),\n\n (\"!\", \"(lambda (x) (! 
x))\"),\n\n (\"+\", \"(lambda (x y) (+ x y))\"),\n\n (\"-\", \"(lambda (x y) (- x y))\"),\n\n (\"*\", \"(lambda (x y) (* x y))\"),\n\n (\"/\", \"(lambda (x y) (/ x y))\"),\n\n (\"<\", \"(lambda (x y) (< x y))\"),\n\n (\">\", \"(lambda (x y) (> x y))\"),\n\n (\"==\", \"(lambda (x y) (== x y))\"),\n\n (\"/=\", \"(lambda (x y) (/= x y))\"),\n\n (\"<=\", \"(lambda (x y) (<= x y))\"),\n\n (\">=\", \"(lambda (x y) (>= x y))\"),\n\n (\"::\", \"(lambda (x xs) (:: x xs))\"),\n\n //(\"head\", \"(lambda (xs) (head xs))\"),\n\n //(\"tail\", \"(lambda (xs) (tail xs))\"),\n\n ];\n\n Env::new_from(env.into_iter().map(|(str, src)| (str.to_string(), f(src))).collect())\n\n}\n\n\n", "file_path": "src/abyss/eval/core.rs", "rank": 31, "score": 88871.22802690812 }, { "content": "pub fn repl() -> io::Result<()> {\n\n let stdin = io::stdin();\n\n //let mut input = String::new();\n\n let mut env = eval::env();\n\n let mut log = Backtrace::new();\n\n let env_repl = vec![\n\n (\"quit\" , \"(lambda () (quit))\"),\n\n (\"help\" , \"(lambda () (help))\"),\n\n (\"lazy\" , \"(lambda () (lazy))\"),\n\n (\"debug\", \"(lambda () (debug))\"),\n\n (\"eager\", \"(lambda () (eager))\"),\n\n ];\n\n let f = |s: &str| std::rc::Rc::new(s.parse::<Object>().unwrap().eval(&env).unwrap()); // You need to make sure no panic!\n\n let env_repl = Env::new_from(env_repl.into_iter().map(|(str, src)| (str.to_string(), f(src))).collect());\n\n env.extend(env_repl.env.into_iter());\n\n let label = \"abyss\";\n\n let mut debug_mode = false;\n\n let mut eager_mode = false;\n\n prompt(label);\n\n for line in stdin.lock().lines() {\n", "file_path": "src/abyss/repl.rs", "rank": 32, "score": 86841.98620458321 }, { "content": "/// Handle cons expression\n\nfn eval_cons(x: &Object, xs: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n let xs = evaluate(xs, env)?;\n\n match xs {\n\n //Object::Nil => Ok(Object::List(vec![evaluate(x, env)?])),\n\n Object::List(xs) => Ok(Object::List(vec![evaluate(x, env)?].into_iter().chain(xs.into_iter()).collect())),\n\n _ => Err(EvalError {msg: format!(\"Cons error: {:?}\", xs)})\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 33, "score": 85036.09125042088 }, { "content": "/// The main Eval trait, should be able to display and debug (Clone to store in Env)\n\n/// \n\n/// The generic type `Output` is exposed in order to evaluate abyss expression to various\n\n/// target output in the future.\n\npub trait Eval<Output>: fmt::Display + fmt::Debug + Clone {\n\n type Error;\n\n fn eval(&self, env: &Env) -> std::result::Result<Output, Self::Error>;\n\n}\n\n\n\n/// The default environment for evaluation\n", "file_path": "src/abyss/eval/core.rs", "rank": 34, "score": 84955.94305213055 }, { "content": "/// Handle cons expression\n\nfn eval_cons(x: Object, xs: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n let xs = evaluate(xs, env, backtrace)?;\n\n match xs {\n\n List(xs) => Ok(List(vec![Cons(\"::\".into()), evaluate(x, env, backtrace)?, List(xs)])),\n\n _ => Err(EvalError::new(format!(\"Cons error: {:?}\", xs), backtrace.clone()))\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 35, "score": 80621.16779303874 }, { "content": "fn parse_to_object(src: &str) -> Result<Object, ParseError> {\n\n\n\n let mut src = ParseState::new(src.trim());\n\n fn item<P: Parser + Clone>(p: P) -> Wrapper<impl Parser<Output=P::Output> + Clone> where\n\n P::Output : Clone {\n\n\n\n (p.and_then(move |x: P::Output| blank().and_then(move |_| 
pure(x.clone())))).wrap()\n\n }\n\n let parser = atom() | fix(|list| Box::new (\n\n char('(') >> move |_|\n\n many(\n\n item(number()) | item(variable()) | item(symbol()) | item(string()) | list.clone()\n\n ) >> move |xs|\n\n char(')').and(blank()) >> move |_|\n\n pure(Object::List(xs.clone()))\n\n ));\n\n\n\n let ans = parser.parse(&mut src)?;\n\n Ok(conversions(ans))\n\n}\n", "file_path": "src/abyss/parser.rs", "rank": 36, "score": 79789.16763579659 }, { "content": "pub trait CustomObjClone {\n\n fn clone_boxed_obj(&self) -> Box<dyn CustomObj>;\n\n}\n\n\n\nimpl<T> CustomObjClone for T\n\n where T: 'static + CustomObj + Clone {\n\n fn clone_boxed_obj(&self) -> Box<dyn CustomObj> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn CustomObj> {\n\n fn clone(&self) -> Box<dyn CustomObj> {\n\n self.clone_boxed_obj()\n\n }\n\n}\n\n\n\nimpl Hash for Box<dyn CustomObj> {\n\n fn hash<H>(&self, state: &mut H) where H: Hasher {\n\n self.hash_dyn().hash(state)\n", "file_path": "src/abyss/object.rs", "rank": 37, "score": 79670.04043642682 }, { "content": "fn string() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n (\n\n char('\"').and(many(satisfy(|&c| c != '\"'))) >> move |s| {\n\n char('\"') >> move |_|\n\n pure(Object::Str(s.clone().into_iter().collect()))\n\n }).info(\"Parsing string\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 38, "score": 78575.72990900758 }, { "content": "fn variable() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n magma()\n\n .map(|s| Object::Var(s))\n\n .info(\"Parsing variable\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 39, "score": 78575.72990900758 }, { "content": "fn real() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n (many1(digit()) >> move |l|\n\n char('.') >> move |_| {\n\n let a = l.clone().into_iter().collect::<String>();\n\n many1(digit()) >> move |r| {\n\n let b = r.clone().into_iter().collect::<String>();\n\n pure(Object::Real(format!(\"{}.{}\", a, b).parse().unwrap()))\n\n }\n\n }).info(\"Parsing real number\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 40, "score": 78575.72990900758 }, { "content": "fn integer() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n many1(digit()).map(|s| Object::Integer(s.into_iter().collect::<String>().parse().unwrap()))\n\n .info(\"Parsing integer\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 41, "score": 78575.72990900758 }, { "content": "fn number() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n (real() | integer()).info(\"Parsing number\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 42, "score": 78575.72990900758 }, { "content": "fn atom() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n (variable() | number() | symbol() | string()).wrap()\n\n}\n\n\n\n\n\n\n\n\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct Pattern {\n\n expr: Object\n\n}\n\n\n\nimpl Pattern {\n\n pub fn unwrap(self) -> Object {\n\n self.expr\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 43, "score": 78575.72990900758 }, { "content": "fn symbol() -> Wrapper<impl Parser<Output=Object> + Clone> {\n\n char('\\'').and(identifier()).map(|s| Object::Symbol(s)).info(\"Parsing symbol\")\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 44, "score": 78575.72990900758 }, { "content": "fn check_occurs(v: &Object, term: &Object, unv: &Env) -> bool {\n\n use Object::*;\n\n if v == term {\n\n return true;\n\n }\n\n match (v, term) {\n\n (Var(_), Var(_)) if unv.contains_key(term) => {\n\n check_occurs(v, &unv[term], 
unv)\n\n },\n\n (Var(_), List(xs)) => xs.iter().any(|x| check_occurs(v, x, unv)),\n\n _ => false\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/logic.rs", "rank": 45, "score": 76925.50465308473 }, { "content": "//(let ((gen (lambda (s n) (case n ((0 ()) (n (cons s (gen s (- n 1))))))))) (gen 'T_T 200))\n\n/// Handle basic function application `(f x)`\n\n/// \n\n/// `f` and `x` should have been evaluated!\n\nfn apply(f: Object, x: Object) -> Result<Object, EvalError> {\n\n use Object::*;\n\n \n\n match f {\n\n Closure(name, params, expr, env) => {\n\n // Unification should be invoked here, but we only allow single variable here to debug...\n\n let mut env = env;\n\n \n\n match *params {\n\n List(ps) => match &ps[..] {\n\n [ ] => evaluate(&*expr, &mut env),\n\n [pat] => {\n\n let bind = pat.unify(&x);\n\n if let Ok(bind) = bind {\n\n let bind: HashMap<String, Rc<Object>> = bind.into_iter().map(|(k, v)| (k, Rc::new(v))).collect();\n\n if let Some(name) = name {\n\n env.insert(\n\n name.clone(), \n\n Rc::new(Closure(Some(name), Box::new(List(ps.to_vec())), expr.clone(), env.clone()))\n\n );\n", "file_path": "src/abyss/eval/strict.rs", "rank": 46, "score": 74317.47077930241 }, { "content": "#[inline]\n\nfn eval_list(xs: &[Object], env: &mut Env) -> Result<Object> {\n\n let mut v = vec![];\n\n for x in xs {\n\n let x = wrap(None, x.clone(), env.clone())?;\n\n v.push(x);\n\n }\n\n Ok(Object::List(v))\n\n}\n\n\n\n/*\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 47, "score": 73414.58198492462 }, { "content": "fn eval_head(xs: &Object, env: &mut Env) -> Result<Object> {\n\n use Object::*;\n\n match force(xs, env)? {\n\n List(xs) => match &xs[..] {\n\n [x, ..] => Ok(x.clone()),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get head of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get head from {:?}\", others) })\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 48, "score": 73414.58198492462 }, { "content": "fn eval_head(xs: &Object, env: &mut Env) -> Result<Object> {\n\n match evaluate(xs, env)? {\n\n Object::List(xs) => match &xs[..] {\n\n [x, ..] => Ok(x.clone()),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get head of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get head from {:?}\", others) })\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 49, "score": 73414.58198492462 }, { "content": "fn eval_tail(xs: &Object, env: &mut Env) -> Result<Object> {\n\n //println!(\"{:?}\", xs);\n\n use Object::*;\n\n match force(xs, env)? {\n\n List(xs) => match &xs[..] {\n\n [_, xs @ ..] => Ok(Object::List(xs.to_vec())),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get tail of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get tail from {:?}\", others) })\n\n }\n\n}\n\n*/\n\n\n\n/// Thunk evaluation\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 50, "score": 73414.58198492462 }, { "content": "fn eval_tail(xs: &Object, env: &mut Env) -> Result<Object> {\n\n //println!(\"{:?}\", xs);\n\n match evaluate(xs, env)? {\n\n Object::List(xs) => match &xs[..] {\n\n [_, xs @ ..] 
=> Ok(Object::List(xs.to_vec())),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get tail of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get tail from {:?}\", others) })\n\n }\n\n}\n\n*/\n\n\n\n/// Thunk evaluation\n", "file_path": "src/abyss/eval/strict.rs", "rank": 51, "score": 73414.58198492462 }, { "content": "#[inline]\n\npub fn weak(ps: Vec<Object>, expr: Object) -> Object {\n\n use Object::*;\n\n List(vec![Var(\"lambda\".into()), List(ps), expr])\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n/// Module for all atom evaluations\n\n/// Including:\n\n/// + - * / == /= < > <= >=\n\npub mod atom {\n\n\n\n use super::*;\n\n use std::collections::HashMap;\n", "file_path": "src/abyss/eval/core.rs", "rank": 52, "score": 72410.45517188463 }, { "content": "pub fn eval_decl(decl: &Object, env: &mut Env) -> Result<()> {\n\n use Object::*;\n\n let mut log = Backtrace::new();\n\n match decl {\n\n List(xs) => match &xs[..] {\n\n [Var(op), def, expr] if op == \"define\" => bind(def.clone(), expr.clone(), env, &mut log),\n\n [Var(op), _, _] if op == \"data\" => todo!(),\n\n _ => Err(EvalError::msg(format!(\"Evaluation error: wrong format of declarations: {:?}\", decl)))\n\n }\n\n _ => Err(EvalError::msg(format!(\"Evaluation error: wrong format of declarations: {:?}\", decl)))\n\n }\n\n}", "file_path": "src/abyss/eval/decl.rs", "rank": 53, "score": 71089.7249106121 }, { "content": "/// Handle list constructions\n\nfn eval_list(xs: &[Object], env: &mut Env) -> Result<Object, EvalError> {\n\n let mut v = vec![];\n\n for x in xs {\n\n let x = evaluate(x, env)?;\n\n v.push(x);\n\n }\n\n Ok(Object::List(v))\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 54, "score": 69647.1263968522 }, { "content": "fn eval_tail(xs: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n //println!(\"{:?}\", xs);\n\n match evaluate(xs, env)? {\n\n Object::List(xs) => match &xs[..] {\n\n [_, xs @ ..] => Ok(Object::List(xs.to_vec())),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get tail of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get tail from {:?}\", others) })\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 55, "score": 69647.1263968522 }, { "content": "fn eval_head(xs: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n match evaluate(xs, env)? {\n\n Object::List(xs) => match &xs[..] {\n\n [x, ..] => Ok(x.clone()),\n\n _ => Err(EvalError { msg: format!(\"Eval error: Can not get head of an empty list.\") })\n\n },\n\n others => Err(EvalError { msg: format!(\"Eval error: Can not get head from {:?}\", others) })\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 56, "score": 69647.1263968522 }, { "content": "/// Handle basic function application `(f x)`\n\n/// \n\n/// `f` and `x` should have been evaluated (lazy?)!\n\nfn apply(f: Object, x: Object, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n //let g = f.clone();\n\n match &f {\n\n Closure(name, params, expr, env) => {\n\n // Unification should be invoked here, but we only allow single variable here to debug...\n\n let mut env = env.clone();\n\n \n\n match params.borrow() {\n\n List(ps) => match &ps[..] 
{\n\n [ ] => if x == Nil {\n\n if let Some(name) = name {\n\n //backtrace.push(format!(\"Apply function: {}\", name));\n\n env.insert(name.clone(), Rc::new(f.clone()));\n\n }\n\n evaluate(Object::clone(expr.borrow()), &mut env, backtrace)\n\n } else {\n\n Err(EvalError::new(format!(\"Applying error: unexpected parameter: {}\", x), backtrace.clone()))\n\n },\n\n [pat] => {\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 57, "score": 69343.985367792 }, { "content": "/// Handle basic function application `(f x)`\n\n/// \n\n/// `f` and `x` should have been evaluated (lazy?)!\n\nfn apply(f: Object, x: Object, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n \n\n //let g = f.clone();\n\n match &f {\n\n Closure(name, params, expr, env) => {\n\n // Unification should be invoked here, but we only allow single variable here to debug...\n\n let mut env = env.clone();\n\n \n\n match params.borrow() {\n\n List(ps) => match &ps[..] {\n\n [ ] => if x == Nil {\n\n if let Some(name) = name {\n\n //backtrace.push(format!(\"Apply function: {}\", name));\n\n env.insert(name.clone(), Rc::new(f.clone()));\n\n }\n\n evaluate(Object::clone(expr.borrow()), &mut env, backtrace)\n\n } else {\n\n Err(EvalError::new(format!(\"Applying error: unexpected parameter: {}\", x), backtrace.clone()))\n\n },\n", "file_path": "src/abyss/eval/strict.rs", "rank": 58, "score": 69343.985367792 }, { "content": "#[inline]\n\npub fn eval_thunk(thunk: Object, backtrace: &mut Backtrace) -> Result<Object> {\n\n //println!(\"\\nthunk: {:?}\", thunk);\n\n use Object::*;\n\n match thunk {\n\n Thunk(name, thunk, mut env) => {\n\n if let Some(name) = name {\n\n let thunk = Thunk(Some(name.clone()), thunk.clone(), env.clone());\n\n env.insert(name.clone(), Rc::new(thunk));\n\n }\n\n if *thunk.evaluated.borrow() == true {\n\n return Ok(thunk.value());\n\n }\n\n let mut res = evaluate(thunk.value(), &mut env, backtrace)?;\n\n\n\n // Flatten thunk, never generate recursive wrapped thunk!\n\n if let Thunk(_, _, _) = res {\n\n res = eval_thunk(res, backtrace)?;\n\n }\n\n let result = res.clone();\n\n //let p: &Thunker = expr.borrow();\n\n *thunk.as_ref().expr.borrow_mut() = res;\n\n Ok(result)\n\n },\n\n _ => Err(EvalError::new(format!(\"Error while evaluating thunk: {:?}\", thunk), backtrace.clone()))\n\n }\n\n}\n\n\n\n/// Force strict evaluation\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 59, "score": 68599.68476268365 }, { "content": "#[inline]\n\npub fn eval_thunk(thunk: Object, backtrace: &mut Backtrace) -> Result<Object> {\n\n //println!(\"\\nthunk: {:?}\", thunk);\n\n use Object::*;\n\n match thunk {\n\n Thunk(name, thunk, mut env) => {\n\n if let Some(name) = name {\n\n let thunk = Thunk(Some(name.clone()), thunk.clone(), env.clone());\n\n env.insert(name.clone(), Rc::new(thunk));\n\n }\n\n if *thunk.evaluated.borrow() == true {\n\n return Ok(thunk.value());\n\n }\n\n let mut res = evaluate(thunk.value(), &mut env, backtrace)?;\n\n\n\n // Flatten thunk, never generate recursive wrapped thunk!\n\n if let Thunk(_, _, _) = res {\n\n res = eval_thunk(res, backtrace)?;\n\n }\n\n let result = res.clone();\n\n //let p: &Thunker = expr.borrow();\n\n *thunk.as_ref().expr.borrow_mut() = res;\n\n Ok(result)\n\n },\n\n _ => Err(EvalError::new(format!(\"Error while evaluating thunk: {:?}\", thunk), backtrace.clone()))\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 60, "score": 68599.68476268365 }, { "content": "/// The main evaluate function to calculate all abyss expressions\n\npub fn 
evaluate(expr: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n use Object::*;\n\n //println!(\"\\neval: {} in {:?}\", expr, env);\n\n let expr = expr.clone();\n\n let ans = match expr {\n\n Nil => Ok(Nil),\n\n Var(s) if is_atom(&s) => Ok(Var(s)),\n\n Var(s) => env.get(&s).map(|x| (**x).clone()).ok_or(EvalError { msg: format!(\"No such variable: {}\", s) }),\n\n Symbol(_) => Ok(expr),\n\n Integer(_) => Ok(expr),\n\n Real(_) => Ok(expr),\n\n Str(_) => Ok(expr),\n\n Thunk(_, _, _) => eval_thunk(&expr),\n\n List(ref xs) => match &xs[..] {\n\n // Empty list\n\n [] => Ok(List(xs.clone())),\n\n\n\n // Lambda abstraction\n\n [Var(op), ps, expr] if &op[..] == \"lambda\" => {\n\n Ok(Object::closure(ps.clone(), expr.clone(), env.clone()))\n", "file_path": "src/abyss/eval/strict.rs", "rank": 61, "score": 66282.87271329474 }, { "content": "/// Handle list constructions\n\nfn eval_list(xs: &[Object], env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n let mut v = vec![];\n\n for x in xs {\n\n let x = evaluate(x.clone(), env, backtrace)?;\n\n v.push(x);\n\n }\n\n Ok(Object::List(v))\n\n}\n\n\n\n/*\n", "file_path": "src/abyss/eval/strict.rs", "rank": 62, "score": 66194.422638108 }, { "content": "#[inline]\n\npub fn wrap(name: Option<String>, expr: Object, env: Env) -> Result<Object> {\n\n use Object::*;\n\n match expr {\n\n Nil => Ok(Nil),\n\n Var(ref s) => {\n\n if env.contains_key(s) {\n\n Ok(Object::thunk_of(name, expr.clone(), env.clone()))\n\n } else {\n\n Err(EvalError::msg(format!(\"No such variable: {}\", s)))\n\n }\n\n //env.get(s).map(|x| (**x).clone()).ok_or(EvalError { msg: format!(\"No such variable: {}\", s) })\n\n },\n\n Symbol(_) => Ok(expr),\n\n Cons(_) => Ok(expr),\n\n Integer(_) => Ok(expr),\n\n Real(_) => Ok(expr),\n\n Str(_) => Ok(expr),\n\n Thunk(_, _, _) => Ok(expr),\n\n _ => {\n\n //println!(\"wrap {:?}\", expr);\n\n Ok(Object::thunk_of(name, expr, env))\n\n }\n\n }\n\n}\n\n\n\n/// Weak a term, i.e. term => lambda ps. term\n", "file_path": "src/abyss/eval/core.rs", "rank": 63, "score": 65224.938954578494 }, { "content": "pub trait CustomObj: fmt::Display + fmt::Debug + CustomObjClone {\n\n fn eval(&self, env: &mut Env) -> Result<Object, EvalError>;\n\n fn hash_dyn(&self) -> i64;\n\n}\n\n\n\n\n\n\n", "file_path": "src/abyss/object.rs", "rank": 64, "score": 63732.65708049519 }, { "content": "#[inline]\n\npub fn force(obj: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n //println!(\"\\nforce: {} in {:?}\", thunk, env);\n\n use Object::*;\n\n let e = obj.clone();\n\n let v = evaluate(obj, env, backtrace)?;\n\n match v {\n\n Thunk(_, _, _) => {\n\n let v = eval_thunk(v, backtrace)?;\n\n match e {\n\n Var(s) => {\n\n env.insert(s, Rc::new(v.clone()));\n\n },\n\n _ => {}\n\n }\n\n Ok(v)\n\n },\n\n others => Ok(others.clone())\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 65, "score": 63142.70342143148 }, { "content": "/// The main evaluate function to calculate all abyss expressions\n\npub fn evaluate(expr: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n macro_rules! 
get {\n\n ($sx: expr, $msg: expr) => {\n\n match $sx {\n\n Some(x) => Ok(x),\n\n None => Err(EvalError::new(format!(\"Getting {}\", $msg), backtrace.clone()))\n\n }\n\n };\n\n } \n\n //println!(\"\\neval: {} in {:?}\", expr, env);\n\n let ans = match expr {\n\n Nil => Ok(Nil),\n\n Var(ref s) if atom::is_atom(s) => Ok(expr.clone()),\n\n Var(ref s) => env.get(s).map(|x| (**x).clone()).ok_or(EvalError::new(format!(\"No such variable: {}\", s), backtrace.clone())),\n\n Symbol(_) => Ok(expr),\n\n Integer(_) => Ok(expr),\n\n Real(_) => Ok(expr),\n\n Str(_) => Ok(expr),\n\n Closure(_, _, _, _) => Ok(expr),\n", "file_path": "src/abyss/eval/strict.rs", "rank": 66, "score": 63142.70342143148 }, { "content": "/// The main evaluate function to calculate all abyss expressions\n\npub fn evaluate(expr: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n macro_rules! get {\n\n ($sx: expr, $msg: expr) => {\n\n match $sx {\n\n Some(x) => Ok(x),\n\n None => Err(EvalError::new(format!(\"Getting {}\", $msg), backtrace.clone()))\n\n }\n\n };\n\n }\n\n //println!(\"\\neval: {:?} in {:?}\", expr, 0);\n\n let ans = match expr {\n\n Nil => Ok(Nil),\n\n Var(ref s) if atom::is_atom(s) => Ok(expr),\n\n Var(ref s) => env.get(s).map(|x| (**x).clone()).ok_or(EvalError::new(format!(\"No such variable: {}\", s), backtrace.clone())),\n\n Symbol(_) => Ok(expr),\n\n Integer(_) => Ok(expr),\n\n Real(_) => Ok(expr),\n\n Str(_) => Ok(expr),\n\n Closure(_, _, _, _) => Ok(expr),\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 67, "score": 63142.70342143148 }, { "content": "pub fn bind(left: Object, right: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<()> {\n\n interpreter::bind(left, right, env, backtrace)\n\n}\n\n\n", "file_path": "src/abyss/eval/mod.rs", "rank": 68, "score": 61191.72775860413 }, { "content": "pub fn bind(left: Object, right: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<()> {\n\n use Object::*;\n\n match (left, right) {\n\n (Var(s), expr) => {\n\n let v = evaluate(expr, env, backtrace)?;\n\n //let v = wrap(expr.clone(), env.clone());\n\n let v = match v {\n\n Closure(None, ps, expr, env) => {\n\n Closure(Some(s.clone()), ps, expr, env)\n\n },\n\n others => others,\n\n };\n\n env.insert(s.clone(), Rc::new(v.clone()));\n\n }\n\n (List(xs), expr) => match &xs[..] {\n\n [Var(f), ps @ ..] => {\n\n let lambda = weak(ps.to_vec(), expr.clone());\n\n bind(Var(f.clone()), lambda, env, backtrace)?\n\n },\n\n _ => return Err(EvalError::new(format!(\"Binding error: Invalid Binding {:?} and {:?}\", List(xs), expr), backtrace.clone()))\n\n }\n\n (x, y) => return Err(EvalError::new(format!(\"Binding error: Binding {:?} and {:?}\", x, y), backtrace.clone()))\n\n }\n\n Ok(())\n\n}\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 69, "score": 61191.72775860413 }, { "content": "pub fn bind(left: Object, right: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<()> {\n\n use Object::*;\n\n match (left, right) {\n\n (Var(s), expr) => {\n\n let v = match expr {\n\n List(xs) => match &xs[..] {\n\n [Var(op), ..] if &op[..] 
== \"lambda\" => evaluate(List(xs), env, backtrace)?,\n\n others => List(others.to_vec())\n\n }\n\n others => others.clone()\n\n };\n\n //let v = wrap(expr.clone(), env.clone());\n\n let v = match v {\n\n Closure(None, ps, expr, env) => {\n\n Closure(Some(s.clone()), ps, expr, env)\n\n },\n\n others => {\n\n let thunk = wrap(Some(s.clone()), others.clone(), env.clone())?;\n\n //env.insert(s.clone(), Rc::new(thunk.clone()));\n\n thunk\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 70, "score": 61191.72775860413 }, { "content": "pub trait Unifiable {\n\n type Key;\n\n fn unify(&self, other: &Self) -> Result<HashMap<Self::Key, Self>, UnifyError>\n\n where Self: Sized;\n\n}\n", "file_path": "src/logic/core.rs", "rank": 71, "score": 54902.650285982236 }, { "content": "pub trait Parser {\n\n\n\n type Output;\n\n fn parse<'a>(&self, state: &mut ParseState<'a>) -> Result<Self::Output, ParseError>;\n\n\n\n /* Utilities */\n\n fn and<P: Parser>(self, other: P) -> And<Self, P> where\n\n Self: Sized {\n\n And { a: self, b: other }\n\n }\n\n fn or<P>(self, other: P) -> Or<Self, P> where\n\n Self: Sized, P: Parser<Output=Self::Output> {\n\n Or { a: self, b: other }\n\n }\n\n fn map<B, F>(self, f: F) -> Map<Self, F> where\n\n Self: Sized, F: Fn(Self::Output) -> B {\n\n Map { parser: self, f: f }\n\n }\n\n fn and_then<B, F>(self, f: F) -> AndThen<Self, F> where\n\n Self: Sized, F: Fn(Self::Output) -> B {\n", "file_path": "src/parser/core.rs", "rank": 72, "score": 54902.650285982236 }, { "content": "pub trait Error: std::error::Error {\n\n fn backtrace(&self) -> Option<&Backtrace>;\n\n fn into<T>(self) -> T where Self: Into<T> {\n\n Into::into(self)\n\n }\n\n}\n\n\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Backtrace {\n\n trace: Vec<String>,\n\n}\n\n\n\nimpl Backtrace {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Backtrace { trace: vec![] }\n\n }\n\n #[inline]\n\n pub fn trace(&self) -> &[String] {\n", "file_path": "src/utils/error.rs", "rank": 73, "score": 47766.563601074246 }, { "content": "/// Evaluate arithmetic expressions.\n\nfn eval_if(cond: &Object, x: &Object, y: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n use Object::*;\n\n\n\n let cond = evaluate(cond, env)?;\n\n match cond {\n\n Var(s) if &s[..] == \"True\" => evaluate(x, env),\n\n Var(s) if &s[..] == \"False\" => evaluate(y, env),\n\n _ => Err(EvalError { msg: format!(\"If expression error: {:?}\", cond) })\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 74, "score": 44142.86178210055 }, { "content": "fn bye() {\n\n println!(\"Bye bye.\");\n\n}\n\n\n", "file_path": "src/abyss/repl.rs", "rank": 75, "score": 42427.169756728654 }, { "content": "/// Evaluate arithmetic expressions.\n\nfn eval_if(cond: Object, x: Object, y: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n\n\n let cond = evaluate(cond, env, backtrace)?;\n\n match cond {\n\n Var(s) if &s[..] == \"True\" => evaluate(x, env, backtrace),\n\n Var(s) if &s[..] == \"False\" => evaluate(y, env, backtrace),\n\n _ => Err(EvalError::new(format!(\"If expression error: {:?}\", cond), backtrace.clone()))\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 76, "score": 41900.069147948925 }, { "content": "#[inline]\n\nfn eval_if(cond: Object, x: Object, y: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n use Object::*;\n\n\n\n let cond = force(cond, env, backtrace)?;\n\n match cond {\n\n Var(s) if &s[..] 
== \"True\" => evaluate(x, env, backtrace),\n\n Var(s) if &s[..] == \"False\" => evaluate(y, env, backtrace),\n\n _ => Err(EvalError::new(format!(\"If expression error: {:?}\", cond), backtrace.clone()))\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 77, "score": 41900.069147948925 }, { "content": "#[inline]\n\nfn apply_env(\n\n f: Object, name: &Option<String>, \n\n pat: &Object, x: &Object, \n\n env: &mut Env, backtrace: &mut Backtrace\n\n) -> Result<()> {\n\n\n\n let bind = pat.unify(&x);\n\n if let Ok(bind) = bind {\n\n let bind: HashMap<String, Rc<Object>> = bind.into_iter().map(|(k, v)| (k, Rc::new(v))).collect();\n\n if let Some(name) = name {\n\n //backtrace.push(format!(\"Apply function: {}\", name));\n\n env.insert(name.clone(), Rc::new(f));\n\n }\n\n env.extend(bind);\n\n Ok(())\n\n } else {\n\n Err(EvalError::new(format!(\"Application error: Unification error: unifying {:?} and {:?}\", pat, x), backtrace.clone()))\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 78, "score": 40250.54032490199 }, { "content": "#[inline]\n\nfn apply_env(\n\n f: Object, name: &Option<String>, \n\n pat: &Object, x: &Object, \n\n env: &mut Env, backtrace: &mut Backtrace\n\n) -> Result<()> {\n\n\n\n let bind = pat.unify(&x);\n\n if let Ok(bind) = bind {\n\n let bind: HashMap<String, Rc<Object>> = bind.into_iter().map(|(k, v)| (k, Rc::new(v))).collect();\n\n if let Some(name) = name {\n\n env.insert(name.clone(), Rc::new(f));\n\n }\n\n env.extend(bind);\n\n Ok(())\n\n } else {\n\n //println!(\"{:?}\", bind);\n\n Err(EvalError::new(format!(\"Application error: Unification error: unifying {:?} and {:?}\", pat, x), backtrace.clone()))\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 79, "score": 40250.54032490199 }, { "content": "fn main() -> std::io::Result<()> {\n\n // Spawn thread with explicit stack size \n\n let child = thread::Builder::new()\n\n .stack_size(config::STACK_SIZE)\n\n .spawn(run)\n\n .unwrap();\n\n\n\n // Wait for thread to join\n\n child.join().unwrap()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 80, "score": 37883.379663200336 }, { "content": "fn run() -> std::io::Result<()> {\n\n repl()\n\n} \n\n\n\n\n\n\n", "file_path": "src/main.rs", "rank": 81, "score": 37883.379663200336 }, { "content": "fn conversions(expr: Object) -> Object {\n\n let ans = convert_pattern(expr);\n\n ans\n\n}\n\n\n\n\n\n\n\n\n\n/**\n\n * The main parser function\n\n */\n", "file_path": "src/abyss/parser.rs", "rank": 82, "score": 36918.59842821327 }, { "content": "fn convert_pattern(expr: Object) -> Object {\n\n use Object::*;\n\n match expr {\n\n List(xs) => match &xs[..] {\n\n [Var(op), expr, List(cases)] if &op[..] == \"case\" => {\n\n let cases = List(cases.iter().map(|case| match case {\n\n List(xs) => match &xs[..] 
{\n\n [pat, result] => {\n\n let pat = Pattern::from(pat.clone()).unwrap();\n\n List(vec![pat, result.clone()])\n\n },\n\n others => List(others.to_vec())\n\n },\n\n others => others.clone()\n\n }).collect());\n\n List(vec![Var(op.clone()), expr.clone(), cases])\n\n },\n\n _ => List(xs.into_iter().map(|x| convert_pattern(x)).collect())\n\n },\n\n others => others\n\n }\n\n}\n\n\n", "file_path": "src/abyss/parser.rs", "rank": 83, "score": 36024.16311098111 }, { "content": "#[inline]\n\nfn arrow(types: Vec<Object>) -> Object {\n\n use Object::*;\n\n List(vec![Cons(\"->\".into())].into_iter().chain(types.into_iter()).collect())\n\n}\n\n\n", "file_path": "src/abyss/checker.rs", "rank": 84, "score": 35395.36029974486 }, { "content": "#[inline]\n\nfn wash_type(pat: &Object) -> &Object {\n\n use Object::*;\n\n match pat {\n\n List(xs) if match &xs[..] { \n\n [Var(s), _x, _type] if &s[..] == \":\" => true,\n\n _ => false,\n\n } => {\n\n &xs[1]\n\n },\n\n _ => pat\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 85, "score": 35192.65054926036 }, { "content": "#[inline]\n\nfn wash_type(pat: &Object) -> &Object {\n\n use Object::*;\n\n match pat {\n\n List(xs) if match &xs[..] { \n\n [Var(s), _x, _type] if &s[..] == \":\" => true,\n\n _ => false,\n\n } => {\n\n &xs[1]\n\n },\n\n _ => pat\n\n }\n\n}\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 86, "score": 35192.65054926036 }, { "content": "fn eval_thunk(thunk: &Object) -> Result<Object, EvalError> {\n\n //println!(\"\\nthunk: {:?}\", thunk);\n\n use Object::*;\n\n match thunk.clone() {\n\n Thunk(None, expr, mut env) => {\n\n //println!(\"{:?}\", env);\n\n evaluate(&expr, &mut env)\n\n },\n\n Thunk(Some(name), expr, mut env) => {\n\n env.insert(name.clone(), Rc::new(thunk.clone()));\n\n evaluate(&expr, &mut env)\n\n },\n\n _ => Err(EvalError { msg: format!(\"Error while evaluating thunk: {:?}\", thunk) })\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 87, "score": 31818.750350263035 }, { "content": "fn result<V, E>(res: Result<V, E>) -> String \n\n where\n\n V: fmt::Debug + fmt::Display,\n\n E: fmt::Debug + fmt::Display,\n\n{\n\n match res {\n\n Ok(v) => format!(\"{}\", v ),\n\n Err(err) => format!(\"{}\", err),\n\n }\n\n}\n\n\n", "file_path": "src/abyss/repl.rs", "rank": 88, "score": 31510.84177553191 }, { "content": "fn result_debug<V, E>(res: Result<V, E>) -> String \n\n where\n\n V: fmt::Debug + fmt::Display,\n\n E: fmt::Debug + fmt::Display,\n\n{\n\n match res {\n\n Ok(v) => format!(\"{:?}\", v ),\n\n Err(err) => format!(\"{:?}\", err),\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/abyss/repl.rs", "rank": 89, "score": 30832.85061719772 }, { "content": "/// Handle bindings\n\nfn bindings(bindings: &[Object], env: &mut Env) -> Result<(), EvalError> {\n\n use Object::*;\n\n for binding in bindings {\n\n match binding {\n\n List(xs) => match &xs[..] 
{\n\n [Var(s), expr] => {\n\n let v = evaluate(expr, env)?;\n\n //let v = wrap(expr.clone(), env.clone());\n\n let v = match v {\n\n Closure(None, ps, expr, env) => {\n\n Closure(Some(s.clone()), ps, expr, env)\n\n },\n\n others => others,\n\n };\n\n env.insert(s.clone(), Rc::new(v.clone()));\n\n }\n\n _ => todo!() //unsupported yet!\n\n },\n\n _ => return Err(EvalError { msg: format!(\"Binding error: {:?}\", binding) })\n\n }\n\n }\n\n //println!(\">> {:?}\", env);\n\n Ok(())\n\n}\n", "file_path": "src/abyss/eval/strict.rs", "rank": 90, "score": 30196.672527102513 }, { "content": "//#[allow(dead_code)]\n\nfn subst(map: &HashMap<Object, Object>, obj: &Object) -> Option<Object> {\n\n use Object::*;\n\n match obj {\n\n Var(_x) => {\n\n if let Some(o) = map.get(obj) {\n\n if let Var(_y) = o { subst(map, o) } else { Some(o.clone()) }\n\n } else {\n\n None\n\n }\n\n },\n\n _ => Some(obj.clone())\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/abyss/logic.rs", "rank": 91, "score": 29766.133298413253 }, { "content": "/// Handle bindings\n\nfn bindings(bindings: &[Object], env: &mut Env, backtrace: &mut Backtrace) -> Result<()> {\n\n use Object::*;\n\n for binding in bindings {\n\n match binding {\n\n List(xs) => match &xs[..] {\n\n [def, expr] => {\n\n bind(def.clone(), expr.clone(), env, backtrace)?;\n\n }\n\n _ => return Err(EvalError::new(format!(\"Binding error: {:?}\", binding), backtrace.clone()))\n\n },\n\n _ => return Err(EvalError::new(format!(\"Binding error: {:?}\", binding), backtrace.clone()))\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 92, "score": 28772.344578021002 }, { "content": "/// Handle bindings\n\nfn bindings(bindings: &[Object], env: &mut Env, backtrace: &mut Backtrace) -> Result<()> {\n\n use Object::*;\n\n for binding in bindings {\n\n match binding {\n\n List(xs) => match &xs[..] {\n\n [def, expr] => {\n\n bind(def.clone(), expr.clone(), env, backtrace)?\n\n }\n\n _ => return Err(EvalError::new(format!(\"Binding error: {:?}\", binding), backtrace.clone()))\n\n },\n\n _ => return Err(EvalError::new(format!(\"Binding error: {:?}\", binding), backtrace.clone()))\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n\n\n//(let ((gen (lambda (s n) (case n ((0 ()) (n (:: s (gen s (- n 1))))))))) (gen 'T_T 200))\n", "file_path": "src/abyss/eval/strict.rs", "rank": 93, "score": 28772.344578021002 }, { "content": "/// Evaluate arithmetic expressions.\n\nfn eval_arith(expr: &Object, env: &mut Env) -> Result<Object, EvalError> {\n\n use Object::*;\n\n \n\n let binary_integer: HashMap<&str, &fn(i64, i64) -> i64> = atom::BINARY_ARITH_INTEGER.iter().map(|(k, v)| (*k, v)).collect();\n\n let binary_real: HashMap<&str, &fn(f64, f64) -> f64> = atom::BINARY_ARITH_REAL.iter().map(|(k, v)| (*k, v)).collect();\n\n match expr {\n\n List(xs) => match &xs[..] 
{\n\n [Var(op), x, y] if is_arith(&op) => {\n\n let x = evaluate(x, env)?;\n\n let y = evaluate(y, env)?;\n\n match (x, y) {\n\n (Integer(x), Integer(y)) => Ok(Integer((binary_integer[&op[..]])(x, y))),\n\n (Real(x), Real(y)) => Ok(Real((binary_real[&op[..]])(x, y))),\n\n others => Err(EvalError { msg: format!(\"Arith error: evaluating {:?}\", others) })\n\n }\n\n },\n\n _ => Err(EvalError { msg: format!(\"eval arithmetic error!\") })\n\n },\n\n _ => Err(EvalError { msg: format!(\"eval arithmetic error!\") })\n\n }\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 94, "score": 28604.642921359045 }, { "content": "/// Very bad, never use!\n\nfn force_value(obj: Object, env: &mut Env, backtrace: &mut Backtrace) -> Result<Object> {\n\n //println!(\"force value: {:?}\", obj);\n\n use Object::*;\n\n match obj {\n\n Thunk(_, _, _) => force_value(eval_thunk(obj, backtrace)?, env, backtrace),\n\n List(xs) => match &xs[..] {\n\n [Cons(cons), xs @ ..] => {\n\n let mut ys = vec![Cons(cons.clone())];\n\n for x in xs {\n\n let x = force_value(x.clone(), env, backtrace)?;\n\n ys.push(x);\n\n }\n\n Ok(List(ys))\n\n }\n\n others => Ok(List(others.to_vec()))\n\n }\n\n others => Ok(others.clone())\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "src/abyss/eval/lazy.rs", "rank": 95, "score": 27315.992662615972 }, { "content": "fn check(expr: Object, env: &mut Env, tnv: &mut Env) -> Result<Object, CheckerError> {\n\n use Object::*;\n\n //println!(\"eval: {}\", expr);\n\n #[inline] fn tag(s: &str) -> Object {\n\n Var(s.to_string())\n\n }\n\n match expr {\n\n Nil => Ok(Nil),\n\n Var(ref s) => tnv.get(s).map(|x| (**x).clone()).ok_or(CheckerError { msg: format!(\"No such variable: {}\", s) }),\n\n Symbol(_) => Ok(tag(\"Symbol\")),\n\n Integer(_) => Ok(tag(\"Int\")),\n\n Real(_) => Ok(tag(\"Real\")),\n\n Str(_) => Ok(tag(\"String\")),\n\n Thunk(_, expr, env) => {\n\n let mut env = env.clone();\n\n check(expr.value(), &mut env, tnv)\n\n },\n\n List(xs) => match &xs[..] {\n\n\n\n // Empty list\n", "file_path": "src/abyss/checker.rs", "rank": 96, "score": 26974.988170706078 }, { "content": "/// Evaluate case(match) expressions\n\nfn eval_cases(expr: &Object, cases: &[Object], env: &mut Env) -> Result<Object, EvalError> {\n\n //println!(\"\\ncases\");\n\n use Object::*;\n\n let expr = evaluate(expr, env)?;\n\n for case in cases {\n\n match case {\n\n List(xs) => match &xs[..] 
{\n\n [pat, res] => {\n\n let bind = pat.unify(&expr);\n\n if let Ok(mut bind) = bind {\n\n let bind: HashMap<String, Rc<Object>> = bind.iter_mut().map(|(k, v)| (k.clone(), Rc::new(v.clone()))).collect();\n\n env.extend(bind);\n\n return evaluate(res, env)\n\n } else {\n\n continue;\n\n }\n\n },\n\n _ => return Err(EvalError { \n\n msg: format!(\"Case bindings should have format of `[pat result]`, instead of {:?}\", case) \n\n }),\n\n },\n\n _ => return Err(EvalError { \n\n msg: format!(\"Case bindings should have format of `[pat result]`, instead of {:?}\", case) \n\n }),\n\n }\n\n }\n\n Err(EvalError { msg: format!(\"Case expression error\") })\n\n}\n\n\n\n\n", "file_path": "src/abyss/eval/strict.rs", "rank": 97, "score": 26810.139283206685 }, { "content": "fn unify_objects(this: &Object, other: &Object, unv: &mut Env, backtrace: &mut Backtrace) -> Result<(), UnifyError> {\n\n use Object::*;\n\n //println!(\"Unifying: {:?} and {:?}\", this, 0);\n\n match (this, other) {\n\n (Nil, Nil) => Ok(()),\n\n (Integer(x), Integer(y)) if x == y => Ok(()),\n\n (Real (x), Real (y)) if x == y => Ok(()),\n\n (Str (x), Str (y)) if x == y => Ok(()),\n\n (Symbol (x), Symbol (y)) if x == y => Ok(()),\n\n (Cons (x), Cons (y)) if x == y => Ok(()), \n\n (Var(_), _) => {\n\n unify_var(&this, &other, unv, backtrace)\n\n },\n\n (_, Var(_)) => {\n\n unify_var(&other, &this, unv, backtrace)\n\n },\n\n (_, thunk @ Thunk(_, _, _)) => {\n\n //println!(\"before: {:?}\", thunk);\n\n let v = lazy::eval_thunk(thunk.clone(), backtrace);\n\n //println!(\"after: \");\n", "file_path": "src/abyss/logic.rs", "rank": 98, "score": 26661.824811033435 }, { "content": "fn unify_var(var: &Object, v: &Object, unv: &mut Env, backtrace: &mut Backtrace) -> Result<(), UnifyError> {\n\n use Object::*;\n\n //println!(\"Unifying var: {:?} and {}\", var, v);\n\n match (var, v) {\n\n (Var(_), _) if unv.contains_key(var) => {\n\n let next = unv[var].clone();\n\n unify_objects(&next, v, unv, backtrace)\n\n },\n\n (Var(_), _) if unv.contains_key(v) => {\n\n let next = unv[v].clone();\n\n unify_objects(var, &next, unv, backtrace)\n\n },\n\n (Var(_), _) if check_occurs(var, v, unv) => {\n\n Err(UnifyError { msg: format!(\"Unification error: check occurence error!\") })\n\n },\n\n (Var(_), _) => {\n\n unv.insert(var.clone(), v.clone());\n\n Ok(())\n\n }\n\n _ => {\n\n Err(UnifyError { msg: format!(\"Unification error: Unifying varibles\") })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/abyss/logic.rs", "rank": 99, "score": 26159.43758102974 } ]
Rust
src/entities.rs
JoshMcguigan/amethyst-2d-platformer-demo
fa4c53621e4af22e6e7f0657e83bfa13f50bf341
use amethyst::{ assets::{AssetStorage, Loader}, core::{Transform}, ecs::{Entity}, prelude::*, renderer::{ Camera, PngFormat, Projection, Sprite, SpriteRender, SpriteSheet, SpriteSheetHandle, Texture, TextureMetadata, SpriteSheetFormat, Transparent }, }; use crate::{ DISPLAY_WIDTH, PLAYER_W, PLAYER_H, CRATE_SIZE, GROUND_Y, TOTAL_PLAYER_SPRITE_HEIGHT, components::{Player, TwoDimObject} }; pub struct InitialState; impl SimpleState for InitialState { fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) { let world = data.world; let background_sprite_sheet_handle = load_sprite_sheet(world, "./texture/BG.png", "./texture/BG.ron"); init_background_sprite(world, &background_sprite_sheet_handle); let ground_sprite_sheet_handle = load_sprite_sheet(world, "./texture/ground.png", "./texture/ground.ron"); init_ground_sprite(world, &ground_sprite_sheet_handle); let crate_sprite_sheet_handle = load_sprite_sheet(world, "./texture/Crate.png", "./texture/Crate.ron"); init_crate_sprite(world, &crate_sprite_sheet_handle, 0., GROUND_Y); init_crate_sprite(world, &crate_sprite_sheet_handle, CRATE_SIZE, GROUND_Y); init_crate_sprite(world, &crate_sprite_sheet_handle, 0., GROUND_Y + CRATE_SIZE); let floating_crate_height = (PLAYER_H + 10) as f32 + GROUND_Y; init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - CRATE_SIZE, floating_crate_height); init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - 2. * CRATE_SIZE, floating_crate_height); init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - 3. * CRATE_SIZE, floating_crate_height); world.register::<Player>(); let sprite_sheet_handle = load_player_sprite_sheet(world); init_player(world, &sprite_sheet_handle); init_camera(world); } } fn init_camera(world: &mut World) { let mut transform = Transform::default(); transform.set_xyz(0.0, 0.0, 1.0); world .create_entity() .with(Camera::from(Projection::orthographic( 0.0, DISPLAY_WIDTH, 0.0, 1000., ))) .with(transform) .build(); } fn init_player(world: &mut World, sprite_sheet_handle: &SpriteSheetHandle) -> Entity { let scale = 1.; let mut transform = Transform::default(); transform.set_scale(scale, scale, scale); let sprite_render = SpriteRender { sprite_sheet: sprite_sheet_handle.clone(), sprite_number: 60, }; let mut two_dim_object = TwoDimObject::new(PLAYER_W as f32, PLAYER_H as f32); two_dim_object.set_position(500., 500.); two_dim_object.update_transform_position(&mut transform); world .create_entity() .with(transform) .with(Player::new(two_dim_object)) .with(sprite_render) .with(Transparent) .build() } fn init_background_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity { let mut transform = Transform::default(); transform.set_xyz(500., 500., -10.); transform.set_scale(1., 1.5, 1.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; world.create_entity() .with(transform) .with(sprite) .with(Transparent) .build() } fn init_ground_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity { let mut transform = Transform::default(); transform.set_z(-9.); transform.set_scale(10., 1., 1.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; let mut two_dim_object = TwoDimObject::new(1280., 128.); two_dim_object.set_left(0.); two_dim_object.set_top(GROUND_Y); two_dim_object.update_transform_position(&mut transform); world.create_entity() .with(transform) .with(two_dim_object) .with(sprite) .with(Transparent) .build() } fn init_crate_sprite(world: &mut World, sprite_sheet: 
&SpriteSheetHandle, left: f32, bottom: f32) -> Entity { let mut transform = Transform::default(); transform.set_z(-9.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; let mut two_dim_object = TwoDimObject::new(CRATE_SIZE, CRATE_SIZE); two_dim_object.set_left(left); two_dim_object.set_bottom(bottom); two_dim_object.update_transform_position(&mut transform); world.create_entity() .with(transform) .with(two_dim_object) .with(sprite) .with(Transparent) .build() } fn load_sprite_sheet(world: &mut World, png_path: &str, ron_path: &str) -> SpriteSheetHandle { let texture_handle = { let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); loader.load( png_path, PngFormat, TextureMetadata::srgb_scale(), (), &texture_storage, ) }; let loader = world.read_resource::<Loader>(); let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>(); loader.load( ron_path, SpriteSheetFormat, texture_handle, (), &sprite_sheet_store, ) } fn load_player_sprite_sheet(world: &mut World) -> SpriteSheetHandle { let texture_handle = { let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); loader.load( "./texture/spritesheet.png", PngFormat, TextureMetadata::srgb_scale(), (), &texture_storage, ) }; let loader = world.read_resource::<Loader>(); let sprite_count = 75; let mut sprites = Vec::with_capacity(sprite_count); let image_w = 200; let image_h = 13980; for i in 0..(sprite_count as u32) { let offset_x = 0; let offset_y = TOTAL_PLAYER_SPRITE_HEIGHT * i; let offsets = [0.; 2]; let sprite = Sprite::from_pixel_values( image_w, image_h, PLAYER_W, PLAYER_H, offset_x, offset_y, offsets, ); sprites.push(sprite); } let sprite_sheet = SpriteSheet { texture: texture_handle, sprites, }; loader.load_from_data( sprite_sheet, (), &world.read_resource::<AssetStorage<SpriteSheet>>(), ) }
use amethyst::{ assets::{AssetStorage, Loader}, core::{Transform}, ecs::{Entity}, prelude::*, renderer::{ Camera, PngFormat, Projection, Sprite, SpriteRender, SpriteSheet, SpriteSheetHandle, Texture, TextureMetadata, SpriteSheetFormat, Transparent }, }; use crate::{ DISPLAY_WIDTH, PLAYER_W, PLAYER_H, CRATE_SIZE, GROUND_Y, TOTAL_PLAYER_SPRITE_HEIGHT, components::{Player, TwoDimObject} }; pub struct InitialState; impl SimpleState for InitialState { fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) { let world = data.world; let background_sprite_sheet_handle = load_sprite_sheet(world, "./texture/BG.png", "./texture/BG.ron"); init_background_sprite(world, &background_sprite_sheet_handle); let ground_sprite_sheet_handle = load_sprite_sheet(world, "./texture/ground.png", "./texture/ground.ron"); init_ground_sprite(world, &ground_sprite_sheet_handle); let crate_sprite_sheet_handle = load_sprite_sheet(world, "./texture/Crate.png", "./texture/Crate.ron"); init_crate_sprite(world, &crate_sprite_sheet_handle, 0., GROUND_Y); init_crate_sprite(world, &crate_sprite_sheet_handle, CRATE_SIZE, GROUND_Y); init_crate_sprite(world, &crate_sprite_sheet_handle, 0., GROUND_Y + CRATE_SIZE); let floating_crate_height = (PLAYER_H + 10) as f32 + GROUND_Y; init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - CRATE_SIZE, floating_crate_height); init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - 2. * CRATE_SIZE, floating_crate_height); init_crate_sprite(world, &crate_sprite_sheet_handle, DISPLAY_WIDTH - 3. * CRATE_SIZE, floating_crate_height); world.register::<Player>(); let sprite_sheet_handle = load_player_sprite_sheet(world); init_player(world, &sprite_sheet_handle); init_camera(world); } } fn init_camera(world: &mut World) { let mut transform = Transform::default(); transform.set_xyz(0.0, 0.0, 1.0); world .create_entity() .with(Camera::from(Projection::orthographic( 0.0, DISPLAY_WIDTH, 0.0, 1000., ))) .with(transform) .build(); } fn init_player(world: &mut World, sprite_sheet_handle: &SpriteSheetHandle) -> Entity { let scale = 1.; let mut transform = Transform::default(); transform.set_scale(scale, scale, scale); let sprite_render = SpriteRender { sprite_sheet: sprite_sheet_handle.clone(), sprite_number: 60, }; let mut two_dim_object = TwoDimObject::new(PLAYER_W as f32, PLAYER_H as f32); two_dim_object.set_position(500., 500.); two_dim_object.update_transform_position(&mut transform); world .create_entity() .with(transform) .with(Player::new(two_dim_object)) .with(sprite_render) .with(Transparent) .build() } fn init_background_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity { let mut transform = Transform::default(); transform.set_xyz(500., 500., -10.); transform.set_scale(1., 1.5, 1.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; world.create_entity() .with(transform) .with(sprite) .with(Transparent) .build() } fn init_ground_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle) -> Entity { let mut transform = Transform::default(); transform.set_z(-9.); transform.set_scale(10., 1., 1.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; let mut two_dim_object = TwoDimObject::new(1280., 128.); two_dim_object.set_left(0.); two_dim_object.set_top(GROUND_Y); two_dim_object.update_transform_position(&mut transform);
fn init_crate_sprite(world: &mut World, sprite_sheet: &SpriteSheetHandle, left: f32, bottom: f32) -> Entity { let mut transform = Transform::default(); transform.set_z(-9.); let sprite = SpriteRender { sprite_sheet: sprite_sheet.clone(), sprite_number: 0, }; let mut two_dim_object = TwoDimObject::new(CRATE_SIZE, CRATE_SIZE); two_dim_object.set_left(left); two_dim_object.set_bottom(bottom); two_dim_object.update_transform_position(&mut transform); world.create_entity() .with(transform) .with(two_dim_object) .with(sprite) .with(Transparent) .build() } fn load_sprite_sheet(world: &mut World, png_path: &str, ron_path: &str) -> SpriteSheetHandle { let texture_handle = { let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); loader.load( png_path, PngFormat, TextureMetadata::srgb_scale(), (), &texture_storage, ) }; let loader = world.read_resource::<Loader>(); let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>(); loader.load( ron_path, SpriteSheetFormat, texture_handle, (), &sprite_sheet_store, ) } fn load_player_sprite_sheet(world: &mut World) -> SpriteSheetHandle { let texture_handle = { let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); loader.load( "./texture/spritesheet.png", PngFormat, TextureMetadata::srgb_scale(), (), &texture_storage, ) }; let loader = world.read_resource::<Loader>(); let sprite_count = 75; let mut sprites = Vec::with_capacity(sprite_count); let image_w = 200; let image_h = 13980; for i in 0..(sprite_count as u32) { let offset_x = 0; let offset_y = TOTAL_PLAYER_SPRITE_HEIGHT * i; let offsets = [0.; 2]; let sprite = Sprite::from_pixel_values( image_w, image_h, PLAYER_W, PLAYER_H, offset_x, offset_y, offsets, ); sprites.push(sprite); } let sprite_sheet = SpriteSheet { texture: texture_handle, sprites, }; loader.load_from_data( sprite_sheet, (), &world.read_resource::<AssetStorage<SpriteSheet>>(), ) }
world.create_entity() .with(transform) .with(two_dim_object) .with(sprite) .with(Transparent) .build() }
function_block-function_prefix_line
[ { "content": "fn main() -> amethyst::Result<()> {\n\n amethyst::start_logger(Default::default());\n\n let config = DisplayConfig::load(\"./resources/display_config.ron\");\n\n let pipe = Pipeline::build().with_stage(\n\n Stage::with_backbuffer()\n\n .clear_target([0.1, 0.1, 0.2, 1.0], 1.0)\n\n .with_pass(\n\n DrawFlat2D::new()\n\n .with_transparency(ColorMask::all(), ALPHA, None)\n\n ),\n\n );\n\n let input_bundle = InputBundle::<String, String>::new()\n\n .with_bindings_from_file(\"./resources/bindings_config.ron\")?;\n\n\n\n let game_data = GameDataBuilder::default()\n\n .with_bundle(TransformBundle::new())?\n\n .with_bundle(input_bundle)?\n\n .with_bundle(RenderBundle::new(pipe, Some(config))\n\n .with_sprite_sheet_processor()\n\n .with_sprite_visibility_sorting(&[])\n", "file_path": "src/main.rs", "rank": 7, "score": 48223.9587628813 }, { "content": "# Amethyst 2D Platformer Demo\n\n\n\n![pitch-2019-03-10 20 51 07](https://user-images.githubusercontent.com/22216761/54245251-52e22100-44ed-11e9-8129-ebbc919fbdb0.gif)\n\n\n\n# Software License\n\n\n\nThis is free and unencumbered software released into the public domain.\n\n\n\n# Art License\n\n\n\nAll art comes from [Game Art 2d](https://www.gameart2d.com/). Check their [license page](https://www.gameart2d.com/license.html) for more information.\n\n\n", "file_path": "README.md", "rank": 11, "score": 10951.509077133653 }, { "content": "use amethyst::{\n\n core::{Transform},\n\n ecs::{Entities, Join, System, WriteStorage},\n\n renderer::{Flipped, SpriteRender},\n\n};\n\nuse crate::{\n\n PLAYER_MAX_X_VELOCITY,\n\n components::{Player, PlayerState}\n\n};\n\n\n\npub struct AnimationSystem;\n\n\n\nimpl<'s> System<'s> for AnimationSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n WriteStorage<'s, Player>,\n\n WriteStorage<'s, SpriteRender>,\n\n WriteStorage<'s, Flipped>,\n\n WriteStorage<'s, Transform>,\n\n );\n", "file_path": "src/systems/animation.rs", "rank": 12, "score": 12.729218068055028 }, { "content": "use amethyst::{\n\n core::{TransformBundle},\n\n input::{InputBundle},\n\n prelude::*,\n\n renderer::{ALPHA, ColorMask, DisplayConfig, DrawFlat2D, Pipeline, RenderBundle, Stage},\n\n};\n\n\n\nmod entities;\n\nuse entities::{InitialState};\n\nmod components;\n\nmod systems;\n\nuse systems::{ControlSystem, PhysicsSystem, AnimationSystem};\n\n\n\npub const PLAYER_W: u32 = 90;\n\npub const TOTAL_PLAYER_SPRITE_HEIGHT: u32 = 184;\n\npub const PLAYER_SPRITE_Y_PADDING: u32 = 20; // pixels between sprites\n\npub const PLAYER_H: u32 = TOTAL_PLAYER_SPRITE_HEIGHT - PLAYER_SPRITE_Y_PADDING;\n\npub const GROUND_Y: f32 = 74.;\n\npub const CRATE_SIZE: f32 = 77.;\n\npub const DISPLAY_WIDTH: f32 = 1000.;\n\npub const PLAYER_MAX_X_VELOCITY: f32 = 5.;\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 11.51898327148424 }, { "content": "use amethyst::{\n\n ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage},\n\n input::{InputHandler},\n\n};\n\nuse crate::{\n\n PLAYER_MAX_X_VELOCITY,\n\n components::{Player, TwoDimObject}\n\n};\n\n\n\npub struct ControlSystem;\n\n\n\nimpl<'s> System<'s> for ControlSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n WriteStorage<'s, Player>,\n\n ReadStorage<'s, TwoDimObject>,\n\n Read<'s, InputHandler<String, String>>,\n\n );\n\n\n\n fn run(&mut self, (entities, mut players, two_dim_objects, input): Self::SystemData) {\n", "file_path": "src/systems/control.rs", "rank": 14, "score": 9.652096096099786 }, { "content": "}\n\n\n\nimpl TwoDimObject {\n\n pub fn new(width: f32, height: f32) -> Self {\n\n 
TwoDimObject {\n\n size: TwoDimVector { x: width, y: height },\n\n position: TwoDimVector { x: 0., y: 0. },\n\n velocity: TwoDimVector { x: 0., y: 0. },\n\n }\n\n }\n\n\n\n pub fn set_position(&mut self, x: f32, y: f32) {\n\n self.position = TwoDimVector { x, y };\n\n }\n\n\n\n pub fn set_velocity(&mut self, x: f32, y: f32) {\n\n self.velocity = TwoDimVector { x, y };\n\n }\n\n\n\n pub fn update_transform_position(&self, transform: &mut Transform) {\n", "file_path": "src/components.rs", "rank": 15, "score": 8.707884216242272 }, { "content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, WriteStorage},\n\n};\n\nuse crate::{\n\n DISPLAY_WIDTH, PLAYER_W,\n\n components::{Player, TwoDimObject}\n\n};\n\n\n\npub struct PhysicsSystem;\n\n\n\nimpl<'s> System<'s> for PhysicsSystem {\n\n type SystemData = (\n\n WriteStorage<'s, Player>,\n\n ReadStorage<'s, TwoDimObject>,\n\n );\n\n\n\n fn run(&mut self, (mut players, two_dim_objects): Self::SystemData) {\n\n for mut player in (&mut players).join() {\n\n if player.two_dim.velocity.x > 0. {\n\n // player moving right\n", "file_path": "src/systems/physics.rs", "rank": 16, "score": 8.527386722366106 }, { "content": "\n\n fn run(&mut self, (entities, mut players, mut sprites, mut flipped, mut transforms): Self::SystemData) {\n\n for (player_entity, mut player, mut sprite, mut transform) in (&entities, &mut players, &mut sprites, &mut transforms).join() {\n\n // set sprite direction\n\n if player.two_dim.velocity.x > 0. {\n\n // face right\n\n flipped.remove(player_entity);\n\n } else if player.two_dim.velocity.x < 0. {\n\n // face left\n\n flipped.insert(player_entity, Flipped::Horizontal)\n\n .expect(\"Failed to flip\");\n\n }\n\n\n\n // set player state\n\n let current_state = player.state;\n\n let next_state =\n\n if player.two_dim.velocity.y != 0. { PlayerState::Jumping }\n\n else if player.two_dim.velocity.x.abs() > PLAYER_MAX_X_VELOCITY * 0.7 { PlayerState::Running }\n\n else if player.two_dim.velocity.x != 0. 
{ PlayerState::Walking }\n\n else { PlayerState::Idle };\n", "file_path": "src/systems/animation.rs", "rank": 17, "score": 7.685213470336253 }, { "content": "use amethyst::{\n\n core::{Transform},\n\n ecs::{Component, VecStorage},\n\n};\n\nuse specs_derive::Component;\n\n\n\n#[derive(PartialEq, Clone, Copy)]\n\npub enum PlayerState {\n\n Idle,\n\n Walking,\n\n Running,\n\n Jumping,\n\n}\n\n\n\nimpl Default for PlayerState {\n\n fn default() -> Self {\n\n PlayerState::Idle\n\n }\n\n}\n\n\n", "file_path": "src/components.rs", "rank": 18, "score": 7.5215287249902065 }, { "content": " transform.set_x(self.position.x);\n\n transform.set_y(self.position.y);\n\n }\n\n\n\n pub fn top(&self) -> f32 {\n\n self.position.y + self.size.y / 2.\n\n }\n\n\n\n pub fn set_top(&mut self, top: f32) {\n\n self.position.y = top - self.size.y / 2.;\n\n }\n\n\n\n pub fn bottom(&self) -> f32 {\n\n self.position.y - self.size.y / 2.\n\n }\n\n\n\n pub fn set_bottom(&mut self, bottom: f32) {\n\n self.position.y = bottom + self.size.y / 2.;\n\n }\n\n\n", "file_path": "src/components.rs", "rank": 19, "score": 7.160195651129705 }, { "content": " pub fn left(&self) -> f32 {\n\n self.position.x - self.size.x / 2.\n\n }\n\n\n\n pub fn set_left(&mut self, left: f32) {\n\n self.position.x = left + self.size.x / 2.;\n\n }\n\n\n\n pub fn right(&self) -> f32 {\n\n self.position.x + self.size.x / 2.\n\n }\n\n\n\n pub fn set_right(&mut self, right: f32) {\n\n self.position.x = right - self.size.x / 2.;\n\n }\n\n\n\n pub fn overlapping_x(&self, other: &Self) -> bool {\n\n self.left() < other.right() && other.left() < self.right()\n\n }\n\n\n", "file_path": "src/components.rs", "rank": 20, "score": 6.658923303571429 }, { "content": "pub struct TwoDimVector<T> {\n\n pub x: T,\n\n pub y: T,\n\n}\n\n\n\nimpl Default for TwoDimVector<f32> {\n\n fn default() -> Self {\n\n Self {\n\n x: 0.0,\n\n y: 0.0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Component)]\n\n#[storage(VecStorage)]\n\npub struct TwoDimObject {\n\n pub size: TwoDimVector<f32>,\n\n pub position: TwoDimVector<f32>,\n\n pub velocity: TwoDimVector<f32>,\n", "file_path": "src/components.rs", "rank": 21, "score": 6.566067134979568 }, { "content": " pub fn overlapping_y(&self, other: &Self) -> bool {\n\n self.bottom() < other.top() && other.bottom() < self.top()\n\n }\n\n}\n\n\n\n#[derive(Component)]\n\n#[storage(VecStorage)]\n\npub struct Player {\n\n pub ticks: usize,\n\n pub state: PlayerState,\n\n pub two_dim: TwoDimObject,\n\n}\n\n\n\nimpl Player {\n\n pub fn new(two_dim: TwoDimObject) -> Self {\n\n Player {\n\n ticks: 0,\n\n state: PlayerState::Idle,\n\n two_dim,\n\n }\n\n }\n\n}", "file_path": "src/components.rs", "rank": 22, "score": 4.50912395427978 }, { "content": " )?\n\n .with(ControlSystem, \"control_system\", &[])\n\n .with(PhysicsSystem, \"physics_system\", &[\"control_system\"])\n\n .with(AnimationSystem, \"animation_system\", &[\"physics_system\"]);\n\n\n\n let mut game =\n\n Application::build(\"./\", InitialState)?.build(game_data)?;\n\n game.run();\n\n\n\n Ok(())\n\n}", "file_path": "src/main.rs", "rank": 23, "score": 4.366303198022184 }, { "content": "mod control;\n\npub use control::ControlSystem;\n\n\n\nmod physics;\n\npub use physics::PhysicsSystem;\n\n\n\nmod animation;\n\npub use animation::AnimationSystem;", "file_path": "src/systems/mod.rs", "rank": 24, "score": 4.21218176771771 }, { "content": "\n\n if current_state != next_state {\n\n player.state = next_state;\n\n player.ticks = 0; // reset animation if player state changed\n\n }\n\n\n\n let 
(sprite_initial_index, num_sprites) = match player.state {\n\n PlayerState::Idle => (15, 15),\n\n PlayerState::Walking => (60, 15),\n\n PlayerState::Running => (45, 15),\n\n PlayerState::Jumping => (35, 7),\n\n };\n\n let game_frames_per_animation_frame = 6;\n\n sprite.sprite_number = (player.ticks / game_frames_per_animation_frame) % num_sprites + sprite_initial_index;\n\n player.ticks = player.ticks.wrapping_add(1);\n\n\n\n player.two_dim.update_transform_position(&mut transform);\n\n }\n\n }\n\n}", "file_path": "src/systems/animation.rs", "rank": 25, "score": 3.936107282035258 }, { "content": " // calculate this so we know if the character should be able to jump\n\n let mut player_entities_on_ground = vec![];\n\n\n\n for (player, player_entity) in (&players, &entities).join() {\n\n for two_dim_object in (&two_dim_objects).join() {\n\n if player.two_dim.bottom() == two_dim_object.top() {\n\n player_entities_on_ground.push(player_entity);\n\n }\n\n }\n\n }\n\n\n\n for (mut player, player_entity) in (&mut players, &entities).join() {\n\n let player_on_ground = player_entities_on_ground.contains(&player_entity);\n\n\n\n let x_input = input.axis_value(\"horizontal\").expect(\"horizontal axis exists\");\n\n let jump_input = input.action_is_down(\"jump\").expect(\"jump action exists\");\n\n\n\n if x_input == 0. {\n\n player.two_dim.velocity.x = 0.;\n\n } else {\n", "file_path": "src/systems/control.rs", "rank": 26, "score": 3.5035045015370754 }, { "content": " let old_x = player.two_dim.right();\n\n let mut possible_new_x = old_x + player.two_dim.velocity.x;\n\n\n\n for two_dim_object in (&two_dim_objects).join() {\n\n if player.two_dim.overlapping_y(two_dim_object)\n\n && old_x <= two_dim_object.left()\n\n && possible_new_x >= two_dim_object.left() {\n\n // can't early return here, because we need to consider collision with more than one other object\n\n // don't need to set velocity back to zero here, but could depending on how we want the player animation to act\n\n possible_new_x = two_dim_object.left();\n\n }\n\n }\n\n // ensure player stays inside \"walls\" of display\n\n let new_x = possible_new_x.min(DISPLAY_WIDTH).max(PLAYER_W as f32);\n\n player.two_dim.set_right(new_x);\n\n } else if player.two_dim.velocity.x < 0. {\n\n // player moving left\n\n let old_x = player.two_dim.left();\n\n let mut possible_new_x = old_x + player.two_dim.velocity.x;\n\n\n", "file_path": "src/systems/physics.rs", "rank": 27, "score": 1.4284637923516998 }, { "content": " player.two_dim.velocity.x += 0.1 * x_input as f32;\n\n player.two_dim.velocity.x = player.two_dim.velocity.x.min(PLAYER_MAX_X_VELOCITY).max(-1. 
* PLAYER_MAX_X_VELOCITY);\n\n }\n\n\n\n if jump_input && player_on_ground {\n\n player.two_dim.velocity.y = 20.;\n\n };\n\n }\n\n }\n\n}", "file_path": "src/systems/control.rs", "rank": 28, "score": 1.1722393217797098 }, { "content": " for two_dim_object in (&two_dim_objects).join() {\n\n if player.two_dim.overlapping_y(two_dim_object)\n\n && old_x >= two_dim_object.right()\n\n && possible_new_x <= two_dim_object.right() {\n\n // can't early return here, because we need to consider collision with more than one other object\n\n // don't need to set velocity back to zero here, but could depending on how we want the player animation to act\n\n possible_new_x = two_dim_object.right();\n\n }\n\n }\n\n // ensure player stays inside \"walls\" of display\n\n let new_x = possible_new_x.min(DISPLAY_WIDTH - PLAYER_W as f32).max(0.);\n\n player.two_dim.set_left(new_x);\n\n };\n\n\n\n let player_on_ground = if player.two_dim.velocity.y > 0. {\n\n let old_y = player.two_dim.top();\n\n let possible_new_y = player.two_dim.top() + player.two_dim.velocity.y;\n\n let mut new_y = possible_new_y;\n\n\n\n for two_dim_object in (&two_dim_objects).join() {\n", "file_path": "src/systems/physics.rs", "rank": 29, "score": 1.0950137277062297 }, { "content": " if player.two_dim.overlapping_x(two_dim_object)\n\n && old_y <= two_dim_object.bottom()\n\n && new_y >= two_dim_object.bottom() {\n\n new_y = two_dim_object.bottom();\n\n player.two_dim.velocity.y = 0.;\n\n }\n\n }\n\n player.two_dim.set_top(new_y);\n\n\n\n false\n\n } else if player.two_dim.velocity.y < 0. {\n\n let old_y = player.two_dim.bottom();\n\n let possible_new_y = player.two_dim.bottom() + player.two_dim.velocity.y;\n\n let mut new_y = possible_new_y;\n\n let mut player_on_ground = false;\n\n\n\n for two_dim_object in (&two_dim_objects).join() {\n\n if player.two_dim.overlapping_x(two_dim_object)\n\n && old_y >= two_dim_object.top()\n\n && new_y <= two_dim_object.top() {\n", "file_path": "src/systems/physics.rs", "rank": 30, "score": 0.9569060451587696 }, { "content": " player_on_ground = true;\n\n new_y = two_dim_object.top();\n\n player.two_dim.velocity.y = 0.;\n\n }\n\n }\n\n player.two_dim.set_bottom(new_y);\n\n\n\n player_on_ground\n\n } else {\n\n let mut player_on_ground = false;\n\n\n\n for two_dim_object in (&two_dim_objects).join() {\n\n if player.two_dim.overlapping_x(two_dim_object)\n\n && player.two_dim.bottom() == two_dim_object.top() {\n\n player_on_ground = true;\n\n }\n\n }\n\n\n\n player_on_ground\n\n };\n", "file_path": "src/systems/physics.rs", "rank": 31, "score": 0.81207408712233 } ]
Rust
src/lib.rs
psFried/pgen
008a4c680cd3651da5442780076523df1e8df86e
#[macro_use] extern crate failure; #[macro_use] extern crate lazy_static; extern crate byteorder; extern crate encoding; extern crate itertools; extern crate lalrpop_util; extern crate rand; extern crate regex; extern crate rustyline; extern crate string_cache; mod arguments; pub(crate) mod builtins; mod context; pub mod interpreter; pub mod program; pub mod repl; mod types; pub mod verbosity; mod writer; #[cfg(test)] mod fun_test; pub use self::arguments::Arguments; pub use self::context::ProgramContext; pub use self::interpreter::ast::GenType; pub use self::interpreter::prototype::{ BoundArgument, BuiltinFunctionCreator, BuiltinFunctionPrototype, CreateFunctionResult, FunctionPrototype, InterpretedFunctionPrototype, }; pub use self::interpreter::{Interpreter, Source}; pub use self::types::{ ConstBin, ConstBoolean, ConstDecimal, ConstInt, ConstString, ConstUint, OutputType, }; pub use self::writer::DataGenOutput; use failure::Error; use std::fmt::Debug; use std::rc::Rc; pub type IString = string_cache::DefaultAtom; pub trait RunnableFunction<T>: Debug { fn gen_value(&self, context: &mut ProgramContext) -> Result<T, Error>; fn write_value( &self, context: &mut ProgramContext, output: &mut DataGenOutput, ) -> Result<(), Error>; } pub type DynFun<T> = Rc<RunnableFunction<T>>; pub type DynStringFun = DynFun<IString>; pub type DynUintFun = DynFun<u64>; pub type DynIntFun = DynFun<i64>; pub type DynDecimalFun = DynFun<f64>; pub type DynBooleanFun = DynFun<bool>; pub type DynBinFun = DynFun<Vec<u8>>; #[derive(Debug, Clone)] pub enum AnyFunction { String(DynStringFun), Uint(DynUintFun), Int(DynIntFun), Decimal(DynDecimalFun), Boolean(DynBooleanFun), Bin(DynBinFun), } impl AnyFunction { pub fn get_type(&self) -> GenType { match *self { AnyFunction::String(_) => GenType::String, AnyFunction::Uint(_) => GenType::Uint, AnyFunction::Int(_) => GenType::Int, AnyFunction::Decimal(_) => GenType::Decimal, AnyFunction::Boolean(_) => GenType::Boolean, AnyFunction::Bin(_) => GenType::Bin, } } pub fn write_value( &self, context: &mut ProgramContext, output: &mut DataGenOutput, ) -> Result<(), Error> { match *self { AnyFunction::String(ref fun) => fun.write_value(context, output), AnyFunction::Uint(ref fun) => fun.write_value(context, output), AnyFunction::Int(ref fun) => fun.write_value(context, output), AnyFunction::Decimal(ref fun) => fun.write_value(context, output), AnyFunction::Boolean(ref fun) => fun.write_value(context, output), AnyFunction::Bin(ref fun) => fun.write_value(context, output), } } } macro_rules! type_conversions { ($([$as_fn_name:ident, $req_fn_name:ident, $return_type:ty, $do_match:path]),*) => { impl AnyFunction { $( pub fn $as_fn_name(self) -> Result<$return_type, AnyFunction> { match self { $do_match(fun) => Ok(fun), other @ _ => Err(other) } } pub fn $req_fn_name(self) -> Result<$return_type, Error> { self.$as_fn_name().map_err(|fun| { format_err!("Invalid argument type, expected: {}, actual: {}", stringify!($return_type), fun.get_type()) }) } )* } } } type_conversions!{ [as_string, require_string, DynStringFun, AnyFunction::String], [as_int, require_int, DynIntFun, AnyFunction::Int], [as_uint, require_uint, DynUintFun, AnyFunction::Uint], [as_decimal, require_decimal, DynDecimalFun, AnyFunction::Decimal], [as_boolean, require_boolean, DynBooleanFun, AnyFunction::Boolean], [as_bin, require_bin, DynBinFun, AnyFunction::Bin] }
#[macro_use] extern crate failure; #[macro_use] extern crate lazy_static; extern crate byteorder; extern crate encoding; extern crate itertools; extern crate lalrpop_util; extern crate rand; extern crate regex; extern crate rustyline; extern crate string_cache; mod arguments; pub(crate) mod builtins; mod context; pub mod interpreter; pub mod program; pub mod repl; mod types; pub mod verbosity; mod writer; #[cfg(test)] mod fun_test; pub use self::arguments::Arguments; pub use self::context::ProgramContext; pub use self::interpreter::ast::GenType; pub use self::interpreter::prototype::{ BoundArgument, BuiltinFunctionCreator, BuiltinFunctionPrototype, CreateFunctionResult, FunctionPrototype, InterpretedFunctionPrototype, }; pub use self::interpreter::{Interpreter, Source}; pub use self::types::{ ConstBin, ConstBoolean, ConstDecimal, ConstInt, ConstString, ConstUint, OutputType, }; pub use self::writer::DataGenOutput; use failure::Error; use std::fmt::Debug; use std::rc::Rc; pub type IString = string_cache::DefaultAtom; pub trait RunnableFunction<T>: Debug { fn gen_value(&self, context: &mut ProgramContext) -> Result<T, Error>; fn write_value( &self, context: &mut ProgramContext, output: &mut DataGenOutput, ) -> Result<(), Error>; } pub type DynFun<T> = Rc<RunnableFunction<T>>; pub type DynStringFun = DynFun<IString>; pub type DynUintFun = DynFun<u64>; pub type DynIntFun = DynFun<i64>; pub type DynDecimalFun = DynFun<f64>; pub type DynBooleanFun = DynFun<bool>; pub type DynBinFun = DynFun<Vec<u8>>; #[derive(Debug, Clone)] pub enum AnyFunction { String(DynStringFun), Uint(DynUintFun), Int(DynIntFun), Decimal(DynDecimalFun), Boolean(DynBooleanFun), Bin(DynBinFun), } impl AnyFunction {
pub fn write_value( &self, context: &mut ProgramContext, output: &mut DataGenOutput, ) -> Result<(), Error> { match *self { AnyFunction::String(ref fun) => fun.write_value(context, output), AnyFunction::Uint(ref fun) => fun.write_value(context, output), AnyFunction::Int(ref fun) => fun.write_value(context, output), AnyFunction::Decimal(ref fun) => fun.write_value(context, output), AnyFunction::Boolean(ref fun) => fun.write_value(context, output), AnyFunction::Bin(ref fun) => fun.write_value(context, output), } } } macro_rules! type_conversions { ($([$as_fn_name:ident, $req_fn_name:ident, $return_type:ty, $do_match:path]),*) => { impl AnyFunction { $( pub fn $as_fn_name(self) -> Result<$return_type, AnyFunction> { match self { $do_match(fun) => Ok(fun), other @ _ => Err(other) } } pub fn $req_fn_name(self) -> Result<$return_type, Error> { self.$as_fn_name().map_err(|fun| { format_err!("Invalid argument type, expected: {}, actual: {}", stringify!($return_type), fun.get_type()) }) } )* } } } type_conversions!{ [as_string, require_string, DynStringFun, AnyFunction::String], [as_int, require_int, DynIntFun, AnyFunction::Int], [as_uint, require_uint, DynUintFun, AnyFunction::Uint], [as_decimal, require_decimal, DynDecimalFun, AnyFunction::Decimal], [as_boolean, require_boolean, DynBooleanFun, AnyFunction::Boolean], [as_bin, require_bin, DynBinFun, AnyFunction::Bin] }
pub fn get_type(&self) -> GenType { match *self { AnyFunction::String(_) => GenType::String, AnyFunction::Uint(_) => GenType::Uint, AnyFunction::Int(_) => GenType::Int, AnyFunction::Decimal(_) => GenType::Decimal, AnyFunction::Boolean(_) => GenType::Boolean, AnyFunction::Bin(_) => GenType::Bin, } }
function_block-full_function
[ { "content": "fn execute_fn(function: AnyFunction, context: &mut ProgramContext) -> Result<(), Error> {\n\n let out = io::stdout();\n\n let mut lock = out.lock();\n\n\n\n let result = {\n\n let mut dgen_out = DataGenOutput::new(&mut lock);\n\n function.write_value(context, &mut dgen_out).and_then(|_| {\n\n // need to write a newline at the end to ensure that the last line of output doesn't get clobbered\n\n // by the next readline prompt\n\n dgen_out\n\n .write_str(\"\\n\")\n\n .and_then(|_| {\n\n // probably not necessary but it's good to do the write thing\n\n dgen_out.flush().map_err(Into::into)\n\n })\n\n .map_err(Into::into)\n\n })\n\n };\n\n\n\n if let Err(err) = result {\n", "file_path": "src/repl/mod.rs", "rank": 0, "score": 338891.7552707153 }, { "content": "fn handle_error(context: &mut ProgramContext, error: &Error) {\n\n use std::fmt::Write;\n\n\n\n if let Some(mut out) = context.error_output(crate::verbosity::VERBOSE) {\n\n writeln!(out, \"Program Runtime Error: {}\", error).expect(MUY_MALO);\n\n }\n\n if let Some(mut out) = context.error_output(crate::verbosity::DGEN_DEBUG) {\n\n writeln!(out, \"{}\", error.backtrace()).expect(MUY_MALO);\n\n }\n\n\n\n if let Some(program_error) = context.reset_error() {\n\n // program_error should not generally indicate an error/bug in dgen itself\n\n // it is generally caused by invalid code that was passed to the interpreter\n\n if let Some(mut out) = context.error_output(crate::verbosity::QUIET) {\n\n writeln!(out, \"{}\", program_error).expect(MUY_MALO);\n\n }\n\n }\n\n}\n\n\n\nconst MUY_MALO: &str = \"Failed to print to error stream\";\n", "file_path": "src/program/mod.rs", "rank": 1, "score": 312123.28711714654 }, { "content": "pub fn parse_program(source_name: IString, input: &str) -> Result<Program, Error> {\n\n ProgramParser::new()\n\n .parse(input)\n\n .map_err(ParseErrorInner::from)\n\n .map_err(|e| DgenParseError {\n\n source_name,\n\n input: input.to_owned(),\n\n inner: e,\n\n }).map_err(Into::into)\n\n}\n\n\n\nimpl Display for DgenParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Error parsing '{}': {}\\n\",\n\n self.source_name, self.inner.description\n\n )?;\n\n if let Some(offset) = self.inner.location {\n\n let err_region = SourceErrRegion::new(self.input.as_str(), offset);\n\n write!(f, \"{}\", err_region)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/interpreter/parser.rs", "rank": 2, "score": 272214.7664601712 }, { "content": "pub fn run_program(iterations: u64, program: &str) -> Result<Vec<u8>, Error> {\n\n let mut out = Vec::new();\n\n {\n\n let mut output = DataGenOutput::new(&mut out);\n\n let mut prog = Runner::new(\n\n iterations,\n\n program.to_owned(),\n\n create_context(),\n\n Interpreter::new(),\n\n );\n\n prog.add_std_lib();\n\n prog.run(&mut output).map_err(|error| {\n\n format_err!(\"Failed to run program. 
Eror: {}\", error)\n\n })?;\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/fun_test.rs", "rank": 3, "score": 219635.64354000037 }, { "content": "fn run_program(program: Runner) -> Result<(), Error> {\n\n let sout = std::io::stdout();\n\n // lock stdout once at the beginning so we don't have to keep locking/unlocking it\n\n let lock = sout.lock();\n\n let mut buf_writer = ::std::io::BufWriter::new(lock);\n\n let mut output = dgen::DataGenOutput::new(&mut buf_writer);\n\n\n\n program.run(&mut output)\n\n}\n", "file_path": "src/main.rs", "rank": 4, "score": 217954.27337482787 }, { "content": "pub fn create_context() -> ProgramContext {\n\n ProgramContext::from_seed(*RAND_SEED, crate::verbosity::NORMAL)\n\n}\n", "file_path": "src/fun_test.rs", "rank": 5, "score": 203991.2379608529 }, { "content": "pub trait OutputType {\n\n fn write_output(&self, writer: &mut DataGenOutput) -> Result<(), Error>;\n\n}\n\n\n\nimpl OutputType for i64 {\n\n fn write_output(&self, writer: &mut DataGenOutput) -> Result<(), Error> {\n\n writer.write_string(self)\n\n }\n\n}\n\nimpl OutputType for u64 {\n\n fn write_output(&self, writer: &mut DataGenOutput) -> Result<(), Error> {\n\n writer.write_string(self)\n\n }\n\n}\n\nimpl OutputType for f64 {\n\n fn write_output(&self, writer: &mut DataGenOutput) -> Result<(), Error> {\n\n writer.write_string(self)\n\n }\n\n}\n\nimpl OutputType for bool {\n", "file_path": "src/types.rs", "rank": 6, "score": 201520.43149364484 }, { "content": "fn find_region_offsets(file: &mut File, delimiter: &str) -> Result<Vec<u64>, io::Error> {\n\n file.seek(SeekFrom::Start(0))?;\n\n let mut result = Vec::with_capacity(32);\n\n let mut buffer = [0; 8192];\n\n let delimiter_bytes = delimiter.as_bytes();\n\n let delimiter_length = delimiter_bytes.len();\n\n\n\n let mut carry_over_len = 0;\n\n let mut index_adder = 0;\n\n loop {\n\n let nread = do_read(file, &mut buffer[carry_over_len..])?;\n\n if nread == 0 {\n\n break;\n\n }\n\n\n\n let buffer_end = nread - carry_over_len;\n\n let mut buffer_idx = 0;\n\n while buffer_idx < buffer_end {\n\n if is_region_start(&buffer[..], buffer_idx, delimiter_bytes) {\n\n let resolved_idx = buffer_idx as u64 + index_adder;\n", "file_path": "src/builtins/from_file.rs", "rank": 7, "score": 181230.20542152983 }, { "content": "pub trait DgenCommand: Sized {\n\n fn execute(self, out: &mut DataGenOutput) -> Result<(), Error>;\n\n}\n\n\n\npub struct Runner {\n\n iterations: u64,\n\n source: UnreadSource,\n\n runtime_context: ProgramContext,\n\n interpreter: Interpreter,\n\n}\n\n\n\nimpl DgenCommand for Runner {\n\n fn execute(self, out: &mut DataGenOutput) -> Result<(), Error> {\n\n self.run(out)\n\n }\n\n}\n\n\n\nimpl Runner {\n\n pub fn new<S: Into<UnreadSource>>(\n\n iterations: u64,\n", "file_path": "src/program/mod.rs", "rank": 8, "score": 177829.0695276389 }, { "content": "fn filter_matching_arguments<'a, I: Iterator<Item = &'a FunctionPrototype>>(name: IString, arguments: &[AnyFunction], iter: I, caller_source_ref: &SourceRef) -> Result<&'a FunctionPrototype, CompileError> {\n\n let mut first_from_same_module: Option<&'a FunctionPrototype> = None;\n\n let mut second_from_same_module: Option<&'a FunctionPrototype> = None;\n\n\n\n let mut first_from_other_module: Option<&'a FunctionPrototype> = None;\n\n let mut second_from_other_module: Option<&'a FunctionPrototype> = None;\n\n\n\n for candidate in iter {\n\n if candidate.do_arguments_match(arguments) {\n\n // whether the module of the caller is the same as the module of the candidate function\n\n // if 
the candidate has no source_ref, then it means that it's a builtin function\n\n let is_same_module = candidate.get_source().map(|source| source.module_name() == caller_source_ref.module_name()).unwrap_or(false);\n\n\n\n if is_same_module {\n\n set_first_empty(candidate, &mut first_from_same_module, &mut second_from_same_module);\n\n } else {\n\n set_first_empty(candidate, &mut first_from_other_module, &mut second_from_other_module);\n\n }\n\n }\n\n }\n", "file_path": "src/interpreter/mod.rs", "rank": 9, "score": 173261.79169283563 }, { "content": "pub fn get_default_builtins_module() -> Module {\n\n Module::new_builtin(BUILTIN_FNS.iter().map(|fun| *fun))\n\n}", "file_path": "src/builtins/mod.rs", "rank": 10, "score": 170889.78097534506 }, { "content": "pub fn assert_bin_output_is_expected(program: &str, expected: &[u8]) {\n\n let results = run_program(1, program).expect(\"Failed to run program\");\n\n assert_eq!(results.as_slice(), expected);\n\n}\n\n\n", "file_path": "src/fun_test.rs", "rank": 12, "score": 167055.68539792736 }, { "content": "pub fn default_module_name() -> IString {\n\n \"default\".into()\n\n}\n\n\n\nimpl UnreadSource {\n\n pub fn get_name(&self) -> IString {\n\n match *self {\n\n UnreadSource::File(ref pb) => pb\n\n .file_stem()\n\n .map(|name| {\n\n name.to_str()\n\n .map(Into::into)\n\n .unwrap_or_else(|| default_module_name())\n\n }).unwrap_or_else(|| default_module_name()),\n\n UnreadSource::String(_) => default_module_name(),\n\n UnreadSource::Builtin(ref name, _) => (*name).into(),\n\n UnreadSource::Stdin => default_module_name(),\n\n }\n\n }\n\n\n", "file_path": "src/interpreter/source.rs", "rank": 13, "score": 165064.87511503889 }, { "content": "fn do_read<R: Read>(read: &mut R, buf: &mut [u8]) -> io::Result<usize> {\n\n loop {\n\n match read.read(buf) {\n\n Ok(n) => return Ok(n),\n\n Err(ref e) if e.kind() == io::ErrorKind::Interrupted => { /* loop around and retry */ }\n\n err @ _ => return err,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/builtins/from_file.rs", "rank": 14, "score": 159195.22922726703 }, { "content": "fn is_unexpected_eof_parse_err(err: &Error) -> bool {\n\n if let Some(parse_err) = err.downcast_ref::<DgenParseError>() {\n\n if parse_err.is_unexpected_eof() {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\nconst HELP_TXT: &str = r##\"dgen shell:\n\nYou can type dgen code as you normally would. All language features are supported.\n\nSpecial commands are:\n\n\n\nshow -> prints the current module source\n\nclear -> clears all functions that have been declared\n\nhelp -> prints this message\n\n\"##;\n", "file_path": "src/repl/mod.rs", "rank": 15, "score": 156280.72177411825 }, { "content": "fn create_concat(args: Arguments) -> CreateFunctionResult {\n\n let funs = args.get_required_varargs(CONCAT_ARG_NAME, 0, AnyFunction::require_string)?;\n\n Ok(AnyFunction::String(Rc::new(Concat { funs })))\n\n}\n\n\n", "file_path": "src/builtins/concat.rs", "rank": 16, "score": 155254.49630416723 }, { "content": "fn create_to_string(args: Arguments) -> CreateFunctionResult {\n\n let fun = args.require_any(TO_STRING_PARAM, 0)?;\n\n\n\n match fun {\n\n str_fun @ AnyFunction::String(_) => Ok(str_fun),\n\n AnyFunction::Boolean(fun) => Ok(ToString::new(fun)),\n\n AnyFunction::Decimal(fun) => Ok(ToString::new(fun)),\n\n AnyFunction::Int(fun) => Ok(ToString::new(fun)),\n\n AnyFunction::Uint(fun) => Ok(ToString::new(fun)),\n\n AnyFunction::Bin(_) => Err(format_err!(\"Invalid binary argument to to_string function\")),\n\n }\n\n}\n\n\n\nmacro_rules! 
make_to_string {\n\n ($proto_name:ident, $gen_type:expr) => {\n\n pub const $proto_name: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"to_string\",\n\n description: \"Converts its input to a string using the default formating\",\n\n arguments: &[(TO_STRING_PARAM, $gen_type)],\n\n variadic: false,\n\n create_fn: &create_to_string,\n\n };\n\n };\n\n}\n\n\n\nmake_to_string!(BOOLEAN_TO_STRING_BUILTIN, GenType::Boolean);\n\nmake_to_string!(DECIMAL_TO_STRING_BUILTIN, GenType::Decimal);\n\nmake_to_string!(INT_TO_STRING_BUILTIN, GenType::Int);\n\nmake_to_string!(UINT_TO_STRING_BUILTIN, GenType::Uint);\n", "file_path": "src/builtins/to_string.rs", "rank": 17, "score": 155254.49630416723 }, { "content": "fn create_env(args: Arguments) -> CreateFunctionResult {\n\n let key = args.required_arg(ARG_NAME, 0, AnyFunction::require_string)?;\n\n Ok(AnyFunction::String(Rc::new(EnvVar { key })))\n\n}\n\n\n\npub const ENV_VAR: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"env\",\n\n description:\n\n \"Returns the value of the given env variable, throws an error if the env var is not set\",\n\n arguments: &[(ARG_NAME, GenType::String)],\n\n variadic: false,\n\n create_fn: &create_env,\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::fun_test::test_program_success;\n\n\n\n #[test]\n\n fn returns_the_value_of_an_environment_variable() {\n\n let (key, value) = ::std::env::vars()\n\n .next()\n\n .expect(\"At least one environment variable has to be set\");\n\n let program = format!(r#\"env(\"{}\")\"#, key);\n\n test_program_success(1, program.as_str(), value.as_str());\n\n }\n\n}\n", "file_path": "src/builtins/env.rs", "rank": 18, "score": 155254.49630416723 }, { "content": "fn create_words_fun(_: Arguments) -> CreateFunctionResult {\n\n use std::path::Path;\n\n use crate::ConstString;\n\n\n\n let words_paths = [\"/usr/share/dict/words\", \"/usr/dict/words\"];\n\n let path = words_paths\n\n .iter()\n\n .filter(|path| Path::new(path).is_file())\n\n .next()\n\n .map(|path| ConstString::new(*path))\n\n .ok_or_else(|| {\n\n format_err!(\n\n \"Could not find a words file in the usual places: {:?} Try using `select_from_file(String, String)` instead\",\n\n words_paths\n\n )\n\n })?;\n\n let delimiter = ConstString::new(\"\\n\");\n\n\n\n let args = Arguments::new(vec![path, delimiter]);\n\n create_file_fun(args)\n", "file_path": "src/builtins/from_file.rs", "rank": 19, "score": 155254.49630416723 }, { "content": "pub fn process_string_escapes(input: &str) -> Result<IString, &'static str> {\n\n let mut result = String::with_capacity(input.len());\n\n\n\n let mut char_iter = input.chars();\n\n loop {\n\n let next_char = {\n\n let c = char_iter.next();\n\n if c.is_none() {\n\n break;\n\n }\n\n c.unwrap()\n\n };\n\n\n\n if next_char == '\\\\' {\n\n // process escape sequences\n\n let escape_id = char_iter.next().ok_or(\"Unfinished escape sequence\")?;\n\n let result_char = match escape_id {\n\n '\\\\' => '\\\\',\n\n '\"' => '\"',\n\n 't' => '\\t',\n", "file_path": "src/interpreter/ast.rs", "rank": 20, "score": 155081.5693084385 }, { "content": "fn has_matching_module(interpreter: &Interpreter, module_name: &str) -> Result<(), ()> {\n\n if interpreter\n\n .module_iterator()\n\n .any(|m| m.name.contains(module_name))\n\n {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/program/help.rs", "rank": 21, "score": 154725.73937507236 }, { "content": "fn select_fun<'a, 'b, T>(ctx: &'a mut ProgramContext, values: &'b [DynFun<T>]) -> &'b 
DynFun<T> {\n\n let i = ctx.gen_range_exclusive(0, values.len());\n\n &values[i]\n\n}\n\n\n\nimpl<T: Debug> RunnableFunction<T> for SelectFun<T> {\n\n fn gen_value(&self, ctx: &mut ProgramContext) -> Result<T, Error> {\n\n let fun = select_fun(ctx, self.wrapped.as_slice());\n\n fun.gen_value(ctx)\n\n }\n\n fn write_value(&self, ctx: &mut ProgramContext, out: &mut DataGenOutput) -> Result<(), Error> {\n\n let fun = select_fun(ctx, self.wrapped.as_slice());\n\n fun.write_value(ctx, out)\n\n }\n\n}\n\n\n\nconst SELECT_ARG: &str = \"gen\";\n\n\n\nmacro_rules! make_select_proto {\n\n ($select_name:ident, $create_select_fn_name:ident, $stable_select_name:ident, $create_stable_select_fn_name:ident, $gen_type:expr, $any_fun_type:path, $ret_type:ty, $convert_fun:path) => {\n", "file_path": "src/builtins/select.rs", "rank": 22, "score": 153781.54415612016 }, { "content": "pub trait FunProto {\n\n fn get_name(&self) -> &str;\n\n fn arg_count(&self) -> usize;\n\n fn get_arg(&self, index: usize) -> (&str, GenType);\n\n fn get_description(&self) -> &str;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct InterpretedFunctionPrototype {\n\n source_ref: SourceRef,\n\n function_name: IString,\n\n arguments: Vec<MacroArgument>,\n\n doc_comments: String, // no point in interning these\n\n body: WithSpan<Expr>,\n\n}\n\n\n\nimpl FunProto for InterpretedFunctionPrototype {\n\n fn get_name(&self) -> &str {\n\n &*self.function_name\n\n }\n", "file_path": "src/interpreter/prototype.rs", "rank": 23, "score": 152628.42366321172 }, { "content": "fn process_unicode_escape(char_iter: &mut Chars) -> Result<char, &'static str> {\n\n const ERR_MSG: &str = \"invalid unicode escape sequence\";\n\n let l_curly = char_iter.next().ok_or(ERR_MSG)?;\n\n if l_curly != '{' {\n\n return Err(ERR_MSG);\n\n }\n\n\n\n let mut sequence = String::with_capacity(6);\n\n loop {\n\n let c = char_iter.next().ok_or(ERR_MSG)?;\n\n if c == '}' {\n\n break;\n\n } else {\n\n sequence.push(c);\n\n }\n\n if sequence.len() > 6 {\n\n return Err(ERR_MSG);\n\n }\n\n }\n\n\n", "file_path": "src/interpreter/ast.rs", "rank": 24, "score": 152568.08942017722 }, { "content": "fn create_file_fun(args: Arguments) -> CreateFunctionResult {\n\n let (filepath, delimiter) = args.require_2_args(\n\n FILEPATH_PARAM,\n\n AnyFunction::require_string,\n\n DELIMITER_PARAM,\n\n AnyFunction::require_string,\n\n )?;\n\n Ok(AnyFunction::String(SelectFromFile::new(\n\n filepath, delimiter,\n\n )))\n\n}\n\n\n", "file_path": "src/builtins/from_file.rs", "rank": 25, "score": 151949.0943860322 }, { "content": "fn create_concat_bin(args: Arguments) -> CreateFunctionResult {\n\n let funs = args.get_required_varargs(CONCAT_ARG_NAME, 0, AnyFunction::require_bin)?;\n\n Ok(AnyFunction::Bin(Rc::new(Concat { funs })))\n\n}\n\n\n\npub const CONCAT_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"concat\",\n\n description: \"concatenates the input strings into a single output string\",\n\n arguments: &[(CONCAT_ARG_NAME, GenType::String)],\n\n variadic: true,\n\n create_fn: &create_concat,\n\n};\n\n\n\npub const CONCAT_BIN_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"concat\",\n\n description: \"concatenates the input bytes into a single output\",\n\n arguments: &[(CONCAT_ARG_NAME, GenType::Bin)],\n\n variadic: true,\n\n create_fn: &create_concat_bin,\n\n};\n", "file_path": "src/builtins/concat.rs", "rank": 26, "score": 151949.0943860322 }, { "content": "fn create_string_gen(args: Arguments) -> 
CreateFunctionResult {\n\n let (length_gen, min_cp_inclusive, max_cp_inclusive) = args.require_3_args(\n\n \"length\",\n\n AnyFunction::require_uint,\n\n \"min_codepoint_inclusive\",\n\n AnyFunction::require_uint,\n\n \"max_codepoint_inclusive\",\n\n AnyFunction::require_uint,\n\n )?;\n\n Ok(AnyFunction::String(Rc::new(StringGenerator {\n\n length_gen,\n\n max_cp_inclusive,\n\n min_cp_inclusive,\n\n })))\n\n}\n\n\n\npub const STRING_GEN_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"string\",\n\n description:\n\n \"constructs a string using the given length and min and max code point values inclusive\",\n\n arguments: &[\n\n (\"length\", GenType::Uint),\n\n (\"min_codepoint_inclusive\", GenType::Uint),\n\n (\"max_codepoint_inclusive\", GenType::Uint),\n\n ],\n\n variadic: false,\n\n create_fn: &create_string_gen,\n\n};\n\n\n", "file_path": "src/builtins/strings.rs", "rank": 27, "score": 151949.0943860322 }, { "content": "fn create_str_len(args: Arguments) -> CreateFunctionResult {\n\n let wrapped = args.required_arg(\"string\", 0, AnyFunction::require_string)?;\n\n Ok(AnyFunction::Uint(Rc::new(StringLength { wrapped })))\n\n}\n\n\n\npub const STRING_LENGTH_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"string_length\",\n\n description: \"returns the length in utf8-encoded bytes of the generated string\",\n\n arguments: &[(\"string\", GenType::String)],\n\n variadic: false,\n\n create_fn: &create_str_len,\n\n};\n\n\n", "file_path": "src/builtins/strings.rs", "rank": 28, "score": 151949.0943860322 }, { "content": "fn create_string_bytes(args: Arguments) -> CreateFunctionResult {\n\n let (encoding, string) = args.require_2_args(\n\n \"encoding\",\n\n AnyFunction::require_string,\n\n \"string\",\n\n AnyFunction::require_string,\n\n )?;\n\n\n\n Ok(AnyFunction::Bin(Rc::new(StringBytes { encoding, string })))\n\n}\n\n\n\npub const STRING_ENCODE_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"string_bytes\",\n\n description: \"encodes strings using the given encoding, provided as a WHATWG encoding label\",\n\n arguments: &[(\"encoding\", GenType::String), (\"string\", GenType::String)],\n\n variadic: false,\n\n create_fn: &create_string_bytes,\n\n};\n\n\n\n#[cfg(test)]\n", "file_path": "src/builtins/strings.rs", "rank": 29, "score": 151949.0943860322 }, { "content": "fn create_bin_len(args: Arguments) -> CreateFunctionResult {\n\n let bin = args.required_arg(\"binary\", 0, AnyFunction::require_bin)?;\n\n Ok(AnyFunction::Uint(Rc::new(BinLength(bin))))\n\n}\n\n\n\npub const BIN_LENGTH: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"bin_length\",\n\n description:\n\n \"returns the length of the given binary as a Uint. 
Mostly useful in mapped functions\",\n\n arguments: &[(\"binary\", GenType::Bin)],\n\n variadic: false,\n\n create_fn: &create_bin_len,\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::fun_test::test_program_success;\n\n\n\n #[test]\n\n fn bin_length_returns_length_of_binary() {\n\n let program = \"bin_length(sequence([0x00, 0x01, 0x02], [0xFF, 0xFF], [0x00]))\";\n\n let expected = \"321\";\n\n test_program_success(3, program, expected);\n\n }\n\n}\n", "file_path": "src/builtins/bin_length.rs", "rank": 30, "score": 148837.07107409296 }, { "content": "fn create_repeat_delim(args: Arguments) -> CreateFunctionResult {\n\n let count = args.required_arg(COUNT_PARAM, 0, AnyFunction::require_uint)?;\n\n let prefix = args.required_arg(PREFIX_PARAM, 1, AnyFunction::require_string)?;\n\n let repeated = args.required_arg(TO_REPEAT_PARAM, 2, AnyFunction::require_string)?;\n\n let delimiter = args.required_arg(DELIMITER_PARAM, 3, AnyFunction::require_string)?;\n\n let suffix = args.required_arg(SUFFIX_PARAM, 4, AnyFunction::require_string)?;\n\n\n\n let fun = RepeatDelimited {\n\n count,\n\n prefix,\n\n repeated,\n\n delimiter,\n\n suffix,\n\n };\n\n Ok(AnyFunction::String(Rc::new(fun)))\n\n}\n", "file_path": "src/builtins/repeat_delim.rs", "rank": 31, "score": 148837.07107409296 }, { "content": "fn create_bin_repeat_delim(args: Arguments) -> CreateFunctionResult {\n\n let count = args.required_arg(COUNT_PARAM, 0, AnyFunction::require_uint)?;\n\n let prefix = args.required_arg(PREFIX_PARAM, 1, AnyFunction::require_bin)?;\n\n let repeated = args.required_arg(TO_REPEAT_PARAM, 2, AnyFunction::require_bin)?;\n\n let delimiter = args.required_arg(DELIMITER_PARAM, 3, AnyFunction::require_bin)?;\n\n let suffix = args.required_arg(SUFFIX_PARAM, 4, AnyFunction::require_bin)?;\n\n\n\n let fun = RepeatDelimited {\n\n count,\n\n prefix,\n\n repeated,\n\n delimiter,\n\n suffix,\n\n };\n\n Ok(AnyFunction::Bin(Rc::new(fun)))\n\n}\n\n\n\npub const REPEAT_DELIM_BUILTIN: &BuiltinFunctionPrototype = &BuiltinFunctionPrototype {\n\n function_name: \"repeat_delimited\",\n\n description: \"Formats the output by repeating the given generator separated by the delimiter\",\n", "file_path": "src/builtins/repeat_delim.rs", "rank": 32, "score": 145901.93858940597 }, { "content": "pub fn wrap(\n\n any_function: AnyFunction,\n\n function_name: IString,\n\n source_ref: SourceRef,\n\n) -> AnyFunction {\n\n match any_function {\n\n AnyFunction::Boolean(fun) => {\n\n AnyFunction::Boolean(RuntimeWrapper::new(fun, function_name, source_ref))\n\n }\n\n AnyFunction::Bin(fun) => {\n\n AnyFunction::Bin(RuntimeWrapper::new(fun, function_name, source_ref))\n\n }\n\n AnyFunction::String(fun) => {\n\n AnyFunction::String(RuntimeWrapper::new(fun, function_name, source_ref))\n\n }\n\n AnyFunction::Uint(fun) => {\n\n AnyFunction::Uint(RuntimeWrapper::new(fun, function_name, source_ref))\n\n }\n\n AnyFunction::Int(fun) => {\n\n AnyFunction::Int(RuntimeWrapper::new(fun, function_name, source_ref))\n", "file_path": "src/interpreter/runtime_wrapper.rs", "rank": 33, "score": 144964.1361291851 }, { "content": "fn create_context(args: &CliOptions) -> ProgramContext {\n\n let verbosity = args.get_verbosity();\n\n args.seed\n\n .as_ref()\n\n .map(|s| {\n\n let resolved_seed = string_to_byte_array(s);\n\n ProgramContext::from_seed(resolved_seed, verbosity)\n\n }).unwrap_or_else(|| ProgramContext::from_random_seed(verbosity))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 140982.3895645679 }, { "content": "fn writer_benches(c: &mut 
Criterion) {\n\n let mut bytes = Vec::with_capacity(8 * 1024);\n\n\n\n let sizes = &[1, 16, 256, 4096];\n\n let inputs = sizes\n\n .iter()\n\n .cloned()\n\n .map(RandomBytes::with_length)\n\n .collect::<Vec<RandomBytes>>();\n\n\n\n c.bench_function_over_inputs(\n\n \"datagen_output\",\n\n move |bencher, input| {\n\n bencher.iter(|| {\n\n bytes.clear();\n\n let mut out = DataGenOutput::new(&mut bytes);\n\n out.write_bytes(input.as_slice()).unwrap()\n\n });\n\n },\n\n inputs.clone(),\n", "file_path": "benches/bench.rs", "rank": 35, "score": 140723.9802957456 }, { "content": "fn set_first_empty<T>(value: T, opt1: &mut Option<T>, opt2: &mut Option<T>) {\n\n if opt1.is_none() {\n\n *opt1 = Some(value);\n\n } else if opt2.is_none() {\n\n *opt2 = Some(value)\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 36, "score": 136950.25107022867 }, { "content": "pub fn test_program_success(iterations: u64, input: &str, expected_output: &str) {\n\n let results = run_program(iterations, input).expect(\"Failed to run program\");\n\n let as_str = String::from_utf8(results).expect(\"program results were not valid utf8\");\n\n if expected_output != as_str.as_str() {\n\n panic!(\n\n \"Incorrect program output, expected: '{}', actual: '{}', actual_debug: '{:?}'\",\n\n expected_output, as_str, as_str\n\n );\n\n }\n\n}\n", "file_path": "src/fun_test.rs", "rank": 37, "score": 134593.9476885793 }, { "content": "fn choose_best_match<'a>(opt1: Option<&'a FunctionPrototype>, opt2: Option<&'a FunctionPrototype>, called_name: &IString, actual_args: &[AnyFunction], caller_source_ref: &SourceRef) -> Result<&'a FunctionPrototype, CompileError> {\n\n match (opt1, opt2) {\n\n (Some(a), Some(b)) => {\n\n if a.is_variadic() && !b.is_variadic() {\n\n Ok(b)\n\n } else if !a.is_variadic() && b.is_variadic() {\n\n Ok(a)\n\n } else {\n\n Err(CompileError::ambiguous_function_call(called_name.clone(), actual_args, a, b, caller_source_ref.clone()))\n\n }\n\n }\n\n (Some(a), None) => Ok(a),\n\n (None, Some(b)) => Ok(b),\n\n (None, None) => {\n\n Err(CompileError::no_such_method(called_name.clone(), actual_args, caller_source_ref.clone()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 38, "score": 132671.09494811276 }, { "content": "#[derive(Debug)]\n\nenum MetaCommand {\n\n Clear,\n\n Show,\n\n Help,\n\n}\n\n\n\nimpl ::std::str::FromStr for MetaCommand {\n\n type Err = ();\n\n fn from_str(val: &str) -> Result<MetaCommand, ()> {\n\n match val {\n\n \"clear\" => Ok(MetaCommand::Clear),\n\n \"show\" => Ok(MetaCommand::Show),\n\n \"help\" => Ok(MetaCommand::Help),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/repl/mod.rs", "rank": 39, "score": 131864.8193801276 }, { "content": "fn create_string_benchmark<I: fmt::Debug + 'static>(program: &'static str) -> Fun<I> {\n\n let mut interpreter = Interpreter::new();\n\n interpreter.add_std_lib();\n\n let compiled = interpreter\n\n .eval(UnreadSource::Builtin(\"test\", program))\n\n .unwrap();\n\n let mut context = ProgramContext::from_seed(SEED, ::dgen::verbosity::NORMAL);\n\n let mut out = Vec::with_capacity(OUT_CAPACITY);\n\n\n\n Fun::new(program, move |b, _| {\n\n b.iter(|| {\n\n out.clear();\n\n let mut real_out = DataGenOutput::new(&mut out);\n\n compiled.write_value(&mut context, &mut real_out).unwrap();\n\n })\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 40, "score": 128816.29278202016 }, { "content": "fn do_arguments_match<A: Iterator<Item = GenType>, B: Iterator<Item = GenType>>(\n\n expected_types: A,\n\n 
actual_types: B,\n\n variadic: bool,\n\n) -> bool {\n\n use itertools::{EitherOrBoth, Itertools};\n\n\n\n let mut actual_types = actual_types.peekable();\n\n let mut expected_types = expected_types.peekable();\n\n\n\n // if this is a 0-arg function, then our job is really easy\n\n if expected_types.peek().is_none() {\n\n return actual_types.peek().is_none();\n\n }\n\n\n\n /*\n\n * expected_types is now guaranteed to be non-empty\n\n * we must require at least one argument for a varargs parameter. This is because we don't really\n\n * resolve to a \"best\" match. We instead assume that a function call will match at most two prototypes.\n\n * When a call does match two prototypes, we will select whichever one is NOT variadic, and error\n", "file_path": "src/interpreter/prototype.rs", "rank": 41, "score": 122219.12757891385 }, { "content": "pub fn process_doc_comments(raw_lines: Vec<String>) -> String {\n\n if raw_lines.is_empty() {\n\n \"user defined function\".to_owned()\n\n } else {\n\n raw_lines.join(\"\\n\")\n\n }\n\n}", "file_path": "src/interpreter/ast.rs", "rank": 42, "score": 116073.15626041012 }, { "content": "fn list_modules(interpreter: &Interpreter) -> String {\n\n use itertools::Itertools;\n\n interpreter\n\n .module_iterator()\n\n .map(|m| m.name.clone())\n\n .join(\"\\n\")\n\n}\n\n\n", "file_path": "src/program/help.rs", "rank": 43, "score": 115307.37294818881 }, { "content": "#[test]\n\nfn parse_err_is_displayed_correctly() {\n\n let source = r##\"\n\n\n\n foo\n\n\n\n bar\n\n\n\n baz\n\n\n\n\"##;\n\n\n\n let location_offset = 17; // the 'r' at the end of \"bar\" (offset includes the indentation spaces)\n\n assert_eq!(\"r\", &source[location_offset..(location_offset + 1)]);\n\n\n\n {\n\n let subject = SourceErrRegion {source, location_offset};\n\n let rendered = format!(\"{}\", subject);\n\n /*\n\n Should render with the caret underneath the problem character in the terminal as:\n\nline 5: bar\n", "file_path": "src/interpreter/errors.rs", "rank": 44, "score": 114601.26809116956 }, { "content": "pub fn create_memoized_fun(input: AnyFunction) -> (AnyFunction, Rc<Resetter>) {\n\n let resetter = Rc::new(Resetter::new());\n\n let reset_to_return = resetter.clone();\n\n\n\n let fun_to_return = match input {\n\n AnyFunction::String(fun) => AnyFunction::String(MemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Boolean(fun) => AnyFunction::Boolean(MemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Decimal(fun) => AnyFunction::Decimal(MemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Uint(fun) => AnyFunction::Uint(MemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Int(fun) => AnyFunction::Int(MemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Bin(fun) => AnyFunction::Bin(MemoizedFunction::new(fun, resetter)),\n\n };\n\n (fun_to_return, reset_to_return)\n\n}\n\n\n\n// impl<T> MemoizedFunction<T> {\n\n// pub fn new(wrapped: DynFun<T>) -> (MemoizedFunction<T>, )\n\n// }\n", "file_path": "src/interpreter/map.rs", "rank": 45, "score": 113802.88389267828 }, { "content": "pub fn finish_mapped(resolved: AnyFunction, resetter: Rc<Resetter>) -> AnyFunction {\n\n match resolved {\n\n AnyFunction::String(fun) => {\n\n AnyFunction::String(WrappedMemoizedFunction::new(fun, resetter))\n\n }\n\n AnyFunction::Boolean(fun) => {\n\n AnyFunction::Boolean(WrappedMemoizedFunction::new(fun, resetter))\n\n }\n\n AnyFunction::Decimal(fun) => {\n\n AnyFunction::Decimal(WrappedMemoizedFunction::new(fun, resetter))\n\n }\n\n AnyFunction::Uint(fun) => 
AnyFunction::Uint(WrappedMemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Int(fun) => AnyFunction::Int(WrappedMemoizedFunction::new(fun, resetter)),\n\n AnyFunction::Bin(fun) => AnyFunction::Bin(WrappedMemoizedFunction::new(fun, resetter)),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/map.rs", "rank": 46, "score": 111776.78071779202 }, { "content": "#[test]\n\nfn parses_program_with_macro_definitions() {\n\n let input = r#\"\n\n # comment 1 \n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 47, "score": 111700.20550803289 }, { "content": "#[test]\n\nfn parses_function_call_with_zero_arguments() {\n\n let result = ExprParser::new().parse(\"fun_name()\");\n\n let expected = FunctionCall {\n\n function_name: \"fun_name\".into(),\n\n args: Vec::new(),\n\n mapper: None,\n\n };\n\n assert_eq!(Ok(with_span(0, 10, Expr::Function(expected))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 48, "score": 108984.7966264288 }, { "content": "#[test]\n\nfn parses_function_call_with_literal_arguments() {\n\n let result = ExprParser::new().parse(r#\"fun_name(\"foo\", 55, 12.5)\"#);\n\n let expected = FunctionCall {\n\n function_name: \"fun_name\".into(),\n\n args: vec![\n\n with_span(9, 14, Expr::StringLiteral(\"foo\".into())),\n\n with_span(16, 18, Expr::IntLiteral(55)),\n\n with_span(20, 24, Expr::DecimalLiteral(12.5))\n\n ],\n\n mapper: None,\n\n };\n\n assert_eq!(Ok(with_span(0, 25, Expr::Function(expected))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 49, "score": 108984.7966264288 }, { "content": "fn homogeneous_string_benches(c: &mut Criterion) {\n\n let functions = vec![\n\n create_string_benchmark(\"ascii_lowercase_chars(16)\"),\n\n create_string_benchmark(\"ascii_lowercase_chars(128)\"),\n\n create_string_benchmark(\"ascii_lowercase_chars(1024)\"),\n\n ];\n\n\n\n c.bench_functions(\"heterogeneous_strings\", functions, ());\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 50, "score": 106268.7155967408 }, { "content": "fn heterogeneous_string_benches(c: &mut Criterion) {\n\n let functions = vec![\n\n create_string_benchmark(\"ascii_alphanumeric_chars(16)\"),\n\n create_string_benchmark(\"ascii_alphanumeric_chars(128)\"),\n\n create_string_benchmark(\"ascii_alphanumeric_chars(1024)\"),\n\n ];\n\n\n\n c.bench_functions(\"heterogeneous_strings\", functions, ());\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 51, "score": 106268.7155967408 }, { "content": "fn get_metacommand(line: &str) -> Option<MetaCommand> {\n\n let line = line.trim();\n\n line.split(\"\\\\s+\")\n\n .nth(0)\n\n .and_then(|split| split.parse().ok())\n\n}\n\n\n", "file_path": "src/repl/mod.rs", "rank": 52, "score": 101622.63447650927 }, { "content": "fn split_module_and_function(function_name: &str) -> Option<(IString, IString)> {\n\n function_name.rfind(MODULE_SEPARATOR_CHAR).map(|separator_position| {\n\n let (module, function_with_separator) = function_name.split_at(separator_position);\n\n let function = (&function_with_separator[1..]).into();\n\n (module.into(), function)\n\n })\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 53, "score": 93524.41261969181 }, { "content": "#[derive(Debug)]\n\nstruct ToString<T: Display + OutputType> {\n\n gen: DynFun<T>,\n\n}\n\n\n\nimpl<T: Display + Debug + OutputType + 'static> ToString<T> {\n\n fn new(wrapped: DynFun<T>) -> AnyFunction {\n\n AnyFunction::String(Rc::new(ToString { gen: wrapped }))\n\n }\n\n}\n\n\n\n// Display bound is here to prevent this impl from being used to format 
binary\n\nimpl<T: Display + Debug + OutputType> RunnableFunction<IString> for ToString<T> {\n\n fn gen_value(&self, ctx: &mut ProgramContext) -> Result<IString, Error> {\n\n let mut buffer = Vec::with_capacity(32);\n\n {\n\n let mut out = DataGenOutput::new(&mut buffer);\n\n self.write_value(ctx, &mut out)?;\n\n }\n\n // we know that the result will be valid utf8 because the implementations of OutputType for non-binary\n\n // are all guaranteed to produce valid utf-8\n", "file_path": "src/builtins/to_string.rs", "rank": 54, "score": 88964.93178824912 }, { "content": "#[test]\n\nfn declare_and_use_functions() {\n\n let expected_output = \"aw6OqR822CZggJ42f1aT0\";\n\n let input = r#\"\n", "file_path": "src/fun_test.rs", "rank": 55, "score": 88508.3495972536 }, { "content": "#[test]\n\nfn use_binary_functions() {\n\n let program = r##\"\n", "file_path": "src/fun_test.rs", "rank": 56, "score": 88508.3495972536 }, { "content": "fn find_modules<'a>(\n\n interpreter: &'a Interpreter,\n\n module_name: &'a str,\n\n) -> Result<impl Iterator<Item = &'a Module>, Error> {\n\n let _ = has_matching_module(interpreter, module_name).map_err(|_| {\n\n let other_modules = list_modules(interpreter);\n\n format_err!(\n\n \"No module exists with name matching '{}'. Available modules are: \\n\\n{}\\n\",\n\n module_name,\n\n other_modules\n\n )\n\n })?;\n\n\n\n Ok(interpreter\n\n .module_iterator()\n\n .filter(move |m| m.name.contains(module_name)))\n\n}\n", "file_path": "src/program/help.rs", "rank": 57, "score": 88436.39112709086 }, { "content": "#[test]\n\nfn use_std_boolean_function() {\n\n let expected_output = \"truetruetrue\";\n\n let input = r#\"boolean()\"#;\n\n test_program_success(3, input, expected_output);\n\n}\n\n\n", "file_path": "src/fun_test.rs", "rank": 58, "score": 86353.73717035452 }, { "content": "#[test]\n\nfn declare_and_use_function_with_mapper() {\n\n let input = r#\"\n", "file_path": "src/fun_test.rs", "rank": 59, "score": 86353.73717035452 }, { "content": "fn create_interpreter(options: &CliOptions) -> Interpreter {\n\n let verbosity = options.get_verbosity();\n\n let mut interpreter = Interpreter::new();\n\n if !options.no_std_lib {\n\n interpreter.add_std_lib();\n\n }\n\n for lib in options.get_library_sources() {\n\n interpreter.add_module(lib).or_bail(verbosity);\n\n }\n\n interpreter\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 85430.05264936878 }, { "content": "#[test]\n\nfn pass_mapped_function_as_function_argument() {\n\n let input = r#\"\n", "file_path": "src/fun_test.rs", "rank": 61, "score": 84072.16438146654 }, { "content": "#[test]\n\nfn parses_bin_literal() {\n\n let input = \"[ 0x00,0xff, 0x01]\";\n\n let expected_output = with_span(0, 18, bin(&[0x00, 0xff, 0x01]));\n\n let actual = ExprParser::new().parse(input).expect(\"failed to parse\");\n\n assert_eq!(expected_output, actual);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 62, "score": 83748.43204139723 }, { "content": "#[test]\n\nfn parses_nested_function_calls() {\n\n let result = ExprParser::new().parse(r#\"fun1(\"foo\", fun2(12.5, fun3(111)), \"bar\")\"#);\n\n let expected = with_span(0, 41, fun(\n\n \"fun1\",\n\n vec![\n\n with_span(5, 10, string(\"foo\")),\n\n with_span(12, 33, fun(\"fun2\", vec![\n\n with_span(17, 21, float(12.5)), \n\n with_span(23, 32, fun(\"fun3\", vec![\n\n with_span(28, 31, int(111))\n\n ]))\n\n ])),\n\n with_span(35, 40, string(\"bar\")),\n\n ],\n\n ));\n\n assert_eq!(Ok(expected), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", 
"rank": 63, "score": 81822.88520921284 }, { "content": "#[test]\n\nfn parses_mapped_function_call() {\n\n let input = r#\"fun1(\"foo\", 7) {mapper_arg ->\n\n inner(mapper_arg, mapper_arg)\n\n }\"#;\n\n let result = ExprParser::new().parse(input);\n\n let expected = with_span(0, 73, mfun(\n\n \"fun1\",\n\n vec![with_span(5, 10, string(\"foo\")), with_span(12, 13, int(7))],\n\n \"mapper_arg\",\n\n with_span(38, 67, fun(\n\n \"inner\",\n\n vec![with_span(44, 54, arg_usage(\"mapper_arg\")), with_span(56, 66, arg_usage(\"mapper_arg\"))],\n\n )),\n\n ));\n\n assert_eq!(Ok(expected), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 64, "score": 81822.88520921284 }, { "content": "#[test]\n\nfn parses_empty_bin_literal() {\n\n let input = \"[ ]\";\n\n let expected_output = with_span(0, 3, bin(&[]));\n\n let actual = ExprParser::new().parse(input).expect(\"failed to parse\");\n\n assert_eq!(expected_output, actual);\n\n}\n\n\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 65, "score": 81822.88520921284 }, { "content": "#[test]\n\nfn parses_string_literal_that_is_all_whitespace() {\n\n string_literal_test(r#\"\" \\t \\n \\r \"\"#, \" \\t \\n \\r \");\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 66, "score": 81822.88520921284 }, { "content": "#[test]\n\nfn parses_decimal_literal_token() {\n\n let result = ExprParser::new().parse(r#\"123.45\"#);\n\n assert_eq!(Ok(with_span(0, 6, float(123.45))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 67, "score": 81822.88520921284 }, { "content": "#[test]\n\nfn parses_boolean_literal_true_token() {\n\n let result = ExprParser::new().parse(r#\"true\"#);\n\n assert_eq!(Ok(with_span(0, 4, boolean(true))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 68, "score": 80020.8270391404 }, { "content": "#[test]\n\nfn parses_basic_string_literal_token() {\n\n string_literal_test(r#\"\"somestr\"\"#, \"somestr\");\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 69, "score": 80020.8270391404 }, { "content": "#[test]\n\nfn parses_string_literal_with_escaped_quotes() {\n\n let result = ExprParser::new().parse(r#\"\"some\\\"str\"\"#);\n\n assert_eq!(Ok(with_span(0, 11, string(r#\"some\"str\"#))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 70, "score": 80020.8270391404 }, { "content": "#[test]\n\nfn parses_unsigned_int_literal_token() {\n\n let result = ExprParser::new().parse(r#\"1234\"#);\n\n assert_eq!(Ok(with_span(0, 4, int(1234))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 71, "score": 80020.8270391404 }, { "content": "#[test]\n\nfn parses_boolean_literal_false_token() {\n\n let result = ExprParser::new().parse(r#\"false\"#);\n\n assert_eq!(Ok(with_span(0, 5, boolean(false))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 72, "score": 80020.8270391404 }, { "content": "#[test]\n\nfn adding_a_library_that_defines_two_functions_with_the_same_signature_returns_error() {\n\n let lib = r##\"\n", "file_path": "src/fun_test.rs", "rank": 73, "score": 78704.09849903561 }, { "content": "fn s(val: &str) -> IString {\n\n val.into()\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 74, "score": 78481.69534353737 }, { "content": "fn string(s: &str) -> Expr {\n\n Expr::StringLiteral(s.into())\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 75, "score": 78481.69534353737 }, { "content": "fn float(f: f64) -> Expr {\n\n 
Expr::DecimalLiteral(f)\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 76, "score": 78481.69534353737 }, { "content": "fn boolean(b: bool) -> Expr {\n\n Expr::BooleanLiteral(b)\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 77, "score": 78481.69534353737 }, { "content": "fn int(i: u64) -> Expr {\n\n Expr::IntLiteral(i)\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 78, "score": 78481.69534353737 }, { "content": "fn sint(i: i64) -> Expr {\n\n Expr::SignedIntLiteral(i)\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 79, "score": 78481.69534353737 }, { "content": "#[test]\n\nfn parses_string_literal_with_unicode_escape_sequences() {\n\n string_literal_test(r#\"\"foo\\U{1F4A9}\"\"#, \"foo💩\");\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 80, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_unsigned_int_hex_literal_token() {\n\n let result = ExprParser::new().parse(\"0xFF\");\n\n assert_eq!(Ok(with_span(0, 4, int(255))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 81, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_mapped_function_call_withou_args() {\n\n let input = r#\"fun1() {mapper_arg ->\n\n inner(mapper_arg, mapper_arg)\n\n }\"#;\n\n let result = ExprParser::new().parse(input);\n\n let expected = with_span(0, 65, mfun(\n\n \"fun1\",\n\n Vec::new(),\n\n \"mapper_arg\",\n\n with_span(30, 59, fun(\n\n \"inner\",\n\n vec![with_span(36, 46, arg_usage(\"mapper_arg\")), with_span(48, 58, arg_usage(\"mapper_arg\"))],\n\n )),\n\n ));\n\n assert_eq!(Ok(expected), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 82, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_signed_int_literal_negative_token() {\n\n let result = ExprParser::new().parse(r#\"-1234\"#);\n\n assert_eq!(Ok(with_span(0, 5, sint(-1234))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 83, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_signed_int_literal_positive_token() {\n\n let result = ExprParser::new().parse(r#\"+1234\"#);\n\n assert_eq!(Ok(with_span(0, 5, sint(1234))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 84, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_decimal_literal_token_with_negative_sign() {\n\n let result = ExprParser::new().parse(r#\"-123.45\"#);\n\n assert_eq!(Ok(with_span(0, 7, float(-123.45))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 85, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_decimal_literal_token_with_positive_sign() {\n\n let result = ExprParser::new().parse(r#\"+123.45\"#);\n\n assert_eq!(Ok(with_span(0, 7, float(123.45))), result);\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 86, "score": 78330.74730312251 }, { "content": "#[test]\n\nfn parses_string_literal_with_whitespace_chars_and_escape_sequences() {\n\n string_literal_test(r#\"\" some\\t str\\n \"\"#, \" some\\t str\\n \");\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 87, "score": 76742.52314063876 }, { "content": "fn bin(bytes: &[u8]) -> Expr {\n\n Expr::BinaryLiteral(bytes.to_owned())\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 88, "score": 76556.14851135298 }, { "content": "fn arg_usage(name: &str) -> Expr {\n\n Expr::ArgumentUsage(name.into())\n\n}\n\n\n", "file_path": "src/interpreter/parse_test.rs", "rank": 89, 
"score": 74754.09034128053 }, { "content": "fn create_env_map() -> HashMap<IString, IString> {\n\n let mut map = HashMap::with_capacity(64);\n\n for (key, value) in ::std::env::vars() {\n\n map.insert(key.into(), value.into());\n\n }\n\n map\n\n}\n\n\n\nlazy_static! {\n\n static ref ENV_VARS: HashMap<IString, IString> = create_env_map();\n\n}\n\n\n", "file_path": "src/builtins/env.rs", "rank": 90, "score": 73936.19197885634 }, { "content": "fn list_functions<'a, 'b, I: Iterator<Item = &'a FunctionPrototype>>(\n\n function_iterator: I,\n\n function_name: Option<&'b str>,\n\n out: &mut DataGenOutput,\n\n print_source: bool,\n\n) -> Result<(), Error> {\n\n use std::fmt::Write;\n\n\n\n let mut filtered = function_iterator\n\n .filter(|fun| {\n\n function_name\n\n .as_ref()\n\n .map(|name| fun.name().contains(*name))\n\n .unwrap_or(true)\n\n }).peekable();\n\n\n\n if filtered.peek().is_none() {\n\n writeln!(out, \"No matching functions\")?;\n\n } else {\n\n writeln!(out, \"\")?;\n", "file_path": "src/program/help.rs", "rank": 91, "score": 72507.62762337524 }, { "content": "use encoding::ByteWriter;\n\nuse std::fmt::Display;\n\nuse std::io::{self, Write};\n\nuse crate::OutputType;\n\nuse failure::Error;\n\n\n\npub struct TrackingWriter<'a> {\n\n delegate: &'a mut Write,\n\n num_written: u64,\n\n}\n\n\n\nimpl<'a> Write for TrackingWriter<'a> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n let result = self.delegate.write(buf);\n\n if let Ok(num) = result.as_ref() {\n\n self.num_written += *num as u64;\n\n }\n\n result\n\n }\n\n\n", "file_path": "src/writer/mod.rs", "rank": 92, "score": 71471.25024076631 }, { "content": " .write_fmt(format_args!(\"{}\", value))\n\n .map_err(|_| {\n\n format_err!(\"Failed to write to output\")\n\n })\n\n }\n\n\n\n pub fn write<O: OutputType>(&mut self, value: &O) -> Result<(), Error> {\n\n value.write_output(self)\n\n }\n\n\n\n pub fn with<F, T>(&mut self, fun: F) -> Result<(), Error>\n\n where\n\n F: FnOnce(&mut DataGenOutput) -> Result<T, ::failure::Error>,\n\n {\n\n let _start = self.writer.get_num_bytes_written();\n\n let _ = fun(self)?;\n\n Ok(())\n\n }\n\n\n\n pub fn flush(&mut self) -> io::Result<()> {\n", "file_path": "src/writer/mod.rs", "rank": 93, "score": 71466.16013954273 }, { "content": "}\n\n\n\nimpl<'a> DataGenOutput<'a> {\n\n pub fn new(writer: &'a mut Write) -> DataGenOutput<'a> {\n\n DataGenOutput {\n\n writer: TrackingWriter::new(writer),\n\n }\n\n }\n\n\n\n pub fn write_bytes(&mut self, bytes: &[u8]) -> Result<(), Error> {\n\n self.writer.write_all(bytes).map_err(|io_err| io_err.into())\n\n }\n\n\n\n pub fn write_str(&mut self, value: &str) -> Result<(), Error> {\n\n self.write_bytes(value.as_bytes())\n\n }\n\n\n\n pub fn write_string<D: Display + ?Sized>(&mut self, value: &D) -> Result<(), Error> {\n\n let _start = self.writer.get_num_bytes_written();\n\n self.writer\n", "file_path": "src/writer/mod.rs", "rank": 94, "score": 71462.39646228713 }, { "content": " self.writer.flush()\n\n }\n\n}\n\n\n\nimpl<'a> ByteWriter for DataGenOutput<'a> {\n\n fn write_byte(&mut self, b: u8) {\n\n self.writer\n\n .write_all(&[b])\n\n .expect(\"Failed to write output of encoded string\");\n\n }\n\n fn write_bytes(&mut self, v: &[u8]) {\n\n self.writer\n\n .write_all(v)\n\n .expect(\"Failed to write output of encoded string\");\n\n }\n\n}\n\n\n\n\n\nimpl<'a> ::std::fmt::Write for DataGenOutput<'a> {\n\n fn write_str(&mut self, s: &str) -> ::std::fmt::Result {\n\n self.write_str(s).map(|_| ()).map_err(|_| ::std::fmt::Error)\n\n }\n\n}\n", 
"file_path": "src/writer/mod.rs", "rank": 95, "score": 71460.84609749002 }, { "content": " fn flush(&mut self) -> io::Result<()> {\n\n self.delegate.flush()\n\n }\n\n}\n\n\n\nimpl<'a> TrackingWriter<'a> {\n\n pub fn new(delegate: &'a mut Write) -> TrackingWriter<'a> {\n\n TrackingWriter {\n\n delegate,\n\n num_written: 0,\n\n }\n\n }\n\n\n\n pub fn get_num_bytes_written(&self) -> u64 {\n\n self.num_written\n\n }\n\n}\n\n\n\npub struct DataGenOutput<'a> {\n\n writer: TrackingWriter<'a>,\n", "file_path": "src/writer/mod.rs", "rank": 96, "score": 71458.29153369089 }, { "content": "use crate::interpreter::{DgenParseError, Interpreter, UnreadSource};\n\nuse crate::{AnyFunction, DataGenOutput, ProgramContext};\n\nuse failure::Error;\n\nuse rustyline::error::ReadlineError;\n\nuse rustyline::Editor;\n\nuse std::io::{self, Write};\n\n\n\nconst MAX_EMPTY_LINES: u32 = 2;\n\n\n\npub struct Repl {\n\n context: ProgramContext,\n\n interpreter: Interpreter,\n\n editor: Editor<()>,\n\n module_source: String,\n\n partial_source: String,\n\n consecutive_blank_lines: u32,\n\n awaiting_incomplete_input: bool,\n\n}\n\n\n\nconst MODULE_NAME: &str = \"default\";\n\n\n", "file_path": "src/repl/mod.rs", "rank": 97, "score": 71367.84898329218 }, { "content": " writeln!(lock, \"Program Error: {}\", err)?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Repl {\n\n pub fn new(context: ProgramContext, interpreter: Interpreter) -> Repl {\n\n Repl {\n\n context,\n\n interpreter,\n\n editor: Editor::new(),\n\n module_source: String::with_capacity(1024),\n\n partial_source: String::with_capacity(512),\n\n consecutive_blank_lines: 0,\n\n awaiting_incomplete_input: false,\n\n }\n\n }\n\n\n\n pub fn run(mut self) -> Result<(), Error> {\n\n self.help();\n", "file_path": "src/repl/mod.rs", "rank": 98, "score": 71364.02598778585 }, { "content": " }\n\n\n\n let mut new_combined_input = self.module_source.clone();\n\n new_combined_input.push_str(\"\\n\");\n\n new_combined_input.push_str(self.partial_source.as_str());\n\n\n\n self.interpreter.remove_module(MODULE_NAME);\n\n let result = self\n\n .interpreter\n\n .eval_any(UnreadSource::String(new_combined_input));\n\n match result {\n\n Ok(Some(function)) => {\n\n let history_entry = self.partial_source.as_str().replace(\"\\n\", \" \");\n\n self.partial_source.clear();\n\n self.editor.add_history_entry(history_entry);\n\n execute_fn(function, &mut self.context)?;\n\n }\n\n Ok(None) => {\n\n println!(\"Added function\");\n\n let history_entry = self.partial_source.as_str().replace(\"\\n\", \" \");\n", "file_path": "src/repl/mod.rs", "rank": 99, "score": 71350.65869473672 } ]
Rust
src/resolution/constraint/contact_equation.rs
BenBergman/nphysics
11ca4d6f967c35e7f51e65295174c5b0395cbd93
use na::Bounded; use na; use num::Float; use ncollide::geometry::Contact; use volumetric::InertiaTensor; use resolution::constraint::velocity_constraint::VelocityConstraint; use object::RigidBody; use math::{Scalar, Point, Vect, Orientation}; pub enum CorrectionMode { Velocity(Scalar), VelocityAndPosition(Scalar, Scalar, Scalar), VelocityAndPositionThresold(Scalar, Scalar, Scalar) } impl CorrectionMode { #[inline] pub fn vel_corr_factor(&self) -> Scalar { match *self { CorrectionMode::Velocity(ref v) => v.clone(), CorrectionMode::VelocityAndPosition(ref v, _, _) => v.clone(), CorrectionMode::VelocityAndPositionThresold(ref v, _, _) => v.clone() } } #[inline] pub fn pos_corr_factor(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, ref p, _) => p.clone(), CorrectionMode::VelocityAndPositionThresold(_, ref p, _) => p.clone(), CorrectionMode::Velocity(_) => na::zero() } } #[inline] pub fn min_depth_for_pos_corr(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, _, ref t) => t.clone(), CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(), CorrectionMode::Velocity(_) => Bounded::max_value() } } #[inline] pub fn max_depth_for_vel_corr(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, _, _) => Bounded::max_value(), CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(), CorrectionMode::Velocity(_) => Bounded::max_value() } } } pub struct CorrectionParameters { pub corr_mode: CorrectionMode, pub joint_corr: Scalar, pub rest_eps: Scalar } pub fn reinit_to_first_order_equation(dt: Scalar, coll: &Contact<Point>, constraint: &mut VelocityConstraint, correction: &CorrectionParameters) { /* * Fill b */ if coll.depth >= correction.corr_mode.min_depth_for_pos_corr() { constraint.objective = correction.corr_mode.pos_corr_factor() * coll.depth.max(na::zero()) / dt; } else { constraint.objective = na::zero(); } /* * Reset forces */ constraint.impulse = na::zero(); } pub fn fill_second_order_equation(dt: Scalar, coll: &Contact<Point>, rb1: &RigidBody, rb2: &RigidBody, rconstraint: &mut VelocityConstraint, idr: usize, fconstraints: &mut [VelocityConstraint], idf: usize, cache: &[Scalar], correction: &CorrectionParameters) { let restitution = rb1.restitution() * rb2.restitution(); let center = na::center(&coll.world1, &coll.world2); fill_velocity_constraint(dt.clone(), coll.normal.clone(), center.clone(), restitution, coll.depth.clone(), cache[0].clone(), na::zero(), Bounded::max_value(), rb1, rb2, rconstraint, correction); let friction = rb1.friction() * rb2.friction(); let mut i = 0; na::orthonormal_subspace_basis(&coll.normal, |friction_axis| { let constraint = &mut fconstraints[idf + i]; fill_velocity_constraint(dt.clone(), friction_axis, center.clone(), na::zero(), na::zero(), cache[i + 1].clone(), na::zero(), na::zero(), rb1, rb2, constraint, correction); constraint.friction_coeff = friction.clone(); constraint.friction_limit_id = idr; i = i + 1; true }) } pub fn fill_constraint_geometry(normal: Vect, rot_axis1: Orientation, rot_axis2: Orientation, rb1: &Option<&RigidBody>, rb2: &Option<&RigidBody>, constraint: &mut VelocityConstraint) { constraint.normal = normal; constraint.inv_projected_mass = na::zero(); match *rb1 { Some(ref rb) => { constraint.weighted_normal1 = constraint.normal * rb.inv_mass(); constraint.rot_axis1 = rot_axis1; constraint.weighted_rot_axis1 = rb.inv_inertia().apply(&constraint.rot_axis1); constraint.inv_projected_mass = constraint.inv_projected_mass + na::dot(&constraint.normal, 
&constraint.weighted_normal1) + na::dot(&constraint.rot_axis1, &constraint.weighted_rot_axis1); }, None => { } } match *rb2 { Some(ref rb) => { constraint.weighted_normal2 = constraint.normal * rb.inv_mass(); constraint.rot_axis2 = rot_axis2; constraint.weighted_rot_axis2 = rb.inv_inertia().apply(&constraint.rot_axis2); constraint.inv_projected_mass = constraint.inv_projected_mass + na::dot(&constraint.normal, &constraint.weighted_normal2) + na::dot(&constraint.rot_axis2, &constraint.weighted_rot_axis2); }, None => { } } let _1: Scalar = na::one(); constraint.inv_projected_mass = _1 / constraint.inv_projected_mass; } fn fill_velocity_constraint(dt: Scalar, normal: Vect, center: Point, restitution: Scalar, depth: Scalar, initial_impulse: Scalar, lobound: Scalar, hibound: Scalar, rb1: &RigidBody, rb2: &RigidBody, constraint: &mut VelocityConstraint, correction: &CorrectionParameters) { let rot_axis1 = na::cross(&(center - *rb1.center_of_mass()), &-normal); let rot_axis2 = na::cross(&(center - *rb2.center_of_mass()), &normal); let opt_rb1 = if rb1.can_move() { Some(rb1) } else { None }; let opt_rb2 = if rb2.can_move() { Some(rb2) } else { None }; fill_constraint_geometry(normal, rot_axis1, rot_axis2, &opt_rb1, &opt_rb2, constraint); /* * Fill indice */ constraint.id1 = rb1.index(); constraint.id2 = rb2.index(); /* * correction amount */ constraint.objective = relative_velocity( &opt_rb1, &opt_rb2, &constraint.normal, &constraint.rot_axis1, &constraint.rot_axis2, &dt); if constraint.objective < -correction.rest_eps { constraint.objective = constraint.objective + restitution * constraint.objective } constraint.objective = -constraint.objective; if depth < na::zero() { constraint.objective = constraint.objective + depth / dt } else if depth < correction.corr_mode.max_depth_for_vel_corr() { constraint.objective = constraint.objective + depth * correction.corr_mode.vel_corr_factor() / dt } constraint.impulse = if depth < na::zero() { na::zero() } else { initial_impulse }; /* * constraint bounds */ constraint.lobound = lobound; constraint.hibound = hibound; } pub fn relative_velocity(rb1: &Option<&RigidBody>, rb2: &Option<&RigidBody>, normal: &Vect, rot_axis1: &Orientation, rot_axis2: &Orientation, dt: &Scalar) -> Scalar { let mut dvel: Scalar = na::zero(); match *rb1 { Some(ref rb) => { dvel = dvel - na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal) + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis1); }, None => { } } match *rb2 { Some(ref rb) => { dvel = dvel + na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal) + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis2); }, None => { } } dvel }
use na::Bounded; use na; use num::Float; use ncollide::geometry::Contact; use volumetric::InertiaTensor; use resolution::constraint::velocity_constraint::VelocityConstraint; use object::RigidBody; use math::{Scalar, Point, Vect, Orientation}; pub enum CorrectionMode { Velocity(Scalar), VelocityAndPosition(Scalar, Scalar, Scalar), VelocityAndPositionThresold(Scalar, Scalar, Scalar) } impl CorrectionMode { #[inline] pub fn vel_corr_factor(&self) -> Scalar { match *self { CorrectionMode::Velocity(ref v) => v.clone(), CorrectionMode::VelocityAndPosition(ref v, _, _) => v.clone(), CorrectionMode::VelocityAndPositionThresold(ref v, _, _) => v.clone() } } #[inline] pub fn pos_c
e::VelocityAndPositionThresold(_, ref p, _) => p.clone(), CorrectionMode::Velocity(_) => na::zero() } } #[inline] pub fn min_depth_for_pos_corr(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, _, ref t) => t.clone(), CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(), CorrectionMode::Velocity(_) => Bounded::max_value() } } #[inline] pub fn max_depth_for_vel_corr(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, _, _) => Bounded::max_value(), CorrectionMode::VelocityAndPositionThresold(_, _, ref t) => t.clone(), CorrectionMode::Velocity(_) => Bounded::max_value() } } } pub struct CorrectionParameters { pub corr_mode: CorrectionMode, pub joint_corr: Scalar, pub rest_eps: Scalar } pub fn reinit_to_first_order_equation(dt: Scalar, coll: &Contact<Point>, constraint: &mut VelocityConstraint, correction: &CorrectionParameters) { /* * Fill b */ if coll.depth >= correction.corr_mode.min_depth_for_pos_corr() { constraint.objective = correction.corr_mode.pos_corr_factor() * coll.depth.max(na::zero()) / dt; } else { constraint.objective = na::zero(); } /* * Reset forces */ constraint.impulse = na::zero(); } pub fn fill_second_order_equation(dt: Scalar, coll: &Contact<Point>, rb1: &RigidBody, rb2: &RigidBody, rconstraint: &mut VelocityConstraint, idr: usize, fconstraints: &mut [VelocityConstraint], idf: usize, cache: &[Scalar], correction: &CorrectionParameters) { let restitution = rb1.restitution() * rb2.restitution(); let center = na::center(&coll.world1, &coll.world2); fill_velocity_constraint(dt.clone(), coll.normal.clone(), center.clone(), restitution, coll.depth.clone(), cache[0].clone(), na::zero(), Bounded::max_value(), rb1, rb2, rconstraint, correction); let friction = rb1.friction() * rb2.friction(); let mut i = 0; na::orthonormal_subspace_basis(&coll.normal, |friction_axis| { let constraint = &mut fconstraints[idf + i]; fill_velocity_constraint(dt.clone(), friction_axis, center.clone(), na::zero(), na::zero(), cache[i + 1].clone(), na::zero(), na::zero(), rb1, rb2, constraint, correction); constraint.friction_coeff = friction.clone(); constraint.friction_limit_id = idr; i = i + 1; true }) } pub fn fill_constraint_geometry(normal: Vect, rot_axis1: Orientation, rot_axis2: Orientation, rb1: &Option<&RigidBody>, rb2: &Option<&RigidBody>, constraint: &mut VelocityConstraint) { constraint.normal = normal; constraint.inv_projected_mass = na::zero(); match *rb1 { Some(ref rb) => { constraint.weighted_normal1 = constraint.normal * rb.inv_mass(); constraint.rot_axis1 = rot_axis1; constraint.weighted_rot_axis1 = rb.inv_inertia().apply(&constraint.rot_axis1); constraint.inv_projected_mass = constraint.inv_projected_mass + na::dot(&constraint.normal, &constraint.weighted_normal1) + na::dot(&constraint.rot_axis1, &constraint.weighted_rot_axis1); }, None => { } } match *rb2 { Some(ref rb) => { constraint.weighted_normal2 = constraint.normal * rb.inv_mass(); constraint.rot_axis2 = rot_axis2; constraint.weighted_rot_axis2 = rb.inv_inertia().apply(&constraint.rot_axis2); constraint.inv_projected_mass = constraint.inv_projected_mass + na::dot(&constraint.normal, &constraint.weighted_normal2) + na::dot(&constraint.rot_axis2, &constraint.weighted_rot_axis2); }, None => { } } let _1: Scalar = na::one(); constraint.inv_projected_mass = _1 / constraint.inv_projected_mass; } fn fill_velocity_constraint(dt: Scalar, normal: Vect, center: Point, restitution: Scalar, depth: Scalar, initial_impulse: Scalar, lobound: Scalar, hibound: Scalar, rb1: &RigidBody, 
rb2: &RigidBody, constraint: &mut VelocityConstraint, correction: &CorrectionParameters) { let rot_axis1 = na::cross(&(center - *rb1.center_of_mass()), &-normal); let rot_axis2 = na::cross(&(center - *rb2.center_of_mass()), &normal); let opt_rb1 = if rb1.can_move() { Some(rb1) } else { None }; let opt_rb2 = if rb2.can_move() { Some(rb2) } else { None }; fill_constraint_geometry(normal, rot_axis1, rot_axis2, &opt_rb1, &opt_rb2, constraint); /* * Fill indice */ constraint.id1 = rb1.index(); constraint.id2 = rb2.index(); /* * correction amount */ constraint.objective = relative_velocity( &opt_rb1, &opt_rb2, &constraint.normal, &constraint.rot_axis1, &constraint.rot_axis2, &dt); if constraint.objective < -correction.rest_eps { constraint.objective = constraint.objective + restitution * constraint.objective } constraint.objective = -constraint.objective; if depth < na::zero() { constraint.objective = constraint.objective + depth / dt } else if depth < correction.corr_mode.max_depth_for_vel_corr() { constraint.objective = constraint.objective + depth * correction.corr_mode.vel_corr_factor() / dt } constraint.impulse = if depth < na::zero() { na::zero() } else { initial_impulse }; /* * constraint bounds */ constraint.lobound = lobound; constraint.hibound = hibound; } pub fn relative_velocity(rb1: &Option<&RigidBody>, rb2: &Option<&RigidBody>, normal: &Vect, rot_axis1: &Orientation, rot_axis2: &Orientation, dt: &Scalar) -> Scalar { let mut dvel: Scalar = na::zero(); match *rb1 { Some(ref rb) => { dvel = dvel - na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal) + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis1); }, None => { } } match *rb2 { Some(ref rb) => { dvel = dvel + na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal) + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis2); }, None => { } } dvel }
orr_factor(&self) -> Scalar { match *self { CorrectionMode::VelocityAndPosition(_, ref p, _) => p.clone(), CorrectionMod
function_block-random_span
[]
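The contact_equation.rs record above builds velocity constraints from contacts; its relative_velocity helper projects each body's predicted velocity (lin_vel + lin_acc * dt) onto the contact normal and adds an angular contribution along precomputed rotation axes. Below is a minimal 2-D sketch of that formula using plain arrays and scalars — Body2d and relative_normal_velocity are illustrative names, not part of nphysics:

// 2-D sketch: project each body's predicted velocity (v + a * dt) onto the
// contact normal, with body 1 entering with a minus sign and body 2 with a
// plus sign, then add the angular terms (omega + alpha * dt) * rot_axis.
fn dot(a: [f64; 2], b: [f64; 2]) -> f64 {
    a[0] * b[0] + a[1] * b[1]
}

struct Body2d {
    lin_vel: [f64; 2],
    lin_acc: [f64; 2],
    ang_vel: f64, // angular velocity is a scalar in 2-D
    ang_acc: f64,
}

fn relative_normal_velocity(
    rb1: Option<&Body2d>,
    rb2: Option<&Body2d>,
    normal: [f64; 2],
    rot_axis1: f64,
    rot_axis2: f64,
    dt: f64,
) -> f64 {
    let mut dvel = 0.0;
    if let Some(rb) = rb1 {
        let v = [rb.lin_vel[0] + rb.lin_acc[0] * dt, rb.lin_vel[1] + rb.lin_acc[1] * dt];
        dvel -= dot(v, normal);
        dvel += (rb.ang_vel + rb.ang_acc * dt) * rot_axis1;
    }
    if let Some(rb) = rb2 {
        let v = [rb.lin_vel[0] + rb.lin_acc[0] * dt, rb.lin_vel[1] + rb.lin_acc[1] * dt];
        dvel += dot(v, normal);
        dvel += (rb.ang_vel + rb.ang_acc * dt) * rot_axis2;
    }
    dvel
}

fn main() {
    // Body 1 moves along the normal at 1 unit/s, body 2 is at rest: its
    // projected term enters with a minus sign, so the result is -1.0.
    let moving = Body2d { lin_vel: [1.0, 0.0], lin_acc: [0.0, 0.0], ang_vel: 0.0, ang_acc: 0.0 };
    let resting = Body2d { lin_vel: [0.0, 0.0], lin_acc: [0.0, 0.0], ang_vel: 0.0, ang_acc: 0.0 };
    let dvel = relative_normal_velocity(Some(&moving), Some(&resting), [1.0, 0.0], 0.0, 0.0, 0.016);
    assert!((dvel + 1.0).abs() < 1e-9);
}

A negative result corresponds to the bodies closing along the normal, which is the case where fill_velocity_constraint above adds the restitution term to the objective.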
Rust
src/hs_types.rs
a-mackay/raystack
26837f3c148f0e21ba529f40925828e3f291c333
use chrono::{NaiveDate, NaiveTime}; use chrono_tz::Tz; use raystack_core::{FromHaysonError, Hayson}; use serde_json::{json, Value}; use std::convert::From; const KIND: &str = "_kind"; #[derive(Clone, Debug, Eq, PartialEq)] pub struct Date(NaiveDate); impl Date { pub fn new(naive_date: NaiveDate) -> Self { Date(naive_date) } pub fn naive_date(&self) -> &NaiveDate { &self.0 } pub fn into_naive_date(self) -> NaiveDate { self.0 } } impl Hayson for Date { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { match &value { Value::Object(obj) => { if let Some(kind_err) = hayson_check_kind("date", value) { return Err(kind_err); } let val = obj.get("val"); if val.is_none() { return hayson_error("Date val is missing"); } let val = val.unwrap().as_str(); if val.is_none() { return hayson_error("Date val is not a string"); } let val = val.unwrap(); match val.parse() { Ok(naive_date) => Ok(Date::new(naive_date)), Err(_) => hayson_error( "Date val string could not be parsed as a NaiveDate", ), } } _ => hayson_error("Date JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "date", "val": self.naive_date().to_string(), }) } } impl From<NaiveDate> for Date { fn from(d: NaiveDate) -> Self { Self::new(d) } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Time(NaiveTime); impl Time { pub fn new(naive_time: NaiveTime) -> Self { Time(naive_time) } pub fn naive_time(&self) -> &NaiveTime { &self.0 } pub fn into_naive_time(self) -> NaiveTime { self.0 } } impl Hayson for Time { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { match &value { Value::Object(obj) => { if let Some(kind_err) = hayson_check_kind("time", value) { return Err(kind_err); } let val = obj.get("val"); if val.is_none() { return hayson_error("Time val is missing"); } let val = val.unwrap().as_str(); if val.is_none() { return hayson_error("Time val is not a string"); } let val = val.unwrap(); match val.parse() { Ok(naive_time) => Ok(Time::new(naive_time)), Err(_) => hayson_error( "Time val string could not be parsed as a NaiveTime", ), } } _ => hayson_error("Time JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "time", "val": self.naive_time().to_string(), }) } } impl From<NaiveTime> for Time { fn from(t: NaiveTime) -> Self { Self::new(t) } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct DateTime { date_time: chrono::DateTime<Tz>, } impl DateTime { pub fn new(date_time: chrono::DateTime<Tz>) -> Self { Self { date_time } } pub fn date_time(&self) -> &chrono::DateTime<Tz> { &self.date_time } pub fn into_date_time(self) -> chrono::DateTime<Tz> { self.date_time } pub fn time_zone(&self) -> &str { self.date_time.timezone().name() } pub fn short_time_zone(&self) -> &str { crate::tz::time_zone_name_to_short_name(self.time_zone()) } } impl Hayson for DateTime { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { let default_tz = "GMT"; match &value { Value::Object(obj) => { if let Some(kind_err) = hayson_check_kind("dateTime", value) { return Err(kind_err); } let tz_value = obj.get("tz"); let mut tz_str = default_tz.to_owned(); if let Some(value) = tz_value { match value { Value::Null => { tz_str = default_tz.to_owned(); } Value::String(tz_string) => { tz_str = tz_string.clone(); } _ => { return hayson_error( "DateTime tz is not a null or a string", ) } } } let dt = obj.get("val"); if dt.is_none() { return hayson_error("DateTime val is missing"); } let dt = dt.unwrap().as_str(); if dt.is_none() { return hayson_error("DateTime val is not a string"); } 
let dt = dt.unwrap(); match chrono::DateTime::parse_from_rfc3339(dt) { Ok(dt) => { let tz = crate::skyspark_tz_string_to_tz(&tz_str); if let Some(tz) = tz { let dt = dt.with_timezone(&tz); Ok(DateTime::new(dt)) } else { hayson_error(format!("DateTime tz '{}' has no matching chrono_tz time zone", tz_str)) } } Err(_) => hayson_error( "Time val string could not be parsed as a NaiveTime", ), } } _ => hayson_error("Time JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "dateTime", "val": self.date_time().to_rfc3339(), "tz": self.short_time_zone(), }) } } impl From<chrono::DateTime<Tz>> for DateTime { fn from(dt: chrono::DateTime<Tz>) -> Self { Self::new(dt) } } fn hayson_error<T, M>(message: M) -> Result<T, FromHaysonError> where M: AsRef<str>, { Err(FromHaysonError::new(message.as_ref().to_owned())) } fn hayson_error_opt<M>(message: M) -> Option<FromHaysonError> where M: AsRef<str>, { Some(FromHaysonError::new(message.as_ref().to_owned())) } fn hayson_check_kind( target_kind: &str, value: &Value, ) -> Option<FromHaysonError> { match value.get(KIND) { Some(kind) => match kind { Value::String(kind) => { if kind == target_kind { None } else { hayson_error_opt(format!( "Expected '{}' = {} but found {}", KIND, kind, kind )) } } _ => hayson_error_opt(format!("'{}' key is not a string", KIND)), }, None => hayson_error_opt(format!("Missing '{}' key", KIND)), } } #[cfg(test)] mod test { use crate::{Date, DateTime, Time}; use chrono::{NaiveDate, NaiveTime}; use chrono_tz::Tz; use raystack_core::Hayson; #[test] fn serde_date_works() { let naive_date = NaiveDate::from_ymd(2021, 1, 1); let x = Date::new(naive_date); let value = x.to_hayson(); let deserialized = Date::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_time_works() { let naive_time = NaiveTime::from_hms(2, 15, 59); let x = Time::new(naive_time); let value = x.to_hayson(); let deserialized = Time::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::GMT); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_with_one_slash_tz_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::Australia__Sydney); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_with_multiple_slashes_tz_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::America__North_Dakota__Beulah); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn short_time_zone_works() { let dt: DateTime = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::America__North_Dakota__Beulah) .into(); assert_eq!(dt.short_time_zone(), "Beulah"); let dt: DateTime = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::GMT) .into(); assert_eq!(dt.short_time_zone(), "GMT"); let dt: DateTime = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::Australia__Sydney) .into(); assert_eq!(dt.short_time_zone(), "Sydney"); } }
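The hs_types.rs code above encodes Date, Time and DateTime as Hayson JSON objects carrying a "_kind" tag plus a "val" payload (and a "tz" for DateTime). Assuming only serde_json is available, a stripped-down sketch of that tag-and-payload round trip — independent of the raystack_core Hayson trait, with hypothetical helper names — might read:

use serde_json::{json, Value};

// Encode an ISO date string under the `_kind` / `val` convention shown above.
fn encode_date(iso_date: &str) -> Value {
    json!({ "_kind": "date", "val": iso_date })
}

// Decode by first checking the `_kind` tag, then reading the `val` payload.
fn decode_date(value: &Value) -> Result<String, String> {
    let kind = value
        .get("_kind")
        .and_then(Value::as_str)
        .ok_or_else(|| "missing or non-string `_kind`".to_string())?;
    if kind != "date" {
        return Err(format!("expected `_kind` = date, found {}", kind));
    }
    value
        .get("val")
        .and_then(Value::as_str)
        .map(str::to_owned)
        .ok_or_else(|| "missing or non-string `val`".to_string())
}

fn main() {
    let encoded = encode_date("2021-01-01");
    assert_eq!(decode_date(&encoded).unwrap(), "2021-01-01");
}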
use chrono::{NaiveDate, NaiveTime}; use chrono_tz::Tz; use raystack_core::{FromHaysonError, Hayson}; use serde_json::{json, Value}; use std::convert::From; const KIND: &str = "_kind"; #[derive(Clone, Debug, Eq, PartialEq)] pub struct Date(NaiveDate); impl Date { pub fn new(naive_date: NaiveDate) -> Self { Date(naive_date) } pub fn naive_date(&self) -> &NaiveDate { &self.0 } pub fn into_naive_date(self) -> NaiveDate { self.0 } } impl Hayson for Date { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { match &value { Value::Object(obj) => { if let Some(kind_err) = hayson_check_kind("date", value) { return Err(kind_err); } let val = obj.get("val"); if val.is_none() { return hayson_error("Date val is missing"); } let val = val.unwrap().as_str(); if val.is_none() { return hayson_error("Date val is not a string"); } let val = val.unwrap(); match val.parse() { Ok(naive_date) => Ok(Date::new(naive_date)), Err(_) => hayson_error( "Date val string could not be parsed as a NaiveDate", ), } } _ => hayson_error("Date JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "date", "val": self.naive_date().to_string(), }) } } impl From<NaiveDate> for Date { fn from(d: NaiveDate) -> Self { Self::new(d) } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Time(NaiveTime); impl Time { pub fn new(naive_time: NaiveTime) -> Self { Time(naive_time) } pub fn naive_time(&self) -> &NaiveTime { &self.0 } pub fn into_naive_time(self) -> NaiveTime { self.0 } } impl Hayson for Time { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { match &value { Value::Object(obj) => { if let Some(kind_err) = hayson_check_kind("time", value) { return Err(kind_err); } let val = obj.get("val"); if val.is_none() { return hayson_error("Time val is missing"); } let val = val.unwrap().as_str(); if val.is_none() { return hayson_error("Time val is not a string"); } let val = val.unwrap(); match val.parse() { Ok(naive_time) => Ok(Time::new(naive_time)), Err(_) => hayson_error( "Time val string could not be parsed as a NaiveTime", ), } } _ => hayson_error("Time JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "time", "val": self.naive_time().to_string(), }) } } impl From<NaiveTime> for Time { fn from(t: NaiveTime) -> Self { Self::new(t) } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct DateTime { date_time: chrono::DateTime<Tz>, } impl DateTime { pub fn new(date_time: chrono::DateTime<Tz>) -> Self { Self { date_time } } pub fn date_time(&self) -> &chrono::DateTime<Tz> { &self.date_time } pub fn into_date_time(self) -> chrono::DateTime<Tz> { self.date_time } pub fn time_zone(&self) -> &str { self.date_time.timezone().name() } pub fn short_time_zone(&self) -> &str { crate::tz::time_zone_name_to_short_name(self.time_zone()) } } impl Hayson for DateTime { fn from_hayson(value: &Value) -> Result<Self, FromHaysonError> { let default_tz = "GMT"; match &value { Value::Object(obj) => {
let tz_value = obj.get("tz"); let mut tz_str = default_tz.to_owned(); if let Some(value) = tz_value { match value { Value::Null => { tz_str = default_tz.to_owned(); } Value::String(tz_string) => { tz_str = tz_string.clone(); } _ => { return hayson_error( "DateTime tz is not a null or a string", ) } } } let dt = obj.get("val"); if dt.is_none() { return hayson_error("DateTime val is missing"); } let dt = dt.unwrap().as_str(); if dt.is_none() { return hayson_error("DateTime val is not a string"); } let dt = dt.unwrap(); match chrono::DateTime::parse_from_rfc3339(dt) { Ok(dt) => { let tz = crate::skyspark_tz_string_to_tz(&tz_str); if let Some(tz) = tz { let dt = dt.with_timezone(&tz); Ok(DateTime::new(dt)) } else { hayson_error(format!("DateTime tz '{}' has no matching chrono_tz time zone", tz_str)) } } Err(_) => hayson_error( "Time val string could not be parsed as a NaiveTime", ), } } _ => hayson_error("Time JSON value must be an object"), } } fn to_hayson(&self) -> Value { json!({ KIND: "dateTime", "val": self.date_time().to_rfc3339(), "tz": self.short_time_zone(), }) } } impl From<chrono::DateTime<Tz>> for DateTime { fn from(dt: chrono::DateTime<Tz>) -> Self { Self::new(dt) } } fn hayson_error<T, M>(message: M) -> Result<T, FromHaysonError> where M: AsRef<str>, { Err(FromHaysonError::new(message.as_ref().to_owned())) } fn hayson_error_opt<M>(message: M) -> Option<FromHaysonError> where M: AsRef<str>, { Some(FromHaysonError::new(message.as_ref().to_owned())) } fn hayson_check_kind( target_kind: &str, value: &Value, ) -> Option<FromHaysonError> { match value.get(KIND) { Some(kind) => match kind { Value::String(kind) => { if kind == target_kind { None } else { hayson_error_opt(format!( "Expected '{}' = {} but found {}", KIND, kind, kind )) } } _ => hayson_error_opt(format!("'{}' key is not a string", KIND)), }, None => hayson_error_opt(format!("Missing '{}' key", KIND)), } } #[cfg(test)] mod test { use crate::{Date, DateTime, Time}; use chrono::{NaiveDate, NaiveTime}; use chrono_tz::Tz; use raystack_core::Hayson; #[test] fn serde_date_works() { let naive_date = NaiveDate::from_ymd(2021, 1, 1); let x = Date::new(naive_date); let value = x.to_hayson(); let deserialized = Date::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_time_works() { let naive_time = NaiveTime::from_hms(2, 15, 59); let x = Time::new(naive_time); let value = x.to_hayson(); let deserialized = Time::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::GMT); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_with_one_slash_tz_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::Australia__Sydney); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn serde_date_time_with_multiple_slashes_tz_works() { let dt = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::America__North_Dakota__Beulah); let x = DateTime::new(dt); let value = x.to_hayson(); let deserialized = DateTime::from_hayson(&value).unwrap(); assert_eq!(x, deserialized); } #[test] fn short_time_zone_works() { let dt: DateTime = 
chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::America__North_Dakota__Beulah) .into(); assert_eq!(dt.short_time_zone(), "Beulah"); let dt: DateTime = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::GMT) .into(); assert_eq!(dt.short_time_zone(), "GMT"); let dt: DateTime = chrono::DateTime::parse_from_rfc3339("2021-01-01T18:30:09.453Z") .unwrap() .with_timezone(&Tz::Australia__Sydney) .into(); assert_eq!(dt.short_time_zone(), "Sydney"); } }
if let Some(kind_err) = hayson_check_kind("dateTime", value) { return Err(kind_err); }
if_condition
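A hedged reading of how the three code fragments above relate: treating the long fragment that ends at `Value::Object(obj) => {` as the prefix, the single `hayson_check_kind("dateTime", ...)` guard as the middle, and the fragment that begins at `let tz_value = obj.get("tz")` as the suffix, concatenating them in that order reproduces the full source file shown first. The sketch below only illustrates that layout; the parameter names are assumptions, not fields taken from the data.

fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut file_code =
        String::with_capacity(prefix.len() + middle.len() + suffix.len());
    file_code.push_str(prefix);
    // For this row, the middle is the `hayson_check_kind("dateTime", ...)` guard.
    file_code.push_str(middle);
    file_code.push_str(suffix);
    file_code
}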
[ { "content": "/// Convert a `DateTime` into a string which can be used in ZINC files.\n\nfn to_zinc_encoded_string(date_time: &DateTime) -> String {\n\n let time_zone_name = date_time.short_time_zone();\n\n format!(\n\n \"{} {}\",\n\n date_time\n\n .date_time()\n\n .to_rfc3339_opts(SecondsFormat::Secs, true),\n\n time_zone_name,\n\n )\n\n}\n", "file_path": "src/api.rs", "rank": 0, "score": 105330.75489244895 }, { "content": "#[cfg(feature = \"grid_csv\")]\n\nfn value_to_string(value: &Value) -> String {\n\n use crate::{Coord, Date, DateTime, Time};\n\n use raystack_core::{\n\n Hayson, Marker, Na, Number, RemoveMarker, Symbol, Uri, Xstr,\n\n };\n\n if let Some(_) = value.as_null() {\n\n return \"\".to_owned();\n\n }\n\n if let Some(string) = value.as_str() {\n\n return string.to_owned();\n\n }\n\n if let Ok(_) = Marker::from_hayson(value) {\n\n return \"✔\".to_owned();\n\n }\n\n if let Ok(hs_ref) = Ref::from_hayson(value) {\n\n return hs_ref.to_axon_code().to_owned();\n\n }\n\n if let Ok(number) = Number::from_hayson(value) {\n\n return number.to_string();\n\n }\n", "file_path": "src/grid.rs", "rank": 1, "score": 94391.94200729394 }, { "content": "fn base64_decode_no_padding(s: &str) -> Result<String, Base64DecodeError> {\n\n let config = base64::Config::new(base64::CharacterSet::Standard, false);\n\n let bytes = base64::decode_config(s, config).map_err(|err| {\n\n let msg = format!(\"{}\", err);\n\n Base64DecodeError { msg }\n\n })?;\n\n String::from_utf8(bytes).map_err(|err| {\n\n let msg = format!(\"{}\", err);\n\n Base64DecodeError { msg }\n\n })\n\n}\n\n\n\n/// An error which occurred during the authentication process.\n\n#[derive(Debug, Error)]\n\npub enum AuthError {\n\n /// An error which occurred in the underlying HTTP client.\n\n #[error(\"A HTTP client error occurred while authenticating\")]\n\n Http(#[source] reqwest::Error),\n\n /// An error occurred in `raystack` during authentication.\n\n #[error(\"An internal error occurred while authenticating\")]\n", "file_path": "src/auth.rs", "rank": 3, "score": 62369.92862657919 }, { "content": "/// An extension trait for the `serde_json::Value` enum,\n\n/// containing helper functions which make it easier to\n\n/// parse specific Haystack types from the underlying Hayson encoding\n\n/// (a JSON value in a specific format, see https://github.com/j2inn/hayson).\n\npub trait ValueExt {\n\n /// Convert the JSON value to a Haystack Coord.\n\n fn as_hs_coord(&self) -> Option<Coord>;\n\n /// Convert the JSON value to a Haystack Date.\n\n fn as_hs_date(&self) -> Option<Date>;\n\n /// Convert the JSON value to a Haystack DateTime.\n\n fn as_hs_date_time(&self) -> Option<DateTime>;\n\n /// Convert the JSON value to a Haystack Marker.\n\n fn as_hs_marker(&self) -> Option<Marker>;\n\n /// Convert the JSON value to a Haystack NA.\n\n fn as_hs_na(&self) -> Option<Na>;\n\n /// Convert the JSON value to a Haystack Number.\n\n fn as_hs_number(&self) -> Option<Number>;\n\n /// Convert the JSON value to a Haystack Ref.\n\n fn as_hs_ref(&self) -> Option<Ref>;\n\n /// Convert the JSON value to a Haystack Remove Marker.\n\n fn as_hs_remove_marker(&self) -> Option<RemoveMarker>;\n\n /// Parse the JSON value as a Haystack Str.\n\n fn as_hs_str(&self) -> Option<&str>;\n\n /// Convert the JSON value to a Haystack Symbol.\n", "file_path": "src/value_ext.rs", "rank": 4, "score": 60172.30286543823 }, { "content": "struct KeyValuePairs {\n\n key_value_pairs: Vec<(String, String)>,\n\n}\n\n\n\nimpl KeyValuePairs {\n\n fn get(&self, key: &str) -> Result<String, 
KeyValuePairParseError> {\n\n self.key_value_pairs\n\n .iter()\n\n .find(|(k, _v)| k == key)\n\n .map(|(_k, v)| v.clone())\n\n .ok_or_else(|| {\n\n let msg = format!(\"missing key {} in key-value pairs\", key);\n\n KeyValuePairParseError { msg }\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 5, "score": 57097.487727117245 }, { "content": "/// Converts a string containing a SkySpark time zone name into the matching\n\n/// `Tz` variant from the chrono_tz crate.\n\npub fn skyspark_tz_string_to_tz<T>(s: T) -> Option<Tz>\n\nwhere\n\n T: AsRef<str>,\n\n{\n\n let matching_tz = TZ_VARIANTS.iter().find(|tz| {\n\n let full_name = tz.name();\n\n let is_full_name_match = full_name == s.as_ref();\n\n\n\n if is_full_name_match {\n\n true\n\n } else {\n\n let short_name = time_zone_name_to_short_name(full_name);\n\n short_name == s.as_ref()\n\n }\n\n });\n\n matching_tz.copied()\n\n}\n\n\n\n/// Given an IANA TZDB identifier like \"America/New_York\", return the\n\n/// short time zone name used by SkySpark (like \"New_York\").\n", "file_path": "src/tz.rs", "rank": 6, "score": 55980.19018873031 }, { "content": "fn parse_key_value_pairs(\n\n s: &str,\n\n) -> Result<KeyValuePairs, KeyValuePairParseError> {\n\n let delimiters = &[' ', ','][..];\n\n\n\n let key_value_pairs: Result<Vec<_>, KeyValuePairParseError> = s\n\n .split(delimiters)\n\n .filter(|s| s.to_lowercase() != \"scram\" && !s.is_empty())\n\n .map(|s| {\n\n let delimiter_index = s.find('=');\n\n\n\n if let Some(delimiter_index) = delimiter_index {\n\n let split = s.split_at(delimiter_index);\n\n let key = split.0.to_string();\n\n let value = split.1.trim_start_matches('=').to_string();\n\n Ok((key, value))\n\n } else {\n\n let msg = format!(\"No '=' symbol in key-value pair {}\", s);\n\n Err(KeyValuePairParseError { msg })\n\n }\n\n })\n\n .collect();\n\n\n\n Ok(KeyValuePairs {\n\n key_value_pairs: key_value_pairs?,\n\n })\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 7, "score": 54058.004694874835 }, { "content": "fn parse_key_value_pairs_from_header(\n\n header: &str,\n\n res: Response,\n\n) -> Result<KeyValuePairs, KeyValuePairParseError> {\n\n let header_value = res.headers().get(header).ok_or_else(|| {\n\n let msg = format!(\"missing HTTP header {}\", header);\n\n KeyValuePairParseError { msg }\n\n })?;\n\n let header_value_str = header_value.to_str().map_err(|_| {\n\n let msg =\n\n format!(\"could not convert HTTP header {} to a string\", header);\n\n KeyValuePairParseError { msg }\n\n })?;\n\n parse_key_value_pairs(header_value_str)\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 8, "score": 51669.63747057569 }, { "content": "fn generate_nonce() -> Result<String, GenerateNonceError> {\n\n use rand::{RngCore, SeedableRng};\n\n use rand_chacha::ChaCha20Rng;\n\n use std::fmt::Write;\n\n\n\n let mut rng = ChaCha20Rng::from_entropy();\n\n let mut out = vec![0u8; 32];\n\n rng.fill_bytes(&mut out);\n\n\n\n let mut nonce = String::new();\n\n for byte in out.iter() {\n\n write!(&mut nonce, \"{:x}\", byte).map_err(|err| GenerateNonceError {\n\n msg: format!(\"{}\", err),\n\n })?;\n\n }\n\n Ok(nonce)\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 11, "score": 48126.89935695175 }, { "content": "fn base64_encode_no_padding<T: ?Sized + AsRef<[u8]>>(s: &T) -> String {\n\n let config = base64::Config::new(base64::CharacterSet::Standard, false);\n\n base64::encode_config(s, config)\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 12, "score": 39404.20824934949 }, { "content": "fn is_server_valid(\n\n auth_msg: &str,\n\n salted_password: 
&[u8],\n\n server_signature: &str,\n\n hash_fn: &HashFunction,\n\n) -> bool {\n\n let computed_server_key_tag =\n\n hash_fn.hmac_sign(salted_password, b\"Server Key\");\n\n let computed_server_key = &computed_server_key_tag[..];\n\n let computed_server_signature_tag =\n\n hash_fn.hmac_sign(computed_server_key, auth_msg.as_bytes());\n\n let computed_server_signature = &computed_server_signature_tag[..];\n\n let computed_server_signature = base64::encode(computed_server_signature);\n\n\n\n computed_server_signature == server_signature\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 13, "score": 36679.527832612606 }, { "content": "struct AuthSessionConfig {\n\n handshake_token: String,\n\n hash_fn: HashFunction,\n\n}\n\n\n\nasync fn auth_session_config(\n\n client: &Client,\n\n url: &str,\n\n username: &str,\n\n) -> AuthResult<AuthSessionConfig> {\n\n let base64_username = base64_encode_no_padding(username);\n\n let auth_header_value = format!(\"HELLO username={}\", base64_username);\n\n let res = client\n\n .get(url)\n\n .header(\"Authorization\", auth_header_value)\n\n .send()\n\n .await?;\n\n\n\n let kvps = parse_key_value_pairs_from_header(\"www-authenticate\", res)?;\n\n let handshake_token = kvps.get(\"handshakeToken\")?;\n", "file_path": "src/auth.rs", "rank": 14, "score": 35463.943613927775 }, { "content": "struct ServerFirstResponse {\n\n server_first_msg: String,\n\n server_iterations: u32,\n\n server_nonce: String,\n\n server_salt: String,\n\n}\n\n\n\nasync fn server_first_response(\n\n client: &Client,\n\n url: &str,\n\n handshake_token: &str,\n\n client_first_msg: &str,\n\n) -> AuthResult<ServerFirstResponse> {\n\n let auth_header_value = format!(\n\n \"SCRAM handshakeToken={}, data={}\",\n\n handshake_token,\n\n base64_encode_no_padding(&client_first_msg)\n\n );\n\n let res = client\n\n .get(url)\n", "file_path": "src/auth.rs", "rank": 15, "score": 35463.943613927775 }, { "content": "struct ServerSecondResponse {\n\n auth_token: String,\n\n server_signature: String,\n\n}\n\n\n\nasync fn server_second_response(\n\n client: &Client,\n\n url: &str,\n\n handshake_token: &str,\n\n auth_msg: &str,\n\n salted_password: &[u8],\n\n client_final_no_proof: &str,\n\n hash_fn: &HashFunction,\n\n) -> AuthResult<ServerSecondResponse> {\n\n let client_key_tag = hash_fn.hmac_sign(salted_password, b\"Client Key\");\n\n let client_key = &client_key_tag[..];\n\n let stored_key = hash_fn.digest(client_key);\n\n let client_signature_tag =\n\n hash_fn.hmac_sign(&stored_key, auth_msg.as_bytes());\n\n let client_signature = &client_signature_tag[..];\n", "file_path": "src/auth.rs", "rank": 16, "score": 35463.943613927775 }, { "content": " /// Returns true if the JSON value represents a Haystack\n\n /// URI.\n\n fn is_hs_uri(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// XStr.\n\n fn is_hs_xstr(&self) -> bool;\n\n}\n\n\n\nimpl ValueExt for Value {\n\n fn as_hs_coord(&self) -> Option<Coord> {\n\n Coord::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_date(&self) -> Option<Date> {\n\n Date::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_date_time(&self) -> Option<DateTime> {\n\n DateTime::from_hayson(self).ok()\n\n }\n", "file_path": "src/value_ext.rs", "rank": 17, "score": 24987.27474443643 }, { "content": " fn as_hs_symbol(&self) -> Option<Symbol>;\n\n /// Convert the JSON value to a Haystack Time.\n\n fn as_hs_time(&self) -> Option<Time>;\n\n /// Returns the Haystack URI value as a Haystack Uri.\n\n fn as_hs_uri(&self) -> Option<Uri>;\n\n /// Return the Haystack XStr 
value as a Haystack Xstr.\n\n fn as_hs_xstr(&self) -> Option<Xstr>;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Coord.\n\n fn is_hs_coord(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Date.\n\n fn is_hs_date(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// DateTime.\n\n fn is_hs_date_time(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// marker.\n\n fn is_hs_marker(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n", "file_path": "src/value_ext.rs", "rank": 18, "score": 24982.013689705305 }, { "content": "use crate::{Date, DateTime, Time};\n\nuse raystack_core::{\n\n Coord, Hayson, Marker, Na, Number, Ref, RemoveMarker, Symbol, Uri, Xstr,\n\n};\n\nuse serde_json::Value;\n\n\n\n/// An extension trait for the `serde_json::Value` enum,\n\n/// containing helper functions which make it easier to\n\n/// parse specific Haystack types from the underlying Hayson encoding\n\n/// (a JSON value in a specific format, see https://github.com/j2inn/hayson).\n", "file_path": "src/value_ext.rs", "rank": 19, "score": 24981.856524696614 }, { "content": " /// NA value.\n\n fn is_hs_na(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Number.\n\n fn is_hs_number(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Ref.\n\n fn is_hs_ref(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// remove marker.\n\n fn is_hs_remove_marker(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Str.\n\n fn is_hs_str(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Symbol.\n\n fn is_hs_symbol(&self) -> bool;\n\n /// Returns true if the JSON value represents a Haystack\n\n /// Time.\n\n fn is_hs_time(&self) -> bool;\n", "file_path": "src/value_ext.rs", "rank": 20, "score": 24978.948621925592 }, { "content": "\n\n fn as_hs_str(&self) -> Option<&str> {\n\n self.as_str()\n\n }\n\n\n\n fn as_hs_symbol(&self) -> Option<Symbol> {\n\n Symbol::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_time(&self) -> Option<Time> {\n\n Time::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_uri(&self) -> Option<Uri> {\n\n Uri::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_xstr(&self) -> Option<Xstr> {\n\n Xstr::from_hayson(self).ok()\n\n }\n", "file_path": "src/value_ext.rs", "rank": 21, "score": 24973.343743143556 }, { "content": "\n\n fn is_hs_coord(&self) -> bool {\n\n self.as_hs_coord().is_some()\n\n }\n\n\n\n fn is_hs_date(&self) -> bool {\n\n self.as_hs_date().is_some()\n\n }\n\n\n\n fn is_hs_date_time(&self) -> bool {\n\n self.as_hs_date_time().is_some()\n\n }\n\n\n\n fn is_hs_marker(&self) -> bool {\n\n self.as_hs_marker().is_some()\n\n }\n\n\n\n fn is_hs_na(&self) -> bool {\n\n self.as_hs_na().is_some()\n\n }\n", "file_path": "src/value_ext.rs", "rank": 22, "score": 24969.420258735267 }, { "content": "\n\n fn as_hs_marker(&self) -> Option<Marker> {\n\n Marker::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_na(&self) -> Option<Na> {\n\n Na::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_number(&self) -> Option<Number> {\n\n Number::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_ref(&self) -> Option<Ref> {\n\n Ref::from_hayson(self).ok()\n\n }\n\n\n\n fn as_hs_remove_marker(&self) -> Option<RemoveMarker> {\n\n RemoveMarker::from_hayson(self).ok()\n\n }\n", "file_path": "src/value_ext.rs", "rank": 23, "score": 
24966.003036254482 }, { "content": "\n\n fn is_hs_time(&self) -> bool {\n\n self.as_hs_time().is_some()\n\n }\n\n\n\n fn is_hs_uri(&self) -> bool {\n\n self.as_hs_uri().is_some()\n\n }\n\n\n\n fn is_hs_xstr(&self) -> bool {\n\n self.as_hs_xstr().is_some()\n\n }\n\n}\n", "file_path": "src/value_ext.rs", "rank": 24, "score": 24965.59310278625 }, { "content": "\n\n fn is_hs_number(&self) -> bool {\n\n self.as_hs_number().is_some()\n\n }\n\n\n\n fn is_hs_ref(&self) -> bool {\n\n self.as_hs_ref().is_some()\n\n }\n\n\n\n fn is_hs_remove_marker(&self) -> bool {\n\n self.as_hs_remove_marker().is_some()\n\n }\n\n\n\n fn is_hs_str(&self) -> bool {\n\n self.as_hs_str().is_some()\n\n }\n\n\n\n fn is_hs_symbol(&self) -> bool {\n\n self.as_hs_symbol().is_some()\n\n }\n", "file_path": "src/value_ext.rs", "rank": 25, "score": 24965.195133046473 }, { "content": " /// Query for history values from yesterday.\n\n Yesterday,\n\n /// Query for history values on a particular date.\n\n Date(Date),\n\n /// Query for history values between two dates.\n\n DateSpan { start: Date, end: Date },\n\n /// Query for history values between two datetimes.\n\n DateTimeSpan { start: DateTime, end: DateTime },\n\n /// Query for history values since a particular datetime.\n\n SinceDateTime { date_time: DateTime },\n\n}\n\n\n\nconst DATE_FMT: &str = \"%Y-%m-%d\";\n\n\n\nimpl HisReadRange {\n\n pub(crate) fn to_json_request_string(&self) -> String {\n\n match self {\n\n Self::Today => \"today\".to_owned(),\n\n Self::Yesterday => \"yesterday\".to_owned(),\n\n Self::Date(date) => date.naive_date().format(DATE_FMT).to_string(),\n", "file_path": "src/api.rs", "rank": 34, "score": 31.529541448312816 }, { "content": " .map(|(date_time, value)| {\n\n let date_time: DateTime = date_time.with_timezone(&tz).into();\n\n\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n\n \"val\": value.to_hayson(),\n\n })\n\n })\n\n .collect();\n\n\n\n let mut req_grid = Grid::new_internal(rows);\n\n req_grid.add_ref_to_meta(id);\n\n\n\n self.post(self.his_write_url(), &req_grid).await\n\n }\n\n\n\n /// Writes string values with UTC timestamps to a single point.\n\n /// `time_zone_name` must be a valid SkySpark timezone name.\n\n pub async fn utc_his_write_str(\n\n &mut self,\n", "file_path": "src/lib.rs", "rank": 37, "score": 26.015939821681325 }, { "content": " return \"<Na>\".to_owned();\n\n }\n\n if let Ok(xstr) = Xstr::from_hayson(value) {\n\n return xstr.to_string();\n\n }\n\n if let Ok(coord) = Coord::from_hayson(value) {\n\n return coord.to_string();\n\n }\n\n if let Ok(sym) = Symbol::from_hayson(value) {\n\n return sym.to_string();\n\n }\n\n\n\n match value {\n\n Value::Array(_) => \"<List>\".to_owned(),\n\n Value::Object(obj) => {\n\n match obj.get(\"_kind\").and_then(|val| val.as_str()) {\n\n Some(\"grid\") => \"<Grid>\".to_owned(),\n\n _ => \"<Dict>\".to_owned(),\n\n }\n\n }\n", "file_path": "src/grid.rs", "rank": 39, "score": 24.970621417556625 }, { "content": " Self::DateSpan { start, end } => {\n\n format!(\n\n \"{},{}\",\n\n start.naive_date().format(DATE_FMT),\n\n end.naive_date().format(DATE_FMT)\n\n )\n\n }\n\n Self::DateTimeSpan { start, end } => {\n\n let start_str = to_zinc_encoded_string(start);\n\n let end_str = to_zinc_encoded_string(end);\n\n format!(\"{},{}\", start_str, end_str)\n\n }\n\n Self::SinceDateTime { date_time } => {\n\n to_zinc_encoded_string(date_time)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Convert a `DateTime` into a string which can be used in ZINC files.\n", "file_path": "src/api.rs", "rank": 42, "score": 
24.42816121265205 }, { "content": " )));\n\n }\n\n }\n\n\n\n Ok(Grid { json: value })\n\n }\n\n}\n\n\n\n/// Error denoting that a JSON value could not be parsed into a `Grid`.\n\n#[derive(Clone, Debug, Eq, Error, PartialEq)]\n\n#[error(\"{msg}\")]\n\npub struct ParseJsonGridError {\n\n pub(crate) msg: String,\n\n}\n\n\n\nimpl ParseJsonGridError {\n\n fn new(msg: String) -> Self {\n\n ParseJsonGridError { msg }\n\n }\n\n}\n", "file_path": "src/grid.rs", "rank": 43, "score": 23.887632127763272 }, { "content": " }\n\n\n\n /// Create an empty grid.\n\n pub fn empty() -> Self {\n\n Self::new(vec![]).expect(\"creating an empty grid should never fail\")\n\n }\n\n\n\n /// Return a map which represents the metadata for the grid.\n\n pub fn meta(&self) -> &Map<String, Value> {\n\n self.json[\"meta\"]\n\n .as_object()\n\n .expect(\"meta is a JSON Object\")\n\n }\n\n\n\n /// Return an owned map, which represents the\n\n /// metadata for the grid.\n\n pub fn to_meta(&self) -> Map<String, Value> {\n\n let meta = self.json[\"meta\"]\n\n .as_object()\n\n .expect(\"meta is a JSON Object\");\n", "file_path": "src/grid.rs", "rank": 45, "score": 23.129027069531684 }, { "content": " id: &Ref,\n\n time_zone_name: &str,\n\n his_data: &[(chrono::DateTime<Utc>, String)],\n\n ) -> Result<Grid> {\n\n let tz = skyspark_tz_string_to_tz(time_zone_name).ok_or_else(|| {\n\n Error::TimeZone {\n\n err_time_zone: time_zone_name.to_owned(),\n\n }\n\n })?;\n\n\n\n let rows = his_data\n\n .iter()\n\n .map(|(date_time, value)| {\n\n let date_time: DateTime = date_time.with_timezone(&tz).into();\n\n\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n\n \"val\": value,\n\n })\n\n })\n", "file_path": "src/lib.rs", "rank": 46, "score": 22.640749082583174 }, { "content": " if let Ok(dt) = DateTime::from_hayson(value) {\n\n return dt.date_time().to_rfc3339();\n\n }\n\n if let Ok(date) = Date::from_hayson(value) {\n\n return date.naive_date().format(\"%Y-%m-%d\").to_string();\n\n }\n\n if let Ok(time) = Time::from_hayson(value) {\n\n return time.naive_time().format(\"%H:%M:%S\").to_string();\n\n }\n\n if let Ok(uri) = Uri::from_hayson(value) {\n\n return uri.to_string();\n\n }\n\n if let Some(bool) = value.as_bool() {\n\n let b = if bool { \"True\" } else { \"False\" };\n\n return b.to_owned();\n\n }\n\n if let Ok(_) = RemoveMarker::from_hayson(value) {\n\n return \"<R>\".to_owned();\n\n }\n\n if let Ok(_) = Na::from_hayson(value) {\n", "file_path": "src/grid.rs", "rank": 47, "score": 22.460015165062757 }, { "content": " })\n\n .collect();\n\n\n\n let mut req_grid = Grid::new_internal(rows);\n\n req_grid.add_ref_to_meta(id);\n\n\n\n self.post(self.his_write_url(), &req_grid).await\n\n }\n\n\n\n /// Writes string values to a single point.\n\n pub async fn his_write_str(\n\n &mut self,\n\n id: &Ref,\n\n his_data: &[(DateTime, String)],\n\n ) -> Result<Grid> {\n\n let rows = his_data\n\n .iter()\n\n .map(|(date_time, value)| {\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n", "file_path": "src/lib.rs", "rank": 48, "score": 21.674451284503352 }, { "content": "use raystack_core::Ref;\n\nuse raystack_core::{is_tag_name, TagName};\n\nuse serde_json::json;\n\nuse serde_json::map::Map;\n\nuse serde_json::Value;\n\nuse serde_json::{to_string, to_string_pretty};\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashSet;\n\nuse std::convert::TryInto;\n\nuse std::iter::FromIterator;\n\nuse thiserror::Error;\n\n\n\n/// A wrapper around a `serde_json::Value` which represents a Haystack Grid.\n\n/// Columns will always be sorted in alphabetical 
order.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Grid {\n\n json: Value,\n\n}\n\n\n\nimpl Grid {\n", "file_path": "src/grid.rs", "rank": 49, "score": 21.270449465707973 }, { "content": " let mut req_grid = Grid::new_internal(rows);\n\n req_grid.add_ref_to_meta(id);\n\n\n\n self.post(self.his_write_url(), &req_grid).await\n\n }\n\n\n\n /// Writes numeric values to a single point. `unit` must be a valid\n\n /// Haystack unit literal, such as `L/s` or `celsius`.\n\n pub async fn his_write_num(\n\n &mut self,\n\n id: &Ref,\n\n his_data: &[(DateTime, Number)],\n\n ) -> Result<Grid> {\n\n let rows = his_data\n\n .iter()\n\n .map(|(date_time, value)| {\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n\n \"val\": value.to_hayson(),\n\n })\n", "file_path": "src/lib.rs", "rank": 50, "score": 21.171032740792832 }, { "content": "mod err;\n\npub mod eval;\n\nmod grid;\n\nmod hs_types;\n\nmod tz;\n\nmod value_ext;\n\n\n\nuse api::HaystackUrl;\n\npub use api::HisReadRange;\n\nuse chrono::Utc;\n\npub use err::{Error, NewSkySparkClientError};\n\npub use grid::{Grid, ParseJsonGridError};\n\npub use hs_types::{Date, DateTime, Time};\n\npub use raystack_core::Coord;\n\npub use raystack_core::{is_tag_name, ParseTagNameError, TagName};\n\npub use raystack_core::{BasicNumber, Number, ScientificNumber};\n\npub use raystack_core::{FromHaysonError, Hayson};\n\npub use raystack_core::{Marker, Na, RemoveMarker, Symbol, Uri, Xstr};\n\npub use raystack_core::{ParseRefError, Ref};\n\nuse serde_json::json;\n\nuse std::convert::TryInto;\n\npub use tz::skyspark_tz_string_to_tz;\n\nuse url::Url;\n\npub use value_ext::ValueExt;\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 21.05197327383044 }, { "content": "\n\n self.post(self.his_read_url(), &req_grid).await\n\n }\n\n\n\n /// Writes boolean values to a single point.\n\n pub async fn his_write_bool(\n\n &mut self,\n\n id: &Ref,\n\n his_data: &[(DateTime, bool)],\n\n ) -> Result<Grid> {\n\n let rows = his_data\n\n .iter()\n\n .map(|(date_time, value)| {\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n\n \"val\": value\n\n })\n\n })\n\n .collect();\n\n\n", "file_path": "src/lib.rs", "rank": 52, "score": 20.687140814277488 }, { "content": "use crate::auth::AuthError;\n\nuse crate::grid::{Grid, ParseJsonGridError};\n\nuse thiserror::Error;\n\n\n\nimpl Error {\n\n /// Return true if this error encapsulates a Haystack error grid.\n\n pub fn is_grid(&self) -> bool {\n\n matches!(self, Self::Grid { .. 
})\n\n }\n\n\n\n /// Return a reference to the Haystack error grid encapsulated by this\n\n /// error, if this error was caused by a Haystack error grid.\n\n pub fn grid(&self) -> Option<&Grid> {\n\n match self {\n\n Self::Grid { err_grid } => Some(err_grid),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Return the Haystack error grid encapsulated by this error, if this\n", "file_path": "src/err.rs", "rank": 53, "score": 19.63942961296821 }, { "content": " meta.clone()\n\n }\n\n\n\n pub(crate) fn add_ref_to_meta(&mut self, hsref: &Ref) {\n\n use raystack_core::Hayson;\n\n let meta = self.json[\"meta\"]\n\n .as_object_mut()\n\n .expect(\"meta is a JSON Object\");\n\n meta.insert(\"id\".to_owned(), hsref.to_hayson());\n\n }\n\n\n\n /// Return a vector of JSON values which represent the columns of the grid.\n\n pub fn cols(&self) -> &Vec<Value> {\n\n self.json[\"cols\"].as_array().expect(\"cols is a JSON Array\")\n\n }\n\n\n\n /// Add a new column, or overwrite an existing column by mapping\n\n /// each row to a new cell value.\n\n pub fn add_col<F>(&mut self, col_name: TagName, f: F)\n\n where\n", "file_path": "src/grid.rs", "rank": 54, "score": 19.56331489323419 }, { "content": " /// Create a new `Grid` from rows. Each row must be a JSON Object.\n\n /// # Example\n\n /// ```rust\n\n /// use raystack::Grid;\n\n /// use serde_json::json;\n\n ///\n\n /// let row = json!({\"firstName\": \"Otis\", \"lastName\": \"Jackson Jr.\"});\n\n /// let rows = vec![row];\n\n /// let grid = Grid::new(rows).unwrap();\n\n /// assert_eq!(grid.rows()[0][\"firstName\"], \"Otis\");\n\n /// ```\n\n pub fn new(rows: Vec<Value>) -> Result<Self, ParseJsonGridError> {\n\n let mut keys = HashSet::new();\n\n for row in &rows {\n\n if let Some(row_object) = row.as_object() {\n\n keys.extend(row_object.keys());\n\n } else {\n\n return Err(ParseJsonGridError::new(format!(\n\n \"Expected a JSON object for row but found {}\",\n\n row\n", "file_path": "src/grid.rs", "rank": 55, "score": 18.865251134645266 }, { "content": "use crate::{Date, DateTime};\n\nuse chrono::SecondsFormat;\n\nuse url::Url;\n\n\n\npub(crate) trait HaystackUrl {\n\n fn about_url(&self) -> Url;\n\n fn formats_url(&self) -> Url;\n\n fn his_read_url(&self) -> Url;\n\n fn his_write_url(&self) -> Url;\n\n fn nav_url(&self) -> Url;\n\n fn ops_url(&self) -> Url;\n\n fn read_url(&self) -> Url;\n\n}\n\n\n\n/// Represents the different time range queries that can be sent\n\n/// as part of the `hisRead` Haystack operation.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum HisReadRange {\n\n /// Query for history values from today.\n\n Today,\n", "file_path": "src/api.rs", "rank": 56, "score": 18.82875753019705 }, { "content": " /// Return a vector of JSON values which represent the rows of the grid.\n\n pub fn rows(&self) -> &Vec<Value> {\n\n self.json[\"rows\"].as_array().expect(\"rows is a JSON Array\")\n\n }\n\n\n\n /// Return a vector of `Map`s which represent the rows of the grid.\n\n pub fn row_maps(&self) -> Vec<&Map<String, Value>> {\n\n self.json[\"rows\"]\n\n .as_array()\n\n .expect(\"rows is a JSON Array\")\n\n .iter()\n\n .map(|row| row.as_object().expect(\"row is a JSON Object\"))\n\n .collect()\n\n }\n\n\n\n /// Return a vector of mut `Map`s which represent the rows of the grid.\n\n fn row_maps_mut(&mut self) -> Vec<&mut Map<String, Value>> {\n\n self.json[\"rows\"]\n\n .as_array_mut()\n\n .expect(\"rows is a JSON Array\")\n", "file_path": "src/grid.rs", "rank": 57, "score": 18.73862847380562 }, { "content": "}\n\n\n\nimpl std::convert::From<crate::Error> 
for EvalError {\n\n fn from(error: crate::Error) -> Self {\n\n match error {\n\n crate::Error::Grid { err_grid } => Self::Grid { err_grid },\n\n crate::Error::Http { err } => Self::Http(err),\n\n crate::Error::ParseJsonGrid(err) => Self::ParseJsonGrid(err),\n\n crate::Error::TimeZone { err_time_zone } => {\n\n Self::TimeZone { err_time_zone }\n\n }\n\n crate::Error::UpdateAuthToken(_) => unreachable!(), // The standalone eval function will not update auth tokens.\n\n }\n\n }\n\n}\n\n\n\nimpl EvalError {\n\n /// Return true if this error encapsulates a Haystack error grid.\n\n pub fn is_grid(&self) -> bool {\n\n matches!(self, Self::Grid { .. })\n", "file_path": "src/eval.rs", "rank": 58, "score": 18.621303503275655 }, { "content": " .iter_mut()\n\n .map(|row| row.as_object_mut().expect(\"row is a JSON Object\"))\n\n .collect()\n\n }\n\n\n\n /// Return a vector of owned JSON values which\n\n /// represent the rows of the grid.\n\n pub fn to_rows(&self) -> Vec<Value> {\n\n self.json[\"rows\"]\n\n .as_array()\n\n .expect(\"rows is a JSON Array\")\n\n .to_vec()\n\n }\n\n\n\n /// Return a vector of owned JSON values which\n\n /// represent the rows of the grid.\n\n pub fn to_row_maps(&self) -> Vec<Map<String, Value>> {\n\n self.row_maps()\n\n .iter()\n\n .map(|&row_map| row_map.clone())\n", "file_path": "src/grid.rs", "rank": 59, "score": 18.37661948317716 }, { "content": " }\n\n\n\n /// Return the string representation of the underlying JSON value.\n\n pub fn to_json_string(&self) -> String {\n\n to_string(&self.json)\n\n .expect(\"serializing grid to String should never fail\")\n\n }\n\n\n\n /// Return a pretty formatted string representing the underlying JSON value.\n\n pub fn to_json_string_pretty(&self) -> String {\n\n to_string_pretty(&self.json)\n\n .expect(\"serializing grid to String should never fail\")\n\n }\n\n\n\n /// Returns true if the grid appears to be an error grid.\n\n pub fn is_error(&self) -> bool {\n\n self.meta().get(\"err\").is_some()\n\n }\n\n\n\n /// Return the error trace if present.\n", "file_path": "src/grid.rs", "rank": 60, "score": 18.145070489492152 }, { "content": " \"val\": value\n\n })\n\n })\n\n .collect();\n\n\n\n let mut req_grid = Grid::new_internal(rows);\n\n req_grid.add_ref_to_meta(id);\n\n\n\n self.post(self.his_write_url(), &req_grid).await\n\n }\n\n\n\n /// Writes boolean values with UTC timestamps to a single point.\n\n /// `time_zone_name` must be a valid SkySpark timezone name.\n\n pub async fn utc_his_write_bool(\n\n &mut self,\n\n id: &Ref,\n\n time_zone_name: &str,\n\n his_data: &[(chrono::DateTime<Utc>, bool)],\n\n ) -> Result<Grid> {\n\n let tz = skyspark_tz_string_to_tz(time_zone_name).ok_or_else(|| {\n", "file_path": "src/lib.rs", "rank": 61, "score": 17.85428615468889 }, { "content": " Error::TimeZone {\n\n err_time_zone: time_zone_name.to_owned(),\n\n }\n\n })?;\n\n\n\n let rows = his_data\n\n .iter()\n\n .map(|(date_time, value)| {\n\n let date_time: DateTime = date_time.with_timezone(&tz).into();\n\n json!({\n\n \"ts\": date_time.to_hayson(),\n\n \"val\": value\n\n })\n\n })\n\n .collect();\n\n\n\n let mut req_grid = Grid::new_internal(rows);\n\n req_grid.add_ref_to_meta(id);\n\n\n\n self.post(self.his_write_url(), &req_grid).await\n", "file_path": "src/lib.rs", "rank": 62, "score": 17.719548681463007 }, { "content": " pub fn error_trace(&self) -> Option<String> {\n\n use crate::ValueExt;\n\n self.meta()[\"errTrace\"].as_hs_str().map(|s| s.to_owned())\n\n }\n\n\n\n /// Return a string containing a CSV representation of the grid.\n\n /// 
The CSV string will have a header containing only the given column\n\n /// names, in the same order as they were provided. The header will\n\n /// include any given column names which are not present in the grid itself.\n\n ///\n\n /// Nested structures such as Dicts (JSON objects) or Lists (JSON arrays)\n\n /// will not be expanded, and will be displayed as `<StructureType>`.\n\n ///\n\n /// Example:\n\n ///\n\n /// ```rust\n\n /// use raystack::Grid;\n\n /// use serde_json::json;\n\n ///\n\n /// let grid = Grid::new(vec![json!({\"id\": 1, \"x\": 2, \"y\": 3})]).unwrap();\n", "file_path": "src/grid.rs", "rank": 63, "score": 17.58586697620796 }, { "content": " (date_time1.into(), true),\n\n (date_time2.into(), false),\n\n (date_time3.into(), true),\n\n ];\n\n\n\n let res = client.his_write_bool(&id, &his_data[..]).await.unwrap();\n\n assert_eq!(res.rows().len(), 0);\n\n }\n\n\n\n #[tokio::test]\n\n async fn utc_his_write_num() {\n\n use chrono::{Duration, NaiveDateTime, Utc};\n\n\n\n let ndt = NaiveDateTime::parse_from_str(\n\n \"2021-01-10 00:00:00\",\n\n \"%Y-%m-%d %H:%M:%S\",\n\n )\n\n .unwrap();\n\n\n\n let date_time1: chrono::DateTime<Utc> =\n", "file_path": "src/lib.rs", "rank": 64, "score": 17.329206584074974 }, { "content": " let res = client.his_write_num(&id, &his_data[..]).await.unwrap();\n\n assert_eq!(res.rows().len(), 0);\n\n }\n\n\n\n #[tokio::test]\n\n async fn utc_his_write_str() {\n\n use chrono::{DateTime, Duration, NaiveDateTime, Utc};\n\n\n\n let ndt = NaiveDateTime::parse_from_str(\n\n \"2021-01-10 00:00:00\",\n\n \"%Y-%m-%d %H:%M:%S\",\n\n )\n\n .unwrap();\n\n\n\n let date_time1 = DateTime::from_utc(ndt, Utc);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n\n let mut client = new_client().await;\n\n let id = get_ref_for_filter(\n", "file_path": "src/lib.rs", "rank": 65, "score": 17.30992292009764 }, { "content": "pub(crate) fn time_zone_name_to_short_name(tz_name: &str) -> &str {\n\n let parts: Vec<_> = tz_name.split('/').filter(|s| !s.is_empty()).collect();\n\n parts.last().expect(\"time zone parts should not be empty\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::skyspark_tz_string_to_tz;\n\n\n\n #[test]\n\n fn short_name_match_works() {\n\n let tz = skyspark_tz_string_to_tz(\"Sydney\").unwrap();\n\n assert_eq!(tz, chrono_tz::Tz::Australia__Sydney);\n\n }\n\n\n\n #[test]\n\n fn full_name_match_works() {\n\n let tz = skyspark_tz_string_to_tz(\"Australia/Sydney\").unwrap();\n\n assert_eq!(tz, chrono_tz::Tz::Australia__Sydney);\n\n }\n\n}\n", "file_path": "src/tz.rs", "rank": 66, "score": 17.284539635405412 }, { "content": " .collect(),\n\n );\n\n\n\n let mut json_grid = json!({\n\n \"_kind\": \"grid\",\n\n \"meta\": {\"ver\": \"3.0\"},\n\n });\n\n\n\n let json_grid_insert =\n\n json_grid.as_object_mut().expect(\"grid is a JSON Object\");\n\n let rows = Value::Array(rows);\n\n json_grid_insert.insert(\"cols\".to_owned(), cols);\n\n json_grid_insert.insert(\"rows\".to_owned(), rows);\n\n\n\n json_grid.try_into()\n\n }\n\n\n\n pub(crate) fn new_internal(rows: Vec<Value>) -> Self {\n\n Self::new(rows)\n\n .expect(\"creating grids within this crate should never fail\")\n", "file_path": "src/grid.rs", "rank": 68, "score": 17.096546792750623 }, { "content": " async fn utc_his_write_num_no_unit() {\n\n use chrono::{Duration, NaiveDateTime, Utc};\n\n\n\n let ndt = NaiveDateTime::parse_from_str(\n\n \"2021-01-10 00:00:00\",\n\n \"%Y-%m-%d %H:%M:%S\",\n\n )\n\n .unwrap();\n\n\n\n let date_time1: 
chrono::DateTime<Utc> =\n\n chrono::DateTime::from_utc(ndt, Utc);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n\n let mut client = new_client().await;\n\n\n\n let id = get_ref_for_filter(\n\n &mut client,\n\n \"continuousIntegrationHisWritePoint and kind == \\\"Number\\\" and not unit\",\n\n )\n", "file_path": "src/lib.rs", "rank": 69, "score": 16.72572301736991 }, { "content": " /// Return a vector containing the column names in this grid, as strings.\n\n pub fn col_name_strs(&self) -> Vec<&str> {\n\n self.cols()\n\n .iter()\n\n .map(|col| col[\"name\"].as_str().expect(\"col name is a JSON string\"))\n\n .collect()\n\n }\n\n\n\n /// Return a vector containing the values in the given column.\n\n pub fn col_to_vec(&self, col_name: &str) -> Vec<Option<&Value>> {\n\n self.rows().iter().map(|row| row.get(col_name)).collect()\n\n }\n\n\n\n /// Returns true if the grid contains the given column name.\n\n pub fn has_col_name(&self, name: &str) -> bool {\n\n self.col_name_strs().contains(&name)\n\n }\n\n\n\n /// Remove the column name from the grid if it is present, and return\n\n /// true if the column was removed.\n", "file_path": "src/grid.rs", "rank": 70, "score": 16.602273953752615 }, { "content": " }\n\n\n\n /// Writes numeric values with UTC timestamps to a single point.\n\n /// `unit` must be a valid Haystack unit literal, such as `L/s` or\n\n /// `celsius`.\n\n /// `time_zone_name` must be a valid SkySpark timezone name.\n\n pub async fn utc_his_write_num(\n\n &mut self,\n\n id: &Ref,\n\n time_zone_name: &str,\n\n his_data: &[(chrono::DateTime<Utc>, Number)],\n\n ) -> Result<Grid> {\n\n let tz = skyspark_tz_string_to_tz(time_zone_name).ok_or_else(|| {\n\n Error::TimeZone {\n\n err_time_zone: time_zone_name.to_owned(),\n\n }\n\n })?;\n\n\n\n let rows = his_data\n\n .iter()\n", "file_path": "src/lib.rs", "rank": 71, "score": 16.479561871137015 }, { "content": "#[derive(Debug, Error)]\n\n#[error(\"Could not parse key-value pair: {msg}\")]\n\npub(crate) struct KeyValuePairParseError {\n\n msg: String,\n\n}\n\n\n\nimpl KeyValuePairParseError {\n\n fn into_auth_error(self) -> InternalAuthError {\n\n InternalAuthError::from(HandshakeError::from(self))\n\n }\n\n}\n\n\n\nimpl From<KeyValuePairParseError> for InternalAuthError {\n\n fn from(err: KeyValuePairParseError) -> Self {\n\n err.into_auth_error()\n\n }\n\n}\n\n\n\n#[derive(Debug, Error)]\n\n#[error(\"Could not decode a base64-encoded string, cause: {msg}\")]\n", "file_path": "src/auth.rs", "rank": 72, "score": 16.458397919886522 }, { "content": " F: Fn(&mut Map<String, Value>) -> Value,\n\n {\n\n let col_name_string = col_name.to_string();\n\n\n\n for row in self.json[\"rows\"]\n\n .as_array_mut()\n\n .expect(\"rows is a JSON Array\")\n\n {\n\n let row = row.as_object_mut().expect(\"Each row is a JSON object\");\n\n let value = f(row);\n\n row.insert(col_name_string.clone(), value);\n\n }\n\n\n\n self.add_col_names(std::slice::from_ref(&col_name));\n\n }\n\n\n\n /// Add column names to the grid.\n\n fn add_col_names(&mut self, col_names: &[TagName]) {\n\n let mut all_names: HashSet<&str> =\n\n HashSet::from_iter(self.col_name_strs());\n", "file_path": "src/grid.rs", "rank": 73, "score": 15.849717078771395 }, { "content": " _ => \"<Unknown>\".to_owned(),\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<Value> for Grid {\n\n type Error = ParseJsonGridError;\n\n\n\n fn try_from(value: Value) -> Result<Self, Self::Error> {\n\n if !value[\"meta\"].is_object() {\n\n return 
Err(ParseJsonGridError::new(\n\n \"Could not find a JSON object for 'meta'\".to_owned(),\n\n ));\n\n };\n\n\n\n let cols = value[\"cols\"].as_array().ok_or_else(|| {\n\n ParseJsonGridError::new(\n\n \"Could not find a JSON array for 'cols'\".to_owned(),\n\n )\n\n })?;\n\n\n", "file_path": "src/grid.rs", "rank": 74, "score": 15.525811426289527 }, { "content": "\n\n /// Returns a grid containing the operations available on the server.\n\n pub async fn ops(&mut self) -> Result<Grid> {\n\n self.get(self.ops_url()).await\n\n }\n\n\n\n /// Returns a grid containing the records matching the given Axon\n\n /// filter string.\n\n pub async fn read(\n\n &mut self,\n\n filter: &str,\n\n limit: Option<u64>,\n\n ) -> Result<Grid> {\n\n let row = match limit {\n\n Some(integer) => json!({\"filter\": filter, \"limit\": integer}),\n\n None => json!({ \"filter\": filter }),\n\n };\n\n\n\n let req_grid = Grid::new_internal(vec![row]);\n\n self.post(self.read_url(), &req_grid).await\n", "file_path": "src/lib.rs", "rank": 76, "score": 15.365394868095201 }, { "content": " /// Example:\n\n ///\n\n /// ```rust\n\n /// use raystack::Grid;\n\n /// use serde_json::json;\n\n ///\n\n /// let grid = Grid::new(vec![json!({\"id\": 1, \"x\": 2, \"y\": 3})]).unwrap();\n\n /// let csv_string = grid\n\n /// .to_csv_string()\n\n /// .unwrap();\n\n ///\n\n /// assert_eq!(\n\n /// csv_string,\n\n /// \"id,x,y\\n1,2,3\\n\".to_string()\n\n /// );\n\n /// ```\n\n #[cfg(feature = \"grid_csv\")]\n\n pub fn to_csv_string(&self) -> Result<String, CsvError> {\n\n self.to_csv_string_with_ordered_cols(&self.col_name_strs())\n\n }\n", "file_path": "src/grid.rs", "rank": 77, "score": 15.208515353444287 }, { "content": " new_auth_token: Option<String>,\n\n}\n\n\n\nimpl EvalOutput {\n\n fn new(grid: Grid, new_auth_token: Option<String>) -> Self {\n\n Self {\n\n grid,\n\n new_auth_token,\n\n }\n\n }\n\n\n\n pub fn grid(&self) -> &Grid {\n\n &self.grid\n\n }\n\n\n\n pub fn into_grid(self) -> Grid {\n\n self.grid\n\n }\n\n\n\n /// Return true only if a new auth token was obtained.\n", "file_path": "src/eval.rs", "rank": 78, "score": 15.179032965987034 }, { "content": " pub fn to_cols(&self) -> Vec<Value> {\n\n self.json[\"cols\"]\n\n .as_array()\n\n .expect(\"cols is a JSON Array\")\n\n .to_vec()\n\n }\n\n\n\n /// Return a vector containing the column names in this grid.\n\n pub fn col_names(&self) -> Vec<TagName> {\n\n self.cols()\n\n .iter()\n\n .map(|col| {\n\n let name =\n\n col[\"name\"].as_str().expect(\"col name is a JSON string\");\n\n TagName::new(name.to_owned())\n\n .expect(\"col names in grid are valid tag names\")\n\n })\n\n .collect()\n\n }\n\n\n", "file_path": "src/grid.rs", "rank": 79, "score": 15.08632734258791 }, { "content": " ) -> Ref {\n\n let points_grid = client.read(filter, Some(1)).await.unwrap();\n\n let point_ref = points_grid.rows()[0][\"id\"].as_hs_ref().unwrap();\n\n point_ref\n\n }\n\n\n\n #[tokio::test]\n\n async fn utc_his_write_bool() {\n\n use chrono::{DateTime, Duration, NaiveDateTime, Utc};\n\n\n\n let ndt = NaiveDateTime::parse_from_str(\n\n \"2021-01-10 00:00:00\",\n\n \"%Y-%m-%d %H:%M:%S\",\n\n )\n\n .unwrap();\n\n\n\n let date_time1 = DateTime::from_utc(ndt, Utc);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n", "file_path": "src/lib.rs", "rank": 80, "score": 14.732106964701185 }, { "content": " .collect()\n\n }\n\n\n\n /// Sort the rows with a comparator function. 
This sort is stable.\n\n pub fn sort_rows<F>(&mut self, compare: F)\n\n where\n\n F: FnMut(&Value, &Value) -> Ordering,\n\n {\n\n let rows = self.json[\"rows\"]\n\n .as_array_mut()\n\n .expect(\"rows is a JSON Array\");\n\n rows.sort_by(compare);\n\n }\n\n\n\n /// Add a row to the grid. The row must be a JSON object.\n\n pub fn add_row(&mut self, row: Value) -> Result<(), ParseJsonGridError> {\n\n self.add_rows(vec![row])\n\n }\n\n\n\n /// Add rows to the grid. The rows to add must be a `Vec` containing\n", "file_path": "src/grid.rs", "rank": 81, "score": 14.674785872524737 }, { "content": " assert_eq!(\n\n grid.rows()[1].as_object().unwrap().contains_key(\"id\"),\n\n false\n\n );\n\n assert_eq!(grid.rows()[2][\"id\"].as_bool().unwrap(), true);\n\n\n\n // Check the other column has not changed:\n\n assert_eq!(grid.rows()[0][\"one\"].as_i64().unwrap(), 1);\n\n assert_eq!(\n\n grid.rows()[1][\"one\"].as_str().unwrap(),\n\n \"the id tag is missing\"\n\n );\n\n assert_eq!(grid.rows()[2][\"one\"].as_i64().unwrap(), 2);\n\n }\n\n\n\n #[test]\n\n fn map_col_does_not_modify_grid_if_there_is_no_matching_col() {\n\n let rows = vec![json!({\"id\": \"a\"}), json!({\"id\": \"b\"})];\n\n let mut grid = Grid::new(rows).unwrap();\n\n\n", "file_path": "src/grid.rs", "rank": 82, "score": 14.643767495991256 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use crate::api::HisReadRange;\n\n use crate::SkySparkClient;\n\n use crate::ValueExt;\n\n use raystack_core::{Number, Ref};\n\n use serde_json::json;\n\n use url::Url;\n\n\n\n fn project_api_url() -> Url {\n\n let url_str =\n\n std::env::var(\"RAYSTACK_SKYSPARK_PROJECT_API_URL\").unwrap();\n\n Url::parse(&url_str).unwrap()\n\n }\n\n\n\n fn username() -> String {\n\n std::env::var(\"RAYSTACK_SKYSPARK_USERNAME\").unwrap()\n\n }\n\n\n\n fn password() -> String {\n", "file_path": "src/lib.rs", "rank": 83, "score": 14.502784800705282 }, { "content": " for &col_name in col_names {\n\n let cell_value = &row[col_name];\n\n let value_string = value_to_string(cell_value);\n\n row_values.push(value_string);\n\n }\n\n writer.write_record(row_values)?;\n\n }\n\n\n\n match writer.into_inner() {\n\n Ok(bytes) => Ok(String::from_utf8(bytes)\n\n .expect(\"Bytes should be UTF8 since all input was UTF8\")),\n\n Err(err) => Err(CsvError::from(Box::new(err))),\n\n }\n\n }\n\n\n\n /// Return a string containing a CSV representation of the grid.\n\n ///\n\n /// Nested structures such as Dicts (JSON objects) or Lists (JSON arrays)\n\n /// will not be expanded, and will be displayed as `<StructureType>`.\n\n ///\n", "file_path": "src/grid.rs", "rank": 85, "score": 14.279740377699994 }, { "content": " use chrono::{DateTime, Duration};\n\n use chrono_tz::Australia::Sydney;\n\n\n\n let date_time1 =\n\n DateTime::parse_from_rfc3339(\"2019-08-01T00:00:00+10:00\")\n\n .unwrap()\n\n .with_timezone(&Sydney);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n\n let mut client = new_client().await;\n\n let id = get_ref_for_filter(\n\n &mut client,\n\n \"continuousIntegrationHisWritePoint and kind == \\\"Str\\\"\",\n\n )\n\n .await;\n\n\n\n let his_data = vec![\n\n (date_time1.into(), \"hello\".to_owned()),\n\n (date_time2.into(), \"world\".to_owned()),\n", "file_path": "src/lib.rs", "rank": 86, "score": 14.216428476506275 }, { "content": " /// only JSON objects.\n\n pub fn add_rows(\n\n &mut self,\n\n mut rows: Vec<Value>,\n\n ) -> Result<(), ParseJsonGridError> {\n\n // Validate the rows being added:\n\n if 
rows.iter().any(|row| !row.is_object()) {\n\n let msg =\n\n \"At least one row being added is not a JSON object\".to_owned();\n\n return Err(ParseJsonGridError::new(msg));\n\n }\n\n\n\n let mut new_keys: HashSet<TagName> = HashSet::new();\n\n\n\n // Validate the column names being added:\n\n for row in &rows {\n\n let row_obj = row.as_object().expect(\"row is an object\");\n\n for key in row_obj.keys() {\n\n match TagName::new(key.to_string()) {\n\n Some(tag_name) => {\n", "file_path": "src/grid.rs", "rank": 87, "score": 13.730001703356116 }, { "content": "}\n\n\n\n/// A client for interacting with a SkySpark server.\n\n#[derive(Debug)]\n\npub struct SkySparkClient {\n\n auth_token: String,\n\n client: reqwest::Client,\n\n username: String,\n\n password: String,\n\n project_api_url: Url,\n\n}\n\n\n\nimpl SkySparkClient {\n\n /// Create a new `SkySparkClient`.\n\n ///\n\n /// # Example\n\n /// ```rust,no_run\n\n /// # async fn run() {\n\n /// use raystack::SkySparkClient;\n\n /// use url::Url;\n", "file_path": "src/lib.rs", "rank": 88, "score": 13.62732072847174 }, { "content": "#[derive(Debug, Error)]\n\npub enum NewSkySparkClientError {\n\n /// An error which occurred during the authentication process.\n\n #[error(\"Error occurred during authentication\")]\n\n Auth(#[from] AuthError),\n\n /// An error caused by an invalid SkySpark project url.\n\n #[error(\"The SkySpark URL is invalid: {msg}\")]\n\n Url { msg: String },\n\n}\n\n\n\nimpl NewSkySparkClientError {\n\n pub(crate) fn url(msg: &str) -> Self {\n\n NewSkySparkClientError::Url { msg: msg.into() }\n\n }\n\n}", "file_path": "src/err.rs", "rank": 89, "score": 13.611818019356779 }, { "content": " /// let ordered_cols = vec![\"y\", \"x\", \"colWithNoValues\"];\n\n /// let csv_string = grid\n\n /// .to_csv_string_with_ordered_cols(&ordered_cols)\n\n /// .unwrap();\n\n ///\n\n /// assert_eq!(\n\n /// csv_string,\n\n /// \"y,x,colWithNoValues\\n3,2,\\n\".to_string()\n\n /// );\n\n /// ```\n\n #[cfg(feature = \"grid_csv\")]\n\n pub fn to_csv_string_with_ordered_cols(\n\n &self,\n\n col_names: &[&str],\n\n ) -> Result<String, CsvError> {\n\n let mut writer = csv::Writer::from_writer(vec![]);\n\n writer.write_record(col_names)?;\n\n\n\n for row in self.rows() {\n\n let mut row_values = Vec::new();\n", "file_path": "src/grid.rs", "rank": 90, "score": 13.431647099842843 }, { "content": " pub async fn about(&mut self) -> Result<Grid> {\n\n self.get(self.about_url()).await\n\n }\n\n\n\n /// Returns a grid describing what MIME types are available.\n\n pub async fn formats(&mut self) -> Result<Grid> {\n\n self.get(self.formats_url()).await\n\n }\n\n\n\n /// Returns a grid of history data for a single point.\n\n pub async fn his_read(\n\n &mut self,\n\n id: &Ref,\n\n range: &HisReadRange,\n\n ) -> Result<Grid> {\n\n let row = json!({\n\n \"id\": id.to_hayson(),\n\n \"range\": range.to_json_request_string()\n\n });\n\n let req_grid = Grid::new_internal(vec![row]);\n", "file_path": "src/lib.rs", "rank": 91, "score": 13.378191993940515 }, { "content": " false\n\n }\n\n }\n\n\n\n /// Modify the grid by applying the mapping function to each value in\n\n /// the specified column.\n\n pub fn map_col<F>(&mut self, col_name: &TagName, f: F)\n\n where\n\n F: Fn(&Value) -> Value,\n\n {\n\n for row_map in self.row_maps_mut() {\n\n let col_name_str: &str = col_name.as_ref();\n\n if row_map.contains_key(col_name_str) {\n\n let current_value = row_map.get(col_name_str).expect(\"a value should be present since we checked the map contains the key\");\n\n let 
new_value = f(current_value);\n\n row_map.insert(col_name_str.to_string(), new_value);\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/grid.rs", "rank": 92, "score": 13.280054597335832 }, { "content": " }\n\n\n\n /// Returns a grid containing the records matching the given id\n\n /// `Ref`s.\n\n pub async fn read_by_ids(&mut self, ids: &[Ref]) -> Result<Grid> {\n\n let rows = ids.iter().map(|id| json!({\"id\": id.to_hayson()})).collect();\n\n\n\n let req_grid = Grid::new_internal(rows);\n\n self.post(self.read_url(), &req_grid).await\n\n }\n\n}\n\n\n\nimpl HaystackUrl for SkySparkClient {\n\n fn about_url(&self) -> Url {\n\n self.append_to_url(\"about\")\n\n }\n\n\n\n fn formats_url(&self) -> Url {\n\n self.append_to_url(\"formats\")\n\n }\n", "file_path": "src/lib.rs", "rank": 93, "score": 12.894087063126392 }, { "content": " }\n\n\n\n /// Return a reference to the Haystack error grid encapsulated by this\n\n /// error, if this error was caused by a Haystack error grid.\n\n pub fn grid(&self) -> Option<&Grid> {\n\n match self {\n\n Self::Grid { err_grid } => Some(err_grid),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Return the Haystack error grid encapsulated by this error, if this\n\n /// error was caused by a Haystack error grid.\n\n pub fn into_grid(self) -> Option<Grid> {\n\n match self {\n\n Self::Grid { err_grid } => Some(err_grid),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "src/eval.rs", "rank": 94, "score": 12.688588594491511 }, { "content": " #[tokio::test]\n\n async fn his_write_bool() {\n\n use chrono::{DateTime, Duration};\n\n use chrono_tz::Australia::Sydney;\n\n\n\n let mut client = new_client().await;\n\n\n\n let date_time1 =\n\n DateTime::parse_from_rfc3339(\"2019-08-01T00:00:00+10:00\")\n\n .unwrap()\n\n .with_timezone(&Sydney);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n\n let id = get_ref_for_filter(\n\n &mut client,\n\n \"continuousIntegrationHisWritePoint and kind == \\\"Bool\\\"\",\n\n )\n\n .await;\n\n let his_data = vec![\n", "file_path": "src/lib.rs", "rank": 95, "score": 12.24899936933797 }, { "content": " json!(id.to_string() + dis)\n\n });\n\n\n\n assert_eq!(\n\n grid.rows()[0][\"newCol\"].as_str().unwrap(),\n\n \"abcd1234Hello World\"\n\n );\n\n assert_eq!(\n\n grid.rows()[1][\"newCol\"].as_str().unwrap(),\n\n \"cdef5678Hello Kitty\"\n\n );\n\n assert!(grid.to_json_string().contains(\"abcd1234Hello World\"));\n\n assert!(grid.to_json_string().contains(\"cdef5678Hello Kitty\"));\n\n assert!(grid\n\n .col_names()\n\n .contains(&TagName::new(\"newCol\".to_owned()).unwrap()));\n\n }\n\n\n\n #[test]\n\n fn add_col_overwrite_existing_col() {\n", "file_path": "src/grid.rs", "rank": 96, "score": 12.207588795907743 }, { "content": " }\n\n\n\n /// Return the dk length, in bytes.\n\n fn dk_len(&self) -> usize {\n\n match self {\n\n Self::Sha256 => 32,\n\n Self::Sha512 => 64,\n\n }\n\n }\n\n\n\n /// See the documentation for hmac::Key::new for the restrictions on\n\n /// `key_value`.\n\n fn hmac_sign(&self, key_value: &[u8], data: &[u8]) -> Vec<u8> {\n\n match self {\n\n Self::Sha256 => {\n\n let mut mac = HmacSha256::new_from_slice(key_value)\n\n .expect(\"expected key length to be valid\");\n\n mac.update(data);\n\n let result = mac.finalize();\n\n let bytes = result.into_bytes();\n", "file_path": "src/auth.rs", "rank": 97, "score": 12.180572189089586 }, { "content": " let res = client\n\n .utc_his_write_num(&id, \"Sydney\", &his_data[..])\n\n .await\n\n .unwrap();\n\n assert_eq!(res.rows().len(), 
0);\n\n }\n\n\n\n #[tokio::test]\n\n async fn his_write_num() {\n\n use chrono::{DateTime, Duration};\n\n use chrono_tz::Australia::Sydney;\n\n\n\n let date_time1 =\n\n DateTime::parse_from_rfc3339(\"2019-08-01T00:00:00+10:00\")\n\n .unwrap()\n\n .with_timezone(&Sydney);\n\n let date_time2 = date_time1 + Duration::minutes(5);\n\n let date_time3 = date_time1 + Duration::minutes(10);\n\n\n\n let mut client = new_client().await;\n", "file_path": "src/lib.rs", "rank": 98, "score": 12.165151345207526 }, { "content": " assert_eq!(grid.rows()[1][\"one\"].as_str().unwrap(), \"b\");\n\n assert_eq!(grid.rows()[1][\"two\"].as_i64().unwrap(), 2);\n\n }\n\n\n\n #[test]\n\n fn map_col_works() {\n\n let rows = vec![\n\n json!({\"id\": \"a\", \"one\": 1}),\n\n json!({\"one\": \"the id tag is missing\"}),\n\n json!({\"id\": \"b\", \"one\": 2}),\n\n ];\n\n let mut grid = Grid::new(rows).unwrap();\n\n\n\n let col_name = TagName::new(\"id\".to_owned()).unwrap();\n\n\n\n grid.map_col(&col_name, |_| serde_json::Value::Bool(true));\n\n\n\n assert_eq!(grid.col_name_strs(), vec![\"id\", \"one\"]);\n\n // Check the ID column has changed as expected:\n\n assert_eq!(grid.rows()[0][\"id\"].as_bool().unwrap(), true);\n", "file_path": "src/grid.rs", "rank": 99, "score": 12.153085756336075 } ]
Rust
src/timestamp.rs
Cognoscan/fog_pack
7b3af246faa851bfc2aa09cc186ff2332124e791
use std::cmp; use std::convert::TryFrom; use std::fmt; use std::ops; use std::time; use byteorder::{LittleEndian, ReadBytesExt}; use fog_crypto::serde::{ CryptoEnum, FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX, FOG_TYPE_ENUM_TIME_NAME, }; use serde::{ de::{Deserialize, Deserializer, EnumAccess, Error, Unexpected, VariantAccess}, ser::{Serialize, SerializeStructVariant, Serializer}, }; use serde_bytes::ByteBuf; const MAX_NANOSEC: u32 = 1_999_999_999; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Timestamp { sec: i64, nano: u32, standard: u8, } impl Timestamp { pub fn from_utc(sec: i64, nano: u32) -> Option<Timestamp> { if nano > MAX_NANOSEC { None } else { Some(Timestamp { sec, nano, standard: 0, }) } } pub fn from_sec(sec: i64) -> Timestamp { Timestamp { sec, nano: 0, standard: 0, } } pub const fn zero() -> Timestamp { Timestamp { sec: 0, nano: 0, standard: 0, } } pub const fn min_value() -> Timestamp { Timestamp { sec: i64::MIN, nano: 0, standard: 0, } } pub const fn max_value() -> Timestamp { Timestamp { sec: i64::MAX, nano: MAX_NANOSEC, standard: 0, } } pub fn min(self, other: Timestamp) -> Timestamp { if self < other { self } else { other } } pub fn max(self, other: Timestamp) -> Timestamp { if self > other { self } else { other } } pub fn next(mut self) -> Timestamp { if self.nano < MAX_NANOSEC { self.nano += 1; } else { self.nano = 0; self.sec += 1; } self } pub fn prev(mut self) -> Timestamp { if self.nano > 0 { self.nano -= 1; } else { self.nano = MAX_NANOSEC; self.sec -= 1; } self } pub fn timestamp_utc(&self) -> i64 { self.sec } pub fn timestamp_subsec_nanos(&self) -> u32 { self.nano } pub fn as_vec(&self) -> Vec<u8> { let mut v = Vec::new(); self.encode_vec(&mut v); v } pub fn encode_vec(&self, vec: &mut Vec<u8>) { if self.nano != 0 { vec.reserve(1 + 8 + 4); vec.push(self.standard); vec.extend_from_slice(&self.sec.to_le_bytes()); vec.extend_from_slice(&self.nano.to_le_bytes()); } else if (self.sec <= u32::MAX as i64) && (self.sec >= 0) { vec.reserve(1 + 4); vec.push(self.standard); vec.extend_from_slice(&(self.sec as u32).to_le_bytes()); } else { vec.reserve(1 + 8); vec.push(self.standard); vec.extend_from_slice(&self.sec.to_le_bytes()); } } pub fn size(&self) -> usize { if self.nano != 0 { 1 + 8 + 4 } else if (self.sec <= u32::MAX as i64) && (self.sec >= 0) { 1 + 4 } else { 1 + 8 } } pub fn now() -> Option<Timestamp> { match time::SystemTime::now().duration_since(time::SystemTime::UNIX_EPOCH) { Ok(t) => Timestamp::from_utc(t.as_secs() as i64, t.subsec_nanos()), Err(_) => None, } } } impl ops::Add<i64> for Timestamp { type Output = Timestamp; fn add(self, rhs: i64) -> Self { Timestamp { sec: self.sec + rhs, nano: self.nano, standard: self.standard, } } } impl ops::Sub<i64> for Timestamp { type Output = Timestamp; fn sub(self, rhs: i64) -> Self { Timestamp { sec: self.sec - rhs, nano: self.nano, standard: self.standard, } } } impl cmp::Ord for Timestamp { fn cmp(&self, other: &Timestamp) -> cmp::Ordering { match self.sec.cmp(&other.sec) { cmp::Ordering::Equal => self.nano.cmp(&other.nano), other => other, } } } impl cmp::PartialOrd for Timestamp { fn partial_cmp(&self, other: &Timestamp) -> Option<cmp::Ordering> { Some(self.cmp(other)) } } impl fmt::Display for Timestamp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "UTC: {} sec + {} ns", self.sec, self.nano) } } impl TryFrom<&[u8]> for Timestamp { type Error = String; fn try_from(value: &[u8]) -> Result<Self, Self::Error> { let mut raw = value; let standard = raw .read_u8() .map_err(|_| 
String::from("missing time standard byte"))?; let (sec, nano) = match value.len() { 13 => { let sec = raw.read_i64::<LittleEndian>().unwrap(); let nano = raw.read_u32::<LittleEndian>().unwrap(); (sec, nano) } 9 => { let sec = raw.read_i64::<LittleEndian>().unwrap(); (sec, 0) } 5 => { let sec = raw.read_u32::<LittleEndian>().unwrap() as i64; (sec, 0) } _ => { return Err(format!( "not a recognized Timestamp length ({} bytes)", value.len() )) } }; Ok(Timestamp { sec, nano, standard, }) } } impl Serialize for Timestamp { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut sv = serializer.serialize_struct_variant( FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX as u32, FOG_TYPE_ENUM_TIME_NAME, 2, )?; sv.serialize_field("std", &self.standard)?; sv.serialize_field("secs", &self.sec)?; sv.serialize_field("nanos", &self.sec)?; sv.end() } else { let value = ByteBuf::from(self.as_vec()); serializer.serialize_newtype_variant( FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX as u32, FOG_TYPE_ENUM_TIME_NAME, &value, ) } } } impl<'de> Deserialize<'de> for Timestamp { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct TimeVisitor { is_human_readable: bool, } impl<'de> serde::de::Visitor<'de> for TimeVisitor { type Value = Timestamp; fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( fmt, "{} enum with variant {} (id {})", FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_NAME, FOG_TYPE_ENUM_TIME_INDEX ) } fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error> where A: EnumAccess<'de>, { let variant = match data.variant()? { (CryptoEnum::Time, variant) => variant, (e, _) => { return Err(A::Error::invalid_type( Unexpected::Other(e.as_str()), &"Time", )) } }; if self.is_human_readable { use serde::de::MapAccess; struct TimeStructVisitor; impl<'de> serde::de::Visitor<'de> for TimeStructVisitor { type Value = Timestamp; fn expecting( &self, fmt: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { write!(fmt, "timestamp struct") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: MapAccess<'de>, { let mut secs: Option<i64> = None; let mut nanos: u32 = 0; while let Some(key) = map.next_key::<String>()? 
{ match key.as_ref() { "std" => { let v: u8 = map.next_value()?; if v != 0 { return Err(A::Error::invalid_value( Unexpected::Unsigned(v as u64), &"0", )); } } "secs" => { secs = Some(map.next_value()?); } "nanos" => { nanos = map.next_value()?; } _ => { return Err(A::Error::unknown_field( key.as_ref(), &["std", "secs", "nanos"], )) } } } let secs = secs.ok_or_else(|| A::Error::missing_field("secs"))?; Timestamp::from_utc(secs, nanos) .ok_or_else(|| A::Error::custom("Invalid timestamp")) } } variant.struct_variant(&["std", "secs", "nanos"], TimeStructVisitor) } else { let bytes: ByteBuf = variant.newtype_variant()?; Timestamp::try_from(bytes.as_ref()).map_err(A::Error::custom) } } } let is_human_readable = deserializer.is_human_readable(); deserializer.deserialize_enum( FOG_TYPE_ENUM, &[FOG_TYPE_ENUM_TIME_NAME], TimeVisitor { is_human_readable }, ) } } #[cfg(test)] mod test { use super::*; fn edge_cases() -> Vec<(usize, Timestamp)> { let mut test_cases = Vec::new(); test_cases.push((5, Timestamp::from_utc(0, 0).unwrap())); test_cases.push((5, Timestamp::from_utc(1, 0).unwrap())); test_cases.push((13, Timestamp::from_utc(1, 1).unwrap())); test_cases.push((5, Timestamp::from_utc(u32::MAX as i64 - 1, 0).unwrap())); test_cases.push((5, Timestamp::from_utc(u32::MAX as i64 - 0, 0).unwrap())); test_cases.push((9, Timestamp::from_utc(u32::MAX as i64 + 1, 0).unwrap())); test_cases.push((9, Timestamp::from_utc(i64::MIN, 0).unwrap())); test_cases.push((13, Timestamp::from_utc(i64::MIN, 1).unwrap())); test_cases } #[test] fn roundtrip() { for (index, case) in edge_cases().iter().enumerate() { println!( "Test #{}: '{}' with expected length = {}", index, case.1, case.0 ); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); assert_eq!(enc.len(), case.0); let decoded = Timestamp::try_from(enc.as_ref()).unwrap(); assert_eq!(decoded, case.1); } } #[test] fn too_long() { for case in edge_cases() { println!("Test with Timestamp = {}", case.1); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); enc.push(0u8); assert!(Timestamp::try_from(enc.as_ref()).is_err()); } } #[test] fn too_short() { for case in edge_cases() { println!("Test with Timestamp = {}", case.1); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); enc.pop(); assert!(Timestamp::try_from(enc.as_ref()).is_err()); } } }
use std::cmp; use std::convert::TryFrom; use std::fmt; use std::ops; use std::time; use byteorder::{LittleEndian, ReadBytesExt}; use fog_crypto::serde::{ CryptoEnum, FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX, FOG_TYPE_ENUM_TIME_NAME, }; use serde::{ de::{Deserialize, Deserializer, EnumAccess, Error, Unexpected, VariantAccess}, ser::{Serialize, SerializeStructVariant, Serializer}, }; use serde_bytes::ByteBuf; const MAX_NANOSEC: u32 = 1_999_999_999; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Timestamp { sec: i64, nano: u32, standard: u8, } impl Timestamp { pub fn from_utc(sec: i64, nano: u32) -> Option<Timestamp> { if nano > MAX_NANOSEC { None } else { Some(Timestamp { sec, nano, standard: 0, }) } } pub fn from_sec(sec: i64) -> Timestamp { Timestamp { sec, nano: 0, standard: 0, } } pub const fn zero() -> Timestamp { Timestamp { sec: 0, nano: 0, standard: 0, } } pub const fn min_value() -> Timestamp { Timestamp { sec: i64::MIN, nano: 0, standard: 0, } } pub const fn max_value() -> Timestamp { Timestamp { sec: i64::MAX, nano: MAX_NANOSEC, standard: 0, } } pub fn min(self, other: Timestamp) -> Timestamp { if self < other { self } else { other } } pub fn max(self, other: Timestamp) -> Timestamp { if self > other { self } else { other } } pub fn next(mut self) -> Timestamp { if self.nano < MAX_NANOSEC { self.nano += 1; } else { self.nano = 0; self.sec += 1; } self } pub fn prev(mut self) -> Timestamp { if self.nano > 0 { self.nano -= 1; } else { self.nano = MAX_NANOSEC; self.sec -= 1; } self } pub fn timestamp_utc(&self) -> i64 { self.sec } pub fn timestamp_subsec_nanos(&self) -> u32 { self.nano } pub fn as_vec(&self) -> Vec<u8> { let mut v = Vec::new(); self.encode_vec(&mut v); v } pub fn encode_vec(&self, vec: &mut Vec<u8>) { if self.nano != 0 { vec.reserve(1 + 8 + 4);
pub fn size(&self) -> usize { if self.nano != 0 { 1 + 8 + 4 } else if (self.sec <= u32::MAX as i64) && (self.sec >= 0) { 1 + 4 } else { 1 + 8 } } pub fn now() -> Option<Timestamp> { match time::SystemTime::now().duration_since(time::SystemTime::UNIX_EPOCH) { Ok(t) => Timestamp::from_utc(t.as_secs() as i64, t.subsec_nanos()), Err(_) => None, } } } impl ops::Add<i64> for Timestamp { type Output = Timestamp; fn add(self, rhs: i64) -> Self { Timestamp { sec: self.sec + rhs, nano: self.nano, standard: self.standard, } } } impl ops::Sub<i64> for Timestamp { type Output = Timestamp; fn sub(self, rhs: i64) -> Self { Timestamp { sec: self.sec - rhs, nano: self.nano, standard: self.standard, } } } impl cmp::Ord for Timestamp { fn cmp(&self, other: &Timestamp) -> cmp::Ordering { match self.sec.cmp(&other.sec) { cmp::Ordering::Equal => self.nano.cmp(&other.nano), other => other, } } } impl cmp::PartialOrd for Timestamp { fn partial_cmp(&self, other: &Timestamp) -> Option<cmp::Ordering> { Some(self.cmp(other)) } } impl fmt::Display for Timestamp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "UTC: {} sec + {} ns", self.sec, self.nano) } } impl TryFrom<&[u8]> for Timestamp { type Error = String; fn try_from(value: &[u8]) -> Result<Self, Self::Error> { let mut raw = value; let standard = raw .read_u8() .map_err(|_| String::from("missing time standard byte"))?; let (sec, nano) = match value.len() { 13 => { let sec = raw.read_i64::<LittleEndian>().unwrap(); let nano = raw.read_u32::<LittleEndian>().unwrap(); (sec, nano) } 9 => { let sec = raw.read_i64::<LittleEndian>().unwrap(); (sec, 0) } 5 => { let sec = raw.read_u32::<LittleEndian>().unwrap() as i64; (sec, 0) } _ => { return Err(format!( "not a recognized Timestamp length ({} bytes)", value.len() )) } }; Ok(Timestamp { sec, nano, standard, }) } } impl Serialize for Timestamp { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut sv = serializer.serialize_struct_variant( FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX as u32, FOG_TYPE_ENUM_TIME_NAME, 2, )?; sv.serialize_field("std", &self.standard)?; sv.serialize_field("secs", &self.sec)?; sv.serialize_field("nanos", &self.sec)?; sv.end() } else { let value = ByteBuf::from(self.as_vec()); serializer.serialize_newtype_variant( FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_INDEX as u32, FOG_TYPE_ENUM_TIME_NAME, &value, ) } } } impl<'de> Deserialize<'de> for Timestamp { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct TimeVisitor { is_human_readable: bool, } impl<'de> serde::de::Visitor<'de> for TimeVisitor { type Value = Timestamp; fn expecting(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( fmt, "{} enum with variant {} (id {})", FOG_TYPE_ENUM, FOG_TYPE_ENUM_TIME_NAME, FOG_TYPE_ENUM_TIME_INDEX ) } fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error> where A: EnumAccess<'de>, { let variant = match data.variant()? 
{ (CryptoEnum::Time, variant) => variant, (e, _) => { return Err(A::Error::invalid_type( Unexpected::Other(e.as_str()), &"Time", )) } }; if self.is_human_readable { use serde::de::MapAccess; struct TimeStructVisitor; impl<'de> serde::de::Visitor<'de> for TimeStructVisitor { type Value = Timestamp; fn expecting( &self, fmt: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { write!(fmt, "timestamp struct") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: MapAccess<'de>, { let mut secs: Option<i64> = None; let mut nanos: u32 = 0; while let Some(key) = map.next_key::<String>()? { match key.as_ref() { "std" => { let v: u8 = map.next_value()?; if v != 0 { return Err(A::Error::invalid_value( Unexpected::Unsigned(v as u64), &"0", )); } } "secs" => { secs = Some(map.next_value()?); } "nanos" => { nanos = map.next_value()?; } _ => { return Err(A::Error::unknown_field( key.as_ref(), &["std", "secs", "nanos"], )) } } } let secs = secs.ok_or_else(|| A::Error::missing_field("secs"))?; Timestamp::from_utc(secs, nanos) .ok_or_else(|| A::Error::custom("Invalid timestamp")) } } variant.struct_variant(&["std", "secs", "nanos"], TimeStructVisitor) } else { let bytes: ByteBuf = variant.newtype_variant()?; Timestamp::try_from(bytes.as_ref()).map_err(A::Error::custom) } } } let is_human_readable = deserializer.is_human_readable(); deserializer.deserialize_enum( FOG_TYPE_ENUM, &[FOG_TYPE_ENUM_TIME_NAME], TimeVisitor { is_human_readable }, ) } } #[cfg(test)] mod test { use super::*; fn edge_cases() -> Vec<(usize, Timestamp)> { let mut test_cases = Vec::new(); test_cases.push((5, Timestamp::from_utc(0, 0).unwrap())); test_cases.push((5, Timestamp::from_utc(1, 0).unwrap())); test_cases.push((13, Timestamp::from_utc(1, 1).unwrap())); test_cases.push((5, Timestamp::from_utc(u32::MAX as i64 - 1, 0).unwrap())); test_cases.push((5, Timestamp::from_utc(u32::MAX as i64 - 0, 0).unwrap())); test_cases.push((9, Timestamp::from_utc(u32::MAX as i64 + 1, 0).unwrap())); test_cases.push((9, Timestamp::from_utc(i64::MIN, 0).unwrap())); test_cases.push((13, Timestamp::from_utc(i64::MIN, 1).unwrap())); test_cases } #[test] fn roundtrip() { for (index, case) in edge_cases().iter().enumerate() { println!( "Test #{}: '{}' with expected length = {}", index, case.1, case.0 ); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); assert_eq!(enc.len(), case.0); let decoded = Timestamp::try_from(enc.as_ref()).unwrap(); assert_eq!(decoded, case.1); } } #[test] fn too_long() { for case in edge_cases() { println!("Test with Timestamp = {}", case.1); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); enc.push(0u8); assert!(Timestamp::try_from(enc.as_ref()).is_err()); } } #[test] fn too_short() { for case in edge_cases() { println!("Test with Timestamp = {}", case.1); let mut enc = Vec::new(); case.1.encode_vec(&mut enc); enc.pop(); assert!(Timestamp::try_from(enc.as_ref()).is_err()); } } }
vec.push(self.standard); vec.extend_from_slice(&self.sec.to_le_bytes()); vec.extend_from_slice(&self.nano.to_le_bytes()); } else if (self.sec <= u32::MAX as i64) && (self.sec >= 0) { vec.reserve(1 + 4); vec.push(self.standard); vec.extend_from_slice(&(self.sec as u32).to_le_bytes()); } else { vec.reserve(1 + 8); vec.push(self.standard); vec.extend_from_slice(&self.sec.to_le_bytes()); } }
function_block-function_prefix_line
[ { "content": "/// Serialize an element onto a byte vector. Doesn't check if Array & Map structures make\n\n/// sense, just writes elements out.\n\npub fn serialize_elem(buf: &mut Vec<u8>, elem: Element) {\n\n use self::Element::*;\n\n match elem {\n\n Null => buf.push(Marker::Null.into()),\n\n Bool(v) => buf.push(if v { Marker::True } else { Marker::False }.into()),\n\n Int(v) => match integer::get_int_internal(&v) {\n\n integer::IntPriv::PosInt(v) => {\n\n if v <= 127 {\n\n buf.push(Marker::PosFixInt(v as u8).into());\n\n } else if v <= u8::MAX as u64 {\n\n buf.push(Marker::UInt8.into());\n\n buf.push(v as u8);\n\n } else if v <= u16::MAX as u64 {\n\n buf.push(Marker::UInt16.into());\n\n buf.extend_from_slice(&(v as u16).to_le_bytes());\n\n } else if v <= u32::MAX as u64 {\n\n buf.push(Marker::UInt32.into());\n\n buf.extend_from_slice(&(v as u32).to_le_bytes());\n\n } else {\n\n buf.push(Marker::UInt64.into());\n", "file_path": "src/element.rs", "rank": 0, "score": 326500.6557658608 }, { "content": "pub fn blake2b( hash: &mut [u8; HASH_BYTES], data: &[u8] ) {\n\n if data.len() > ::std::u64::MAX as usize {\n\n panic!(\"Data for hasher is somehow larger than maximum u64 value\");\n\n }\n\n // The below will only fail if we set up this function wrong.\n\n unsafe { \n\n libsodium_sys::crypto_generichash_blake2b(\n\n hash.as_mut_ptr(), HASH_BYTES, \n\n data.as_ptr(), data.len() as u64,\n\n ::std::ptr::null(), 0);\n\n }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 1, "score": 321834.00676945626 }, { "content": "/// Attempt to read binary data to a Vec.\n\npub fn read_vec(buf: &mut &[u8]) -> crate::Result<Vec<u8>> {\n\n Ok(read_bin(buf)?.to_vec())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 2, "score": 316811.6361980905 }, { "content": "/// Attempt to read a i64 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i64.\n\npub fn read_i64(buf: &mut &[u8]) -> crate::Result<i64> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 3, "score": 306026.09821974876 }, { "content": "/// Attempt to read a u32 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u32.\n\npub fn read_u32(buf: &mut &[u8]) -> crate::Result<u32> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u32\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 4, "score": 305748.45812603465 }, { "content": "/// Attempt to read a `Hash`.\n\npub fn read_hash(buf: &mut &[u8]) -> crate::Result<Hash> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Hash(len) = marker {\n\n read_raw_hash(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected hash\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 5, "score": 305744.72504170705 }, { "content": "fn write_ext_marker(buf: &mut Vec<u8>, len: u32) {\n\n match len {\n\n 1 => buf.push(Marker::FixExt1.into()),\n\n 2 => buf.push(Marker::FixExt2.into()),\n\n 4 => buf.push(Marker::FixExt4.into()),\n\n 8 => buf.push(Marker::FixExt8.into()),\n\n 16 => buf.push(Marker::FixExt16.into()),\n\n len if len < (std::u8::MAX as u32) => {\n\n buf.push(Marker::Ext8.into());\n\n buf.push(len as u8);\n\n },\n\n len if len < (std::u16::MAX as u32) => {\n\n buf.push(Marker::Ext16.into());\n\n buf.extend_from_slice(&(len as u16).to_be_bytes());\n\n },\n\n len => {\n\n buf.push(Marker::Ext32.into());\n\n buf.extend_from_slice(&(len as u32).to_be_bytes());\n\n },\n\n };\n", "file_path": "old/encode.rs", "rank": 6, "score": 300201.52968853817 }, { "content": "/// Converts a mutable slice of bytes to a mutable string slice. Works exactly like \n\n/// `std::str::from_utf8_mut` except that it counts the number of unicode code points.\n\npub fn from_utf8_mut(v: &mut [u8]) -> Result<(usize, &mut str), Utf8Error> {\n\n let count = run_utf8_validation(v)?;\n\n Ok((count, unsafe { str::from_utf8_unchecked_mut(v) }))\n\n}\n\n\n\n// use truncation to fit u64 into usize\n\nconst NONASCII_MASK: usize = 0x80808080_80808080u64 as usize;\n\n\n\n/// Returns `true` if any byte in the word `x` is nonascii (>= 128).\n", "file_path": "old/str_char.rs", "rank": 7, "score": 297023.00217061036 }, { "content": "/// Attempt to read a `Timestamp`.\n\npub fn read_time(buf: &mut &[u8]) -> crate::Result<Timestamp> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Timestamp(len) = marker {\n\n read_raw_time(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Timestamp\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 8, "score": 293125.3469495006 }, { "content": "/// Write the MessagePack value out to a Vector. 
This code assumes that all strings, binary data, \n\n/// objects, and arrays are less than 2^32 elements in size.\n\npub fn write_value(buf: &mut Vec<u8>, val: &Value) {\n\n match *val {\n\n\n\n Value::Null => {\n\n buf.push(Marker::Nil.into())\n\n },\n\n\n\n Value::Boolean(val) => {\n\n if val {\n\n buf.push(Marker::True.into())\n\n } else {\n\n buf.push(Marker::False.into())\n\n }\n\n },\n\n\n\n Value::Integer(ref val) => {\n\n match integer::get_int_internal(val) {\n\n integer::IntPriv::PosInt(u) => {\n\n if u <= 127 {\n\n buf.push(Marker::PosFixInt(u as u8).into());\n", "file_path": "old/encode.rs", "rank": 9, "score": 292582.24556739 }, { "content": "pub fn compress(cctx: &mut CCtx, level: i32, raw: &[u8], buf: &mut Vec<u8>) {\n\n let vec_len = buf.len();\n\n let mut buffer_len = compress_bound(raw.len());\n\n buf.reserve(buffer_len);\n\n unsafe {\n\n buf.set_len(vec_len + buffer_len);\n\n buffer_len = compress_cctx(\n\n cctx,\n\n &mut buf[vec_len..],\n\n raw,\n\n level\n\n ).expect(\"zstd library unexpectedly errored during compress_cctx!\");\n\n buf.set_len(vec_len + buffer_len);\n\n }\n\n}\n\n\n\n\n", "file_path": "old/zstd_help.rs", "rank": 10, "score": 288188.0931261439 }, { "content": "/// Attempt to read an array as `Value`.\n\npub fn read_array(buf: &mut &[u8]) -> crate::Result<Vec<Value>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Array(len) = marker {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value(buf)?);\n\n }\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"Expected array\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 11, "score": 282717.10064602946 }, { "content": "/// Read raw Hash out from a buffer\n\npub fn read_raw_hash(buf: &mut &[u8], len: usize) -> crate::Result<Hash> {\n\n let fail_len = buf.len();\n\n let hash = Hash::decode(buf)?;\n\n if hash.size() != len {\n\n Err(Error::BadEncode(fail_len, \"Hash type has invalid size\"))\n\n }\n\n else {\n\n Ok(hash)\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 12, "score": 282134.5949455295 }, { "content": "pub fn lockbox_from_stream(k: &FullStreamKey, mut message: Vec<u8>) -> Result<Lockbox, CryptoError> {\n\n let version = k.get_version();\n\n if version != 1 { \n\n memzero(&mut message[..]); // Must assume data is sensitive and zero it out before failing\n\n return Err(CryptoError::UnsupportedVersion);\n\n }\n\n let nonce = Nonce::new();\n\n let raw_key = k.get_key();\n\n let type_id = LockType::Stream(k.get_id());\n\n\n\n message.reserve_exact(Tag::len()); // Need exactly enough to append the tag\n\n let tag = aead_encrypt(&mut message[..], &[], &nonce, &raw_key);\n\n message.extend_from_slice(&tag.0);\n\n Ok(Lockbox {\n\n version,\n\n type_id,\n\n nonce,\n\n ciphertext: message\n\n })\n\n}\n\n\n", "file_path": "old/crypto/lockbox.rs", "rank": 13, "score": 281057.3464324775 }, { "content": "/// Consume the lockbox and spit out the decrypted data\n\npub fn decrypt_lockbox(k: &FullStreamKey, mut lock: Lockbox) -> Result<Vec<u8>, CryptoError> {\n\n let m_len = lock.ciphertext.len() - Tag::len();\n\n let success = {\n\n let (mut message, tag) = lock.ciphertext.split_at_mut(m_len);\n\n aead_decrypt(\n\n &mut message,\n\n &[],\n\n &tag,\n\n &lock.nonce,\n\n &k.get_key()\n\n )\n\n };\n\n if success {\n\n lock.ciphertext.truncate(m_len);\n\n Ok(lock.ciphertext) // Value is moved, so plaintext is only in the Result\n\n }\n\n else {\n\n Err(CryptoError::DecryptFailed)\n\n }\n\n}\n", "file_path": 
"old/crypto/lockbox.rs", "rank": 14, "score": 281057.3464324775 }, { "content": "pub fn dict_compress(cctx: &mut CCtx, dict: &CDict, raw: &[u8], buf: &mut Vec<u8>) {\n\n let vec_len = buf.len();\n\n let mut buffer_len = zstd_safe::compress_bound(raw.len());\n\n buf.reserve(buffer_len);\n\n unsafe {\n\n buf.set_len(vec_len + buffer_len);\n\n buffer_len = zstd_safe::compress_using_cdict(\n\n cctx,\n\n &mut buf[vec_len..],\n\n raw,\n\n dict\n\n ).expect(\"zstd library unexpectedly errored during compress_cctx!\");\n\n buf.set_len(vec_len + buffer_len);\n\n }\n\n}\n\n\n", "file_path": "old/zstd_help.rs", "rank": 15, "score": 280858.66987124854 }, { "content": "/// Read raw Timestamp out from a buffer\n\npub fn read_raw_time(buf: &mut &[u8], len: usize) -> crate::Result<Timestamp> {\n\n let fail_len = buf.len();\n\n match len {\n\n 4 => {\n\n let sec = buf.read_u32::<BigEndian>()?;\n\n Ok(Timestamp::from_sec(sec as i64))\n\n },\n\n 8 => {\n\n let raw_time = buf.read_u64::<BigEndian>()?;\n\n let sec = (raw_time & 0x0003_FFFF_FFFFu64) as i64;\n\n let nano = (raw_time >> 34) as u32;\n\n Ok(Timestamp::from_raw(sec,nano).ok_or(Error::BadEncode(fail_len, \"Timestamp nanoseconds is too big\"))?)\n\n },\n\n 12 => {\n\n let nano = buf.read_u32::<BigEndian>()?;\n\n let sec = buf.read_i64::<BigEndian>()?;\n\n Ok(Timestamp::from_raw(sec,nano).ok_or(Error::BadEncode(fail_len, \"Timestamp nanoseconds is too big\"))?)\n\n },\n\n _ => Err(Error::BadEncode(fail_len, \"Timestamp type has invalid size\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 16, "score": 269430.011932423 }, { "content": "// Can fail due to bad public key\n\npub fn lockbox_from_identity(id: &FullIdentity, mut message: Vec<u8>) -> Result<(Lockbox, FullStreamKey),CryptoError> {\n\n let version = id.get_version();\n\n if version != 1 {\n\n memzero(&mut message[..]); // Must assume data is sensitive and zero it out before failing\n\n return Err(CryptoError::UnsupportedVersion);\n\n }\n\n let nonce = Nonce::new();\n\n let mut esk: SecretCryptKey = Default::default();\n\n let mut epk: PublicCryptKey = Default::default();\n\n crypt_keypair(&mut epk, &mut esk);\n\n let k = id.calc_stream_key(&esk)?;\n\n let k = FullStreamKey::from_secret(k);\n\n let type_id = LockType::Identity((id.get_id(),epk));\n\n\n\n message.reserve_exact(Tag::len()); // Need exactly enough to append the tag\n\n let tag = aead_encrypt(&mut message[..], &[], &nonce, &k.get_key());\n\n message.extend_from_slice(&tag.0);\n\n Ok((Lockbox {\n\n version,\n\n type_id,\n\n nonce,\n\n ciphertext: message\n\n }, k))\n\n}\n\n\n", "file_path": "old/crypto/lockbox.rs", "rank": 17, "score": 264724.1361080735 }, { "content": "pub fn randombytes(x: &mut [u8]) {\n\n unsafe { libsodium_sys::randombytes_buf(x.as_mut_ptr() as *mut _, x.len()); }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 18, "score": 264630.09864881326 }, { "content": "pub fn memzero(x: &mut [u8]) {\n\n unsafe { libsodium_sys::sodium_memzero(x.as_mut_ptr() as *mut _, x.len()); }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 19, "score": 264630.09864881326 }, { "content": "/// Attempt to read an array as `ValueRef`.\n\npub fn read_array_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<Vec<ValueRef<'a>>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Array(len) = marker {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value_ref(buf)?);\n\n }\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected 
array\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 20, "score": 261098.9107756379 }, { "content": "/// Attempt to read a u8 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u8.\n\npub fn read_u8(buf: &mut &[u8]) -> crate::Result<u8> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u8\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 21, "score": 259913.80971731694 }, { "content": "pub fn decompress(dctx: &mut DCtx, max_size: usize, extra_size: usize, buf: &[u8], decode: &mut Vec<u8>) -> crate::Result<()> {\n\n // Decompress the data\n\n // Find the expected size, and fail if it's larger than the maximum allowed size.\n\n let decode_len = decode.len();\n\n let expected_len = get_frame_content_size(buf);\n\n // First check if expected_len is above size on its own\n\n if expected_len >= (max_size as u64) {\n\n return Err(Error::BadSize);\n\n }\n\n if (decode_len+extra_size+(expected_len as usize)) >= max_size {\n\n return Err(Error::BadSize);\n\n }\n\n let expected_len = expected_len as usize;\n\n decode.reserve(expected_len);\n\n unsafe {\n\n decode.set_len(decode_len + expected_len);\n\n let len = decompress_dctx(\n\n dctx,\n\n &mut decode[decode_len..],\n\n buf\n\n ).map_err(|_| Error::FailDecompress)?;\n\n decode.set_len(decode_len + len);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/zstd_help.rs", "rank": 22, "score": 255782.09534253844 }, { "content": "pub fn print_error(buf: &[u8], err: crate::Error) {\n\n let (offset, err) = match err {\n\n Error::BadEncode(offset, err) => (offset, err),\n\n Error::FailValidate(offset, err) => (offset, err),\n\n Error::ParseLimit(offset, err) => (offset, err),\n\n _ => (0, \"no error in the raw data\")\n\n };\n\n\n\n let mut buf: &[u8] = buf;\n\n // Error is only for indicating when we're done parsing and can stop, so we don't need it here\n\n let _ = print_error_internal(&mut buf, offset+1, err, 0);\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 23, "score": 255136.08874638606 }, { "content": "pub fn train_dict(dict_size: usize, samples: Vec<Vec<u8>>) -> Result<Vec<u8>, usize> {\n\n let sizes = samples.iter().map(|x| x.len()).collect::<Vec<usize>>();\n\n let mut buffer = Vec::with_capacity(sizes.iter().sum());\n\n for sample in samples.iter() {\n\n buffer.extend_from_slice(sample);\n\n }\n\n\n\n let mut dict = vec![0u8; dict_size];\n\n match zstd_safe::train_from_buffer(&mut dict[..], &buffer[..], &sizes[..]) {\n\n Ok(size) => {\n\n dict.resize(size, 0u8);\n\n Ok(dict)\n\n },\n\n Err(e) => {\n\n Err(e)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "old/zstd_help.rs", "rank": 24, "score": 254132.72786263018 }, { "content": "/// Encode an Entry for later decoding into a query.\n\n///\n\n/// An [`Entry`] can be used to describe a query to be made against a [`Document`], where the \n\n/// Entry's parent document is the document to be queried, and the field is the specific entry type \n\n/// to be queried for.\n\n///\n\n/// [`Entry`]: ./struct.Entry.html\n\n/// [`Document`]: ./struct.Document.html\n\npub fn encode_query(entry: Entry) -> Vec<u8> {\n\n let mut buf = Vec::new();\n\n entry.doc_hash().encode(&mut buf);\n\n encode::write_value(&mut buf, &Value::from(entry.field()));\n\n buf.extend_from_slice(entry.raw_entry());\n\n buf\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n 
use {Schema, Entry, Document};\n\n\n\n fn simple_schema() -> Schema {\n\n let schema: Value = fogpack!({\n\n \"req\": {\n\n \"title\": { \"type\": \"Str\", \"max_len\": 200 },\n\n \"text\": { \"type\": \"Str\" },\n\n },\n\n \"entries\": {\n", "file_path": "old/query.rs", "rank": 25, "score": 251048.5571742762 }, { "content": "/// Read a positive integer straight out of the stream. The size of the integer should be known from the \n\n/// fogpack marker that was used. If the marker contained the integer, it should be included as `v`.\n\npub fn read_pos_int(buf: &mut &[u8], len: usize, v: u8) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n match len {\n\n 0 => Ok(v.into()),\n\n 1 => {\n\n let v = buf.read_u8()?;\n\n if v > 127 {\n\n Ok(v.into())\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n\n }\n\n },\n\n 2 => {\n\n let v = buf.read_u16::<BigEndian>()?;\n\n if v > (std::u8::MAX as u16) {\n\n Ok(v.into())\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n", "file_path": "old/decode.rs", "rank": 26, "score": 247865.70313949202 }, { "content": "/// Attempt to read binary data.\n\npub fn read_bin<'a>(buf: &mut &'a [u8]) -> crate::Result<&'a [u8]> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Binary(len) = marker {\n\n read_raw_bin(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected binary data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 27, "score": 243153.2523471963 }, { "content": "/// Finds the schema hash for a raw, encoded document. Fails if raw data doesn't fit the document \n\n/// format, or if the empty field (\"\") doesn't contain a Hash. If there is no schema, `None` is \n\n/// returned.\n\n///\n\n/// This function is primarily meant for finding what schema to use for decoding of a byte vector \n\n/// into a document.\n\n///\n\n/// # Examples\n\n///\n\n/// Basic Usage, assuming a HashMap of schemas is available:\n\n///\n\n/// ```\n\n/// # use fog_pack::*;\n\n/// # use std::collections::HashMap;\n\n/// # use std::io;\n\n/// # fn decode_doc(\n\n/// # no_schema: &mut NoSchema,\n\n/// # schema_db: &mut HashMap<Hash, Schema>,\n\n/// # buffer: &[u8]\n\n/// # )\n\n/// # -> fog_pack::Result<Document> {\n\n///\n\n/// let schema_hash = extract_schema_hash(&buffer)?;\n\n/// if let Some(schema_hash) = schema_hash {\n\n/// if let Some(schema) = schema_db.get_mut(&schema_hash) {\n\n/// let mut buf: &[u8] = buffer;\n\n/// schema.decode_doc(&mut buf)\n\n/// }\n\n/// else {\n\n/// Err(Error::FailValidate(0, \"Don't have schema\"))\n\n/// }\n\n/// }\n\n/// else {\n\n/// no_schema.decode_doc(&mut &buffer[..])\n\n/// }\n\n/// # }\n\n/// ```\n\npub fn extract_schema_hash(buf: &[u8]) -> crate::Result<Option<Hash>> {\n\n let mut buf: &[u8] = buf;\n\n let compressed = CompressType::decode(&mut buf)?;\n\n buf = &buf[3..]; // Skip past the 3 bytes indicating the document size\n\n match compressed {\n\n CompressType::CompressedNoSchema => Ok(None),\n\n CompressType::Uncompressed | CompressType::Compressed | CompressType::DictCompressed \n\n => parse_schema_hash(&mut buf),\n\n }\n\n}\n\n\n\n/// Parses the schema hash and advances the slice pointer past the hash. Used when we already \n\n/// parsed the compression type and want to try reading the schema hash\n\npub(crate) fn parse_schema_hash(buf: &mut &[u8]) -> crate::Result<Option<Hash>> {\n\n // Get the object tag & number of field/value pairs it has\n\n let obj_len = if let MarkerType::Object(len) = decode::read_marker(buf)? 
{\n\n len\n\n }\n\n else {\n\n return Err(Error::BadEncode(buf.len(), \"Raw document isn't a fogpack object\"));\n", "file_path": "old/document.rs", "rank": 28, "score": 242608.15105489257 }, { "content": "fn write_string(buf: &mut Vec<u8>, val: &str) {\n\n let len = val.len() as u32;\n\n if len <= 31 { buf.push(Marker::FixStr(len as u8).into());\n\n }\n\n else if len <= std::u8::MAX as u32 {\n\n buf.push(Marker::Str8.into());\n\n buf.push(len as u8);\n\n }\n\n else if len <= std::u16::MAX as u32 {\n\n buf.push(Marker::Str16.into());\n\n buf.extend_from_slice(&(len as u16).to_be_bytes());\n\n }\n\n else {\n\n buf.push(Marker::Str32.into());\n\n buf.extend_from_slice(&len.to_be_bytes());\n\n }\n\n buf.extend_from_slice(val.as_bytes());\n\n}\n\n\n", "file_path": "old/encode.rs", "rank": 29, "score": 242595.61194007925 }, { "content": "pub fn read_null(buf: &mut &[u8]) -> crate::Result<()> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Null = marker {\n\n Ok(())\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected null\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 30, "score": 240474.42284562465 }, { "content": "pub fn dict_decompress(dctx: &mut DCtx, dict: &DDict, max_size: usize, extra_size: usize, buf: &[u8], decode: &mut Vec<u8>) -> crate::Result<()> {\n\n // Decompress the data\n\n // Find the expected size, and fail if it's larger than the maximum allowed size.\n\n let decode_len = decode.len();\n\n let expected_len = get_frame_content_size(buf);\n\n if expected_len >= (max_size as u64) {\n\n return Err(Error::BadSize);\n\n }\n\n if (decode_len+extra_size+(expected_len as usize)) >= max_size {\n\n return Err(Error::BadSize);\n\n }\n\n let expected_len = expected_len as usize;\n\n decode.reserve(expected_len);\n\n unsafe {\n\n decode.set_len(decode_len + expected_len);\n\n let len = decompress_using_ddict(\n\n dctx,\n\n &mut decode[decode_len..],\n\n buf,\n\n dict\n\n ).map_err(|_| Error::FailDecompress)?;\n\n decode.set_len(decode_len + len);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/zstd_help.rs", "rank": 31, "score": 239665.57344044413 }, { "content": "/// Attempt to get the schema for a raw document. Fails if the raw byte slice doesn't conform to \n\n/// the right format, or if the hash is invalid.\n\npub fn get_doc_schema(doc: &[u8]) -> Result<Option<Hash>> {\n\n let hash_raw = SplitDoc::split(doc)?.hash_raw;\n\n if hash_raw.is_empty() {\n\n Ok(None)\n\n }\n\n else {\n\n Ok(Some(hash_raw.try_into()?))\n\n }\n\n}\n\n\n\n// Header format:\n\n// 1. Compression Type marker\n\n// 2. If schema is used: one byte indicating length of hash (must be 127 or\n\n// lower), then the schema hash.\n\n// 3. 3-byte length of data\n\n// 4. The data\n\n// 5. The optional signature\n\n//\n\n// If compressed, only the data portion is compressed, and the 3-byte length is updated\n\n// accordingly\n", "file_path": "src/document.rs", "rank": 32, "score": 238015.60675384704 }, { "content": "/// Converts a slice of bytes to a string slice. Works exactly like `std::str::from_utf8` except \n\n/// that it counts the number of unicode code points.\n\npub fn from_utf8(v: &[u8]) -> Result<(usize, &str), Utf8Error> {\n\n let count = run_utf8_validation(v)?;\n\n Ok((count, unsafe { str::from_utf8_unchecked(v) }))\n\n}\n\n\n", "file_path": "old/str_char.rs", "rank": 33, "score": 233120.5251417713 }, { "content": "/// Decode a MessagePack value. Decoding will fail if the value isn't in \n\n/// condense-db canonical form. 
That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn read_value(buf: &mut &[u8]) -> crate::Result<Value> {\n\n let marker = read_marker(buf)?;\n\n Ok(match marker {\n\n MarkerType::Null => Value::Null,\n\n MarkerType::Boolean(v) => Value::Boolean(v),\n\n MarkerType::NegInt((len, v)) => Value::Integer(read_neg_int(buf, len, v)?),\n\n MarkerType::PosInt((len, v)) => Value::Integer(read_pos_int(buf, len, v)?),\n\n MarkerType::String(len) => Value::String(read_raw_str(buf, len)?.to_string()),\n\n MarkerType::F32 => Value::F32(buf.read_f32::<BigEndian>()?),\n\n MarkerType::F64 => Value::F64(buf.read_f64::<BigEndian>()?),\n\n MarkerType::Binary(len) => Value::Binary(read_raw_bin(buf, len)?.to_vec()),\n\n MarkerType::Array(len) => {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value(buf)?);\n\n }\n\n Value::Array(v)\n\n },\n\n MarkerType::Object(len) => Value::Object(read_to_map(buf, len)?),\n\n MarkerType::Hash(len) => Value::Hash(read_raw_hash(buf, len)?),\n\n MarkerType::Identity(len) => Value::Identity(read_raw_id(buf, len)?),\n\n MarkerType::Lockbox(len) => Value::Lockbox(read_raw_lockbox(buf, len)?),\n\n MarkerType::Timestamp(len) => Value::Timestamp(read_raw_time(buf, len)?),\n\n })\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 34, "score": 232088.03680712287 }, { "content": "/// Verify a MessagePack value and return the number of bytes in it. Fails if the value isn't in \n\n/// condense-db canonical form. That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn verify_value(buf: &mut &[u8]) -> crate::Result<usize> {\n\n let length = buf.len();\n\n let marker = read_marker(buf)?;\n\n match marker {\n\n MarkerType::NegInt((len, v)) => { read_neg_int(buf, len, v)?; },\n\n MarkerType::PosInt((len, v)) => { read_pos_int(buf, len, v)?; },\n\n MarkerType::String(len) => { read_raw_str(buf, len)?; },\n\n MarkerType::F32 => { buf.read_f32::<BigEndian>()?; },\n\n MarkerType::F64 => { buf.read_f64::<BigEndian>()?; },\n\n MarkerType::Binary(len) => { read_raw_bin(buf, len)?; },\n\n MarkerType::Array(len) => {\n\n for _i in 0..len {\n\n verify_value(buf)?;\n\n }\n\n },\n\n MarkerType::Object(len) => { verify_map(buf, len)?; },\n\n MarkerType::Hash(len) => { read_raw_hash(buf, len)?; },\n\n MarkerType::Identity(len) => { read_raw_id(buf, len)?; },\n\n MarkerType::Lockbox(len) => { read_raw_lockbox(buf, len)?; },\n\n MarkerType::Timestamp(len) => { read_raw_time(buf, len)?; },\n\n _ => (),\n\n }\n\n Ok(length - buf.len())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 35, "score": 232088.00671372632 }, { "content": "/// Attempt to read a F32 from a fogpack data structure. Fails if invalid F32 retrieved.\n\npub fn read_f32(buf: &mut &[u8]) -> crate::Result<f32> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::F32 = marker {\n\n Ok(buf.read_f32::<BigEndian>()?)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a f32\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 36, "score": 232084.16188948875 }, { "content": "/// Attempt to read a F32 from a fogpack data structure. 
Fails if invalid F64 retrieved.\n\npub fn read_f64(buf: &mut &[u8]) -> crate::Result<f64> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::F64 = marker {\n\n Ok(buf.read_f64::<BigEndian>()?)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a f64\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 37, "score": 232084.1618894888 }, { "content": "/// Attempt to read a `Lockbox`.\n\npub fn read_lockbox(buf: &mut &[u8]) -> crate::Result<Lockbox> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Lockbox(len) = marker {\n\n read_raw_lockbox(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Lockbox\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 38, "score": 232084.1618894888 }, { "content": "/// Attempt to read an integer from a fogpack data structure. Fails if an integer wasn't retrieved.\n\npub fn read_integer(buf: &mut &[u8]) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n match marker {\n\n MarkerType::PosInt((len, v)) => read_pos_int(buf, len, v),\n\n MarkerType::NegInt((len, v)) => read_neg_int(buf, len, v),\n\n _ => Err(Error::FailValidate(fail_len, \"Expected Integer\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 39, "score": 232084.16188948875 }, { "content": "/// Attempt to read a u64 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u64.\n\npub fn read_u64(buf: &mut &[u8]) -> crate::Result<u64> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 40, "score": 232084.16188948875 }, { "content": "pub fn read_bool(buf: &mut &[u8]) -> crate::Result<bool> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Boolean(v) = marker {\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected boolean\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 41, "score": 232084.1618894888 }, { "content": "/// Attempt to read a u16 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u16.\n\npub fn read_u16(buf: &mut &[u8]) -> crate::Result<u16> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u16\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 42, "score": 232084.1618894888 }, { "content": "/// Attempt to read a i16 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i16.\n\npub fn read_i16(buf: &mut &[u8]) -> crate::Result<i16> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i16\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 43, "score": 232084.1618894888 }, { "content": "/// Attempt to read an `Identity`.\n\npub fn read_id(buf: &mut &[u8]) -> crate::Result<Identity> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Identity(len) = marker {\n\n read_raw_id(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Identity\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 44, "score": 232084.1618894888 }, { "content": "/// Attempt to copy a string from a fogpack data structure. Fails if string wasn't present/valid.\n\npub fn read_string(buf: &mut &[u8]) -> crate::Result<String> {\n\n Ok(read_str(buf)?.to_string())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 45, "score": 232084.16188948875 }, { "content": "/// Attempt to read a i8 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i8.\n\npub fn read_i8(buf: &mut &[u8]) -> crate::Result<i8> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i8\"))\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 46, "score": 232084.16188948875 }, { "content": "/// Attempt to read a i32 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i32.\n\npub fn read_i32(buf: &mut &[u8]) -> crate::Result<i32> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i32\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 47, "score": 232084.16188948875 }, { "content": "/// Read a fogpack marker, length, and/or extension type from a buffer.\n\npub fn read_marker(buf: &mut &[u8]) -> crate::Result<MarkerType> {\n\n let fail_len = buf.len();\n\n let marker = Marker::from_u8(buf.read_u8()?);\n\n Ok(match marker {\n\n Marker::PosFixInt(val) => MarkerType::PosInt((0,val)),\n\n Marker::FixMap(len) => MarkerType::Object(len as usize),\n\n Marker::FixStr(len) => MarkerType::String(len as usize),\n\n Marker::FixArray(len) => MarkerType::Array(len as usize),\n\n Marker::Nil => MarkerType::Null,\n\n Marker::False => MarkerType::Boolean(false),\n\n Marker::True => MarkerType::Boolean(true),\n\n Marker::Bin8 => {\n\n let len = buf.read_u8()? as usize;\n\n MarkerType::Binary(len)\n\n },\n\n Marker::Bin16 => {\n\n let len = buf.read_u16::<BigEndian>()? as usize;\n\n if len <= (std::u8::MAX as usize) { return Err(not_shortest(fail_len)); }\n\n MarkerType::Binary(len)\n\n },\n", "file_path": "old/decode.rs", "rank": 48, "score": 227958.00201008387 }, { "content": "/// Train a zstd dictionary from a sequence of documents.\n\n///\n\n/// Dictionaries can be limited to a maximum size. 
On failure, a zstd library error code is \n\n/// returned.\n\n///\n\n/// The zstd documentation recommends around 100 times as many input bytes as the desired \n\n/// dictionary size. It can be useful to check the resulting dictionary for overlearning - just \n\n/// dump the dictionary to a file and look for human-readable strings. These can occur when the \n\n/// dictionary is larger than necessary, and begins encoding the randomized portions of the \n\n/// Documents. In the future, this function may become smarter and get better at eliminating \n\n/// low-probability dictionary items.\n\npub fn train_doc_dict(max_size: usize, docs: Vec<Document>) -> Result<Vec<u8>, usize> {\n\n let samples = docs\n\n .iter()\n\n .map(|doc| {\n\n // We can call unwrap below because all Documents should already have vetted that:\n\n // 1) The raw document contains an object\n\n // 2) The object keys are strings\n\n // 3) The empty string field has a hash as the value\n\n let mut buf: &[u8] = &doc.raw_doc()[4..doc.doc_len()];\n\n let obj_len = decode::read_marker(&mut buf).unwrap();\n\n // Marker is always an object, we're just checking to see if it's empty\n\n if let MarkerType::Object(0) = obj_len {\n\n Vec::from(buf)\n\n }\n\n else {\n\n // Document might contain a schema already. Skip over it.\n\n let mut buf2: &[u8] = buf;\n\n let field = decode::read_str(&mut buf2).unwrap();\n\n if !field.is_empty() {\n\n // Wasn't a schema, use the first parsed field along with everything else\n", "file_path": "old/document.rs", "rank": 49, "score": 226644.22827986765 }, { "content": "/// Train a zstd dictionary from a sequence of entries.\n\n///\n\n/// Dictionaries can be limited to a maximum size. On failure, a zstd library error code is \n\n/// returned.\n\n///\n\n/// The zstd documentation recommends around 100 times as many input bytes as the desired \n\n/// dictionary size. It can be useful to check the resulting dictionary for overlearning - just \n\n/// dump the dictionary to a file and look for human-readable strings. These can occur when the \n\n/// dictionary is larger than necessary, and begins encoding the randomized portions of the \n\n/// Entries. In the future, this function may become smarter and get better at eliminating \n\n/// low-probability dictionary items.\n\npub fn train_entry_dict(max_size: usize, entries: Vec<Entry>) -> Result<Vec<u8>, usize> {\n\n let samples = entries\n\n .iter()\n\n .map(|entry| Vec::from(entry.entry_val()))\n\n .collect::<Vec<Vec<u8>>>();\n\n zstd_help::train_dict(max_size, samples)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::crypto::{Vault, PasswordLevel};\n\n\n\n fn test_entry() -> Entry {\n\n let test: Value = fogpack!(vec![0u8, 1u8, 2u8]);\n\n Entry::new(Hash::new_empty(), String::from(\"\"), test).expect(\"Should be able to make entry\")\n\n }\n\n\n\n fn prep_vault() -> (Vault, Key) {\n\n let mut vault = Vault::new_from_password(PasswordLevel::Interactive, \"test\".to_string())\n", "file_path": "old/entry.rs", "rank": 50, "score": 226644.22827986765 }, { "content": "/// Read a negative integer straight out of the stream. The size of the integer should be known from the \n\n/// fogpack marker that was used. 
If the marker contained the integer, it should be included as `v`.\n\npub fn read_neg_int(buf: &mut &[u8], len: usize, v: i8) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n match len {\n\n 0 => Ok(v.into()),\n\n 1 => {\n\n let v = buf.read_i8()?;\n\n if v < -32 {\n\n Ok(v.into())\n\n }\n\n else if v >= 0 {\n\n Err(not_negative(fail_len))\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n\n }\n\n },\n\n 2 => {\n\n let v = buf.read_i16::<BigEndian>()?;\n\n if v < (std::i8::MIN as i16) {\n\n Ok(v.into())\n", "file_path": "old/decode.rs", "rank": 51, "score": 226407.413575174 }, { "content": "/// General function for referencing binary data in a buffer. Checks for if the \n\n/// length is greater than remaining bytes in the buffer.\n\npub fn read_raw_bin<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<&'a [u8]> {\n\n let fail_len = buf.len();\n\n if buf.len() >= len {\n\n let (data, rem) = buf.split_at(len);\n\n *buf = rem;\n\n Ok(data)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"Binary length larger than amount of data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 52, "score": 225863.184369318 }, { "content": "// Does in-place decryption of crypt and returns true if verification succeeds\n\npub fn aead_decrypt(crypt: &mut [u8], ad: &[u8], tag: &[u8],n: &Nonce, k: &SecretKey) -> bool {\n\n 0 <= unsafe {\n\n libsodium_sys::crypto_aead_xchacha20poly1305_ietf_decrypt_detached(\n\n crypt.as_mut_ptr(),\n\n ptr::null_mut(),\n\n crypt.as_ptr(),\n\n crypt.len() as c_ulonglong,\n\n tag.as_ptr(),\n\n ad.as_ptr(),\n\n ad.len() as c_ulonglong,\n\n n.0.as_ptr(),\n\n k.0.as_ptr()\n\n )\n\n }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 53, "score": 225438.29927788748 }, { "content": "// Does in-place encryption of message and returns HMAC Tag\n\npub fn aead_encrypt(message: &mut [u8], ad: &[u8], n: &Nonce, k: &SecretKey) -> Tag {\n\n // tag will store the message authentication tag\n\n let mut tag = Tag([0; TAG_BYTES]);\n\n unsafe {\n\n libsodium_sys::crypto_aead_xchacha20poly1305_ietf_encrypt_detached(\n\n message.as_mut_ptr(),\n\n tag.0.as_mut_ptr(),\n\n ptr::null_mut(),\n\n message.as_ptr(),\n\n message.len() as c_ulonglong,\n\n ad.as_ptr(),\n\n ad.len() as c_ulonglong, \n\n ptr::null_mut(),\n\n n.0.as_ptr(),\n\n k.0.as_ptr()\n\n );\n\n }\n\n tag\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 54, "score": 225166.45325620077 }, { "content": "pub fn get_raw_array(raw: &mut &[u8], len: usize) -> crate::Result<Box<[u8]>> {\n\n let start: &[u8] = raw;\n\n for _ in 0..len {\n\n verify_value(raw)?;\n\n }\n\n let (array, _) = start.split_at(start.len()-raw.len());\n\n Ok(array.to_vec().into_boxed_slice())\n\n}\n\n\n\n\n\n\n", "file_path": "old/validator/array.rs", "rank": 55, "score": 224824.39058360786 }, { "content": "/// Attempt to read a str from a fogpack data structure. 
Fails if str wasn't present/valid.\n\npub fn read_str<'a>(buf: &mut &'a [u8]) -> crate::Result<&'a str> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::String(len) = marker {\n\n read_raw_str(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a string\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 56, "score": 221787.9621502835 }, { "content": "#[inline]\n\nfn u8_is_zero(v: &u8) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/schema.rs", "rank": 57, "score": 219479.5939026709 }, { "content": "fn compress_entry(entry: Vec<u8>, compression: &Compress) -> Vec<u8> {\n\n // Skip if we aren't compressing\n\n if let Compress::None = compression {\n\n return entry;\n\n }\n\n\n\n // Gather info from the raw entry\n\n let split = SplitEntry::split(&entry).unwrap();\n\n let max_len = zstd_safe::compress_bound(split.data.len());\n\n let mut compress = Vec::with_capacity(entry.len() + max_len - split.data.len());\n\n compress.extend_from_slice(&entry[..ENTRY_PREFIX_LEN]);\n\n\n\n // Compress, update the header, append the signature\n\n match compression.compress(compress, split.data) {\n\n Ok(mut compress) => {\n\n let data_len = (compress.len() - ENTRY_PREFIX_LEN).to_le_bytes();\n\n compress[0] = CompressType::type_of(compression).into();\n\n compress[1] = data_len[0];\n\n compress[2] = data_len[1];\n\n compress.extend_from_slice(split.signature_raw);\n\n compress\n\n }\n\n Err(()) => entry,\n\n }\n\n}\n\n\n", "file_path": "src/schema.rs", "rank": 58, "score": 217912.90962479403 }, { "content": "fn compress_doc(doc: Vec<u8>, compression: &Compress) -> Vec<u8> {\n\n // Skip if we aren't compressing\n\n if let Compress::None = compression {\n\n return doc;\n\n }\n\n\n\n // Gather info from the raw document\n\n let split = SplitDoc::split(&doc).unwrap();\n\n let header_len = doc.len() - split.data.len() - split.signature_raw.len();\n\n let max_len = zstd_safe::compress_bound(split.data.len());\n\n let mut compress = Vec::with_capacity(doc.len() + max_len - split.data.len());\n\n compress.extend_from_slice(&doc[..header_len]);\n\n\n\n // Compress, update the header, append the signature\n\n match compression.compress(compress, split.data) {\n\n Ok(mut compress) => {\n\n let data_len = (compress.len() - header_len).to_le_bytes();\n\n compress[0] = CompressType::type_of(compression).into();\n\n compress[header_len - 3] = data_len[0];\n\n compress[header_len - 2] = data_len[1];\n\n compress[header_len - 1] = data_len[2];\n\n compress.extend_from_slice(split.signature_raw);\n\n compress\n\n }\n\n Err(()) => doc,\n\n }\n\n}\n\n\n", "file_path": "src/schema.rs", "rank": 59, "score": 217912.90962479403 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/array.rs", "rank": 60, "score": 217755.1826422938 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/str.rs", "rank": 61, "score": 217755.1826422938 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 62, "score": 217755.1826422938 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/lockbox.rs", "rank": 63, "score": 217755.1826422938 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/bin.rs", "rank": 64, "score": 217755.1826422938 }, { 
"content": "/// Step through every field/value pair in an object\n\npub fn object_iterate<'a, F>(buf: &mut &'a [u8], len: usize, mut f: F) -> crate::Result<()>\n\n where F: FnMut(&'a str, &mut &'a [u8]) -> crate::Result<()>\n\n{\n\n if len == 0 { return Ok(()); }\n\n let mut old_field = read_str(buf)?;\n\n f(old_field, buf)?;\n\n let mut field: &str;\n\n for _ in 1..len {\n\n let fail_len = buf.len();\n\n field = read_str(buf)?;\n\n match old_field.cmp(&field) {\n\n Ordering::Less => {\n\n // old_field is lower in order. This is correct\n\n f(field, buf)?;\n\n },\n\n Ordering::Equal => {\n\n return Err(Error::BadEncode(fail_len, \"Object has non-unique field\"));\n\n },\n\n Ordering::Greater => {\n\n return Err(Error::BadEncode(fail_len, \"Object fields not in lexicographic order\"));\n\n },\n\n }\n\n old_field = field;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 65, "score": 217412.10032231497 }, { "content": "/// General function for verifying a field-value map in a buffer. Makes sure the keys are unique, \n\n/// valid UTF-8 Strings in lexicographic order.\n\npub fn verify_map(buf: &mut &[u8], len: usize) -> crate::Result<usize> {\n\n let length = buf.len();\n\n object_iterate(buf, len, |_, buf| { verify_value(buf)?; Ok(()) })?;\n\n Ok(length - buf.len())\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 66, "score": 216962.68565653212 }, { "content": "/// Decode a MessagePack value without copying binary data or strings. Decoding will fail if the \n\n/// value isn't in condense-db canonical form. That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn read_value_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<ValueRef<'a>> {\n\n let marker = read_marker(buf)?;\n\n Ok(match marker {\n\n MarkerType::Null => ValueRef::Null,\n\n MarkerType::Boolean(v) => ValueRef::Boolean(v),\n\n MarkerType::NegInt((len, v)) => ValueRef::Integer(read_neg_int(buf, len, v)?),\n\n MarkerType::PosInt((len, v)) => ValueRef::Integer(read_pos_int(buf, len, v)?),\n\n MarkerType::String(len) => ValueRef::String(read_raw_str(buf, len)?),\n\n MarkerType::F32 => ValueRef::F32(buf.read_f32::<BigEndian>()?),\n\n MarkerType::F64 => ValueRef::F64(buf.read_f64::<BigEndian>()?),\n\n MarkerType::Binary(len) => ValueRef::Binary(read_raw_bin(buf, len)?),\n\n MarkerType::Array(len) => {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value_ref(buf)?);\n\n }\n\n ValueRef::Array(v)\n\n },\n\n MarkerType::Object(len) => ValueRef::Object(read_to_map_ref(buf, len)?),\n\n MarkerType::Hash(len) => ValueRef::Hash(read_raw_hash(buf, len)?),\n\n MarkerType::Identity(len) => ValueRef::Identity(read_raw_id(buf, len)?),\n\n MarkerType::Lockbox(len) => ValueRef::Lockbox(read_raw_lockbox(buf, len)?),\n\n MarkerType::Timestamp(len) => ValueRef::Timestamp(read_raw_time(buf, len)?),\n\n })\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 67, "score": 213784.82425235 }, { "content": "/// Read raw Identity out from a buffer\n\npub fn read_raw_id(buf: &mut &[u8], len: usize) -> crate::Result<Identity> {\n\n let fail_len = buf.len();\n\n let id = Identity::decode(buf)?;\n\n if id.size() != len {\n\n Err(Error::BadEncode(fail_len, \"Identity type has invalid size\"))\n\n }\n\n else {\n\n Ok(id)\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 68, "score": 
213306.16226976423 }, { "content": "/// Read raw lockbox data out from a buffer\n\npub fn read_raw_lockbox(buf: &mut &[u8], len: usize) -> crate::Result<Lockbox> {\n\n Ok(Lockbox::decode(len, buf)?)\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 69, "score": 213306.16226976423 }, { "content": "fn decompress_doc(compress: Vec<u8>, compression: &Compress) -> Result<Vec<u8>> {\n\n // Gather info from compressed vec\n\n let split = SplitDoc::split(&compress)?;\n\n let marker = CompressType::try_from(split.compress_raw)\n\n .map_err(|m| Error::BadHeader(format!(\"unrecognized compression marker 0x{:x}\", m)))?;\n\n if let CompressType::None = marker {\n\n return Ok(compress);\n\n }\n\n let header_len = compress.len() - split.data.len() - split.signature_raw.len();\n\n\n\n // Decompress, update the header, append the signature\n\n let mut doc = Vec::new();\n\n doc.extend_from_slice(&compress[..header_len]);\n\n let mut doc = compression.decompress(\n\n doc,\n\n split.data,\n\n marker,\n\n split.signature_raw.len(),\n\n MAX_DOC_SIZE,\n\n )?;\n\n let data_len = (doc.len() - header_len).to_le_bytes();\n\n doc[0] = CompressType::None.into();\n\n doc[header_len - 3] = data_len[0];\n\n doc[header_len - 2] = data_len[1];\n\n doc[header_len - 1] = data_len[2];\n\n doc.extend_from_slice(split.signature_raw);\n\n Ok(doc)\n\n}\n\n\n", "file_path": "src/schema.rs", "rank": 70, "score": 212291.2853635086 }, { "content": "fn decompress_entry(compress: Vec<u8>, compression: &Compress) -> Result<Vec<u8>> {\n\n // Gather info from compressed vec\n\n let split = SplitEntry::split(&compress)?;\n\n let marker = CompressType::try_from(split.compress_raw)\n\n .map_err(|m| Error::BadHeader(format!(\"unrecognized compression marker 0x{:x}\", m)))?;\n\n if let CompressType::None = marker {\n\n return Ok(compress);\n\n }\n\n\n\n // Decompress, update the header, append the signature\n\n let mut entry = Vec::new();\n\n entry.extend_from_slice(&compress[..ENTRY_PREFIX_LEN]);\n\n let mut entry = compression.decompress(\n\n entry,\n\n split.data,\n\n marker,\n\n split.signature_raw.len(),\n\n MAX_ENTRY_SIZE,\n\n )?;\n\n let data_len = (entry.len() - ENTRY_PREFIX_LEN).to_le_bytes();\n", "file_path": "src/schema.rs", "rank": 71, "score": 212291.2853635086 }, { "content": "/// Attempt to read an object as `Value`.\n\npub fn read_object(buf: &mut &[u8]) -> crate::Result<BTreeMap<String, Value>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Object(len) = marker {\n\n read_to_map(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected object\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 72, "score": 209854.9416475615 }, { "content": "/// General function for referencing a UTF-8 string in a buffer. 
Checks for if the \n\n/// length is greater than remaining bytes in the buffer, or if the bytes \n\n/// received are not valid UTF-8.\n\npub fn read_raw_str<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<&'a str> {\n\n let fail_len = buf.len();\n\n if buf.len() >= len {\n\n let (data, rem) = buf.split_at(len);\n\n *buf = rem;\n\n let data = std::str::from_utf8(data)\n\n .map_err(|_| Error::BadEncode(fail_len, \"String wasn't valid UTF-8\"))?;\n\n Ok(data)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"String length larger than amount of data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 73, "score": 204389.2470698928 }, { "content": "pub fn generate_vec<R: Rng, T: Generate>(rng: &mut R, range: ops::Range<usize>) -> Vec<T> {\n\n let len = rng.gen_range(range.start, range.end);\n\n let mut result = Vec::with_capacity(len);\n\n for _ in 0..len {\n\n result.push(T::generate(rng));\n\n }\n\n result\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]\n\npub struct Address {\n\n pub x0: u8,\n\n pub x1: u8,\n\n pub x2: u8,\n\n pub x3: u8,\n\n}\n\n\n\nimpl Generate for Address {\n\n fn generate<R: Rng>(rand: &mut R) -> Self {\n\n Self {\n", "file_path": "examples/logs.rs", "rank": 74, "score": 200592.07800921705 }, { "content": "pub fn generate_vec<R: Rng, T: Generate>(rng: &mut R, range: ops::Range<usize>) -> Vec<T> {\n\n let len = rng.gen_range(range.start, range.end);\n\n let mut result = Vec::with_capacity(len);\n\n for _ in 0..len {\n\n result.push(T::generate(rng));\n\n }\n\n result\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]\n\npub struct Address {\n\n pub x0: u8,\n\n pub x1: u8,\n\n pub x2: u8,\n\n pub x3: u8,\n\n}\n\n\n\nimpl Generate for Address {\n\n fn generate<R: Rng>(rand: &mut R) -> Self {\n\n Self {\n", "file_path": "benches/log.rs", "rank": 75, "score": 200592.07800921705 }, { "content": "/// General function for reading a field-value map from a buffer. Checks to make \n\n/// sure the keys are unique, valid UTF-8 Strings in lexicographic order.\n\npub fn read_to_map(buf: &mut &[u8], len: usize) -> crate::Result<BTreeMap<String, Value>> {\n\n\n\n let mut map: BTreeMap<String,Value> = BTreeMap::new();\n\n object_iterate(buf, len, |field, buf| {\n\n let val = read_value(buf)?;\n\n map.insert(field.to_string(), val);\n\n Ok(())\n\n })?;\n\n Ok(map)\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 76, "score": 197356.14479205976 }, { "content": "/// Attempt to read an object as `ValueRef`.\n\npub fn read_object_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<BTreeMap<&'a str, ValueRef<'a>>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Object(len) = marker {\n\n read_to_map_ref(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected object\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 77, "score": 191992.28781740554 }, { "content": "// Get an object's bytes, without the leading marker\n\nfn get_obj(raw: &mut &[u8]) -> crate::Result<Box<[u8]>> {\n\n let fail_len = raw.len();\n\n if let MarkerType::Object(len) = read_marker(raw)? 
{\n\n get_obj_raw(raw, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected objects in `in`/`nin` fields\"))\n\n }\n\n}\n\n\n", "file_path": "old/validator/object.rs", "rank": 78, "score": 191742.3357801903 }, { "content": "pub fn aead_keygen(key: &mut SecretKey) {\n\n unsafe { libsodium_sys::crypto_aead_xchacha20poly1305_ietf_keygen(key.0.as_mut_ptr()) };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 79, "score": 186142.43599203683 }, { "content": "/// General function for referencing a field-value map in a buffer. Checks to make \n\n/// sure the keys are unique, valid UTF-8 Strings in lexicographic order.\n\npub fn read_to_map_ref<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<BTreeMap<&'a str, ValueRef<'a>>> {\n\n let mut map: BTreeMap<&'a str,ValueRef<'a>> = BTreeMap::new();\n\n object_iterate(buf, len, |field, buf| {\n\n let val = read_value_ref(buf)?;\n\n map.insert(field, val);\n\n Ok(())\n\n })?;\n\n Ok(map)\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 80, "score": 181559.98807244672 }, { "content": "fn print_error_internal(buf: &mut &[u8], offset: usize, err: &str, cur_indent: usize) -> Result<(),()> {\n\n\n\n if buf.len() <= offset {\n\n println!(\"Error: {}\", err);\n\n return Err(());\n\n }\n\n\n\n let marker = if let Ok(marker) = read_marker(buf) {\n\n marker\n\n }\n\n else {\n\n println!(\"Expected marker, but ran out of data\");\n\n return Err(());\n\n };\n\n\n\n match marker {\n\n MarkerType::Null => { print!(\"null\"); },\n\n MarkerType::Boolean(v) => { print!(\"{}\", v); },\n\n MarkerType::NegInt((len, v)) => {\n\n if let Ok(val) = read_neg_int(buf, len, v) {\n", "file_path": "old/decode.rs", "rank": 81, "score": 181394.53142004614 }, { "content": "#[inline]\n\nfn run_utf8_validation(v: &[u8]) -> Result<usize, Utf8Error> {\n\n let mut index = 0;\n\n let mut count = 0;\n\n let len = v.len();\n\n\n\n let usize_bytes = mem::size_of::<usize>();\n\n let ascii_block_size = 2 * usize_bytes;\n\n let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 };\n\n\n\n while index < len {\n\n let old_offset = index;\n\n macro_rules! err {\n\n ($error_len: expr) => {\n\n return Err(Utf8Error {\n\n valid_up_to: old_offset,\n\n error_len: $error_len,\n\n })\n\n }\n\n }\n\n\n", "file_path": "old/str_char.rs", "rank": 82, "score": 180951.19733526866 }, { "content": "pub fn crypt_keypair(pk: &mut PublicCryptKey, sk: &mut SecretCryptKey) {\n\n unsafe { libsodium_sys::crypto_box_keypair(pk.0.as_mut_ptr(), sk.0.as_mut_ptr()) };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 83, "score": 180665.213873507 }, { "content": "pub fn sign_keypair(pk: &mut PublicSignKey, sk: &mut SecretSignKey) {\n\n unsafe { libsodium_sys::crypto_sign_keypair(pk.0.as_mut_ptr(),sk.0.as_mut_ptr()) };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 84, "score": 180665.21387350696 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n\n/// Validator for byte sequences.\n\n///\n\n/// This validator type will only pass binary values (a sequence of bytes). A binary sequence can\n\n/// also be treated as a little-endian arbitrary-length unsigned integer. 
Validation passes if:\n\n///\n\n/// - The bits set in `bits_clr` are cleared in the byte sequence.\n\n/// - The bits set in `bits_set` are set in the byte sequence.\n\n/// - If `max` has 1 or more bytes, the value is less than the maximum in `max`, or equal to it if\n\n/// `ex_max` is not set to true.\n\n/// - The value is greater than the minimum in `min`, or equal to it if `ex_min` is not set to true.\n\n/// - The value's length in bytes is less than or equal to the value in `max_len`.\n\n/// - The value's length in bytes is greater than or equal to the value in `min_len`.\n\n/// - If the `in` list is not empty, the value must be among the values in the list.\n\n/// - The value must not be among the values in the `nin` list.\n\n///\n\n/// # Defaults\n", "file_path": "src/validator/bin.rs", "rank": 85, "score": 178948.14352285524 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n", "file_path": "src/validator/str.rs", "rank": 86, "score": 178948.14352285524 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n\n/// Validator for arrays.\n\n///\n\n/// This validator type will only pass array values. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the array must be among the arrays in the list.\n\n/// - The array must not be among the arrays in the `nin` list.\n\n/// - The arrays's length is less than or equal to the value in `max_len`.\n\n/// - The arrays's length is greater than or equal to the value in `min_len`.\n\n/// - If `unique` is true, the array items are all unique.\n\n/// - For each validator in the `contains` list, at least one item in the array passes.\n\n/// - Each item in the array is checked with a validator at the same index in the `prefix` array.\n\n/// All validators must pass. If there is no validator at the same index, the validator in\n\n/// `items` must pass. If a validator is not used, it passes automatially.\n\n///\n\n/// # Defaults\n\n///\n", "file_path": "src/validator/array.rs", "rank": 87, "score": 178948.14352285524 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n\nmacro_rules! lockbox_validator {\n\n ($t: ty, $e: ident, $v: ident, $link:expr, $name:expr) => {\n\n #[doc = \"Validator for a [`\"]\n\n #[doc = $name]\n\n #[doc = \"`][\"]\n\n #[doc = $link]\n\n #[doc = \"].\\n\\n\"]\n\n #[doc = \"This validator will only pass a \"]\n\n #[doc = $name]\n\n #[doc = \" value. Validation passes if:\\n\\n\"]\n\n #[doc = \"- The number of bytes in the lockbox is less than or equal to `max_len`\\n\"]\n\n #[doc = \"- The number of bytes in the lockbox is greater than or equal to `min_len`\\n\"]\n\n /// # Defaults\n\n ///\n\n /// Fields that aren't specified for the validator use their defaults instead. 
The defaults for\n\n /// each field are:\n", "file_path": "src/validator/lockbox.rs", "rank": 88, "score": 178948.14352285524 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 89, "score": 178948.14352285524 }, { "content": "// Get an object's bytes, after the leading marker has already been parsed\n\nfn get_obj_raw(raw: &mut &[u8], len: usize) -> crate::Result<Box<[u8]>> {\n\n let start: &[u8] = raw;\n\n verify_map(raw, len)?;\n\n let (obj, _) = start.split_at(start.len()-raw.len());\n\n Ok(obj.to_vec().into_boxed_slice())\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use encode;\n\n use value::Value;\n\n use crypto::Hash;\n\n use timestamp::Timestamp;\n\n use super::*;\n\n\n\n fn read_it(raw: &mut &[u8], is_query: bool) -> (usize, Vec<Validator>) {\n\n let mut types = Vec::new();\n\n types.push(Validator::Invalid);\n\n types.push(Validator::Valid);\n", "file_path": "old/validator/object.rs", "rank": 90, "score": 178377.8981742813 }, { "content": "pub fn sign_detached(k: &SecretSignKey, m: &[u8]) -> Sign {\n\n let mut sig: Sign = Default::default();\n\n unsafe { libsodium_sys::crypto_sign_ed25519_detached(\n\n sig.0.as_mut_ptr(),\n\n ptr::null_mut(),\n\n m.as_ptr(),\n\n m.len() as c_ulonglong,\n\n k.0.as_ptr());\n\n };\n\n sig\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 91, "score": 175958.75326560595 }, { "content": "// Shouldn't fail as long as the input parameters are valid\n\npub fn derive_id(k: &SecretKey, id: &mut StreamId) {\n\n unsafe {\n\n let ctx = CString::from_vec_unchecked(b\"fogpack\".to_vec());\n\n libsodium_sys::crypto_kdf_derive_from_key(id.0.as_mut_ptr(), id.0.len(), 1, ctx.as_ptr(), k.0.as_ptr());\n\n };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 92, "score": 173038.22913057584 }, { "content": "pub fn key_from_id(version: u8, id: PublicSignKey) -> Key {\n\n Key { version, id }\n\n}\n\n\n", "file_path": "old/crypto/key.rs", "rank": 93, "score": 169957.70231730805 }, { "content": "pub fn identity_from_id(version: u8, id: PublicSignKey) -> Identity {\n\n Identity { version, id }\n\n}\n\n\n\n/// Signatures are used to authenticate a piece of data based on its hash.\n\n///\n\n/// One can be generated for a given [`Hash`] using a the [`sign`] function on a Vault. The \n\n/// versions of the underlying key and hash are stored, along with the identifying information of \n\n/// the key used.\n\n///\n\n/// It can be verified by providing the hash of the data. 
The signing identity can be determined \n\n/// from checking against the Identity returned by `signed_by`.\n\n///\n\n/// [`Hash`]: ./struct.Hash.html\n\n/// [`sign`]: ./struct.Vault.html#method.sign\n\n#[derive(Debug,PartialEq,Clone)]\n\npub struct Signature {\n\n id: Identity,\n\n hash_version: u8,\n\n sig: Sign,\n", "file_path": "old/crypto/key.rs", "rank": 94, "score": 169957.70231730805 }, { "content": "pub fn stream_from_id(version: u8, id: StreamId) -> StreamKey {\n\n StreamKey { version, id }\n\n}\n\n\n\nimpl fmt::Debug for StreamKey {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"{} {{ ver={}, {:x?} }}\", stringify!(StreamKey), &self.version, &self.id.0[..])\n\n }\n\n}\n\n\n\n/// FullStreamKey: A secret XChaCha20 key, identifiable by its ID\n\n#[derive(Clone)]\n\npub struct FullStreamKey {\n\n version: u8,\n\n id: StreamId,\n\n key: SecretKey,\n\n}\n\n\n\nimpl FullStreamKey {\n\n \n", "file_path": "old/crypto/stream.rs", "rank": 95, "score": 169957.70231730805 }, { "content": "pub fn sign_seed_keypair(pk: &mut PublicSignKey, sk: &mut SecretSignKey, seed: &Seed) {\n\n unsafe { libsodium_sys::crypto_sign_seed_keypair(pk.0.as_mut_ptr(),sk.0.as_mut_ptr(), seed.0.as_ptr()) };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 96, "score": 168828.1418004748 }, { "content": "pub fn ed25519_sk_to_seed(seed: &mut Seed, ed: &SecretSignKey) {\n\n unsafe { libsodium_sys::crypto_sign_ed25519_sk_to_seed(seed.0.as_mut_ptr(),ed.0.as_ptr()) };\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 97, "score": 167388.42184034613 }, { "content": "fn format_string(val: &str, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n if val.starts_with('<') {\n\n write!(f, \"\\\"<{}\\\"\", val)\n\n }\n\n else {\n\n write!(f, \"\\\"{}\\\"\", val)\n\n }\n\n}\n\n\n", "file_path": "old/value.rs", "rank": 98, "score": 167156.13168409193 }, { "content": "fn format_str(val: &str, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n if val.starts_with('<') {\n\n write!(f, \"\\\"<{}\\\"\", val)\n\n }\n\n else {\n\n write!(f, \"\\\"{}\\\"\", val)\n\n }\n\n}\n\n\n\nimpl Display for Value {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n self.pretty_print(0, f)\n\n }\n\n}\n\n\n\n/// ValueRef stores a reference to a decoded fog-pack byte sequence\n\n#[derive(Clone, Debug)]\n\npub enum ValueRef<'a> {\n\n Null,\n\n Boolean(bool),\n", "file_path": "old/value.rs", "rank": 99, "score": 167156.13168409193 } ]
Rust
src/main.rs
thehamsterjam/better_aws_sso
4a2fbee508c549dbba65fe17e7557fdb844581d3
use clap::{App, Arg}; use ini::Ini; use serde::Deserialize; use std::{thread, time}; use ureq::Response; use webbrowser; extern crate dirs; use std::time::{SystemTime, UNIX_EPOCH}; const GRANT_TYPE: &str = "urn:ietf:params:oauth:grant-type:device_code"; #[derive(Debug)] #[allow(non_snake_case)] struct SsoProfile { sso_profile_name : String, sso_start_url: String, sso_region : String, sso_account_id : String, sso_role_name : String } impl SsoProfile { fn new( sso_profile_name : String, sso_start_url: String, sso_region : String, sso_account_id : String, sso_role_name : String) -> SsoProfile { SsoProfile{ sso_profile_name, sso_start_url, sso_region, sso_account_id, sso_role_name } } } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct RegisterClientResponse { clientId: String, clientIdIssuedAt: i32, clientSecret: String, clientSecretExpiresAt: i32, authorizationEndpoint: Option<String>, tokenEndpoint: Option<String>, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct StartDeviceAuthorizationResponse { deviceCode: String, expiresIn: i32, interval: i32, userCode: String, verificationUri: Option<String>, verificationUriComplete: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct CreateTokenResponse { accessToken: String, expiresIn: i32, idToken: Option<String>, refreshToken: Option<String>, tokenType: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct RoleCreds { accessKeyId: String, expiration: i64, secretAccessKey: String, sessionToken: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct GetRoleCredsResponse { roleCredentials: RoleCreds, } fn main() { static VERSION: &'static str = include_str!(concat!("", "version")); let matches = App::new("AWS SSO, but better") .version(VERSION) .author("Damien Maier") .author("Saves your SSO login credentials into the credentials file, so it can be used with things like terraform") .arg(Arg::with_name("profile") .short("p") .long("profile") .takes_value(true) .required(true) .help("AWS profile set up for SSO")) .arg(Arg::with_name("save_as_profile_name") .short("s") .long("save_as_profile_name") .help("Whether to save the credentials under the profile name with an _ at the end or under <account_id>_<role_name>")) .arg(Arg::with_name("all") .short("a") .long("all") .help("Get credentials for all profiles with the same start url as the specified profile")) .arg(Arg::with_name("verbose") .short("v") .long("verbose") .help("Print verbose logging")) .get_matches(); let profile = matches.value_of("profile").unwrap().to_owned(); let verbose = matches.is_present("verbose"); let save_as_profile_name = matches.is_present("save_as_profile_name"); let all = matches.is_present("all"); let home = dirs::home_dir().unwrap().to_str().unwrap().to_owned(); let sso_profiles = get_sso_profiles(profile, &home, all); let oidc_url = format!("https://oidc.{}.amazonaws.com", sso_profiles[0].sso_region); let sso_url = format!("https://portal.sso.{}.amazonaws.com", sso_profiles[0].sso_region); let register_client_resp = register_client(&oidc_url, verbose); let device_auth_resp = device_auth(&oidc_url, &sso_profiles[0].sso_start_url, &register_client_resp, verbose); let create_token_resp = create_token(&oidc_url, &register_client_resp, &device_auth_resp, verbose); for sso_profile in sso_profiles { let get_role_creds_resp = get_role_credentials( &sso_url, &sso_profile.sso_account_id, &sso_profile.sso_role_name, &create_token_resp, verbose, ); save_sso( &sso_profile.sso_profile_name, 
&sso_profile.sso_account_id, &sso_profile.sso_role_name, &get_role_creds_resp, &home, save_as_profile_name, verbose, ); } } fn get_sso_profiles(profile_name : String, home : &String, all : bool) -> Vec<SsoProfile> { let aws_conf = Ini::load_from_file(format!("{}{}", home, "/.aws/config")).unwrap(); let sso_start_url = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_start_url") .unwrap().to_owned(); let sso_region = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_region") .unwrap().to_owned(); let sso_account_id = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_account_id") .unwrap().to_owned(); let sso_role_name = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_role_name") .unwrap().to_owned(); if !all { return vec![SsoProfile::new(profile_name, sso_start_url, sso_region, sso_account_id, sso_role_name)] } else { let mut profiles = Vec::new(); for (section, properties) in aws_conf.iter() { if properties.contains_key("sso_start_url") { if properties.get("sso_start_url").unwrap() == sso_start_url { profiles.push(SsoProfile::new( section.unwrap().to_owned(), properties.get("sso_start_url").unwrap().to_owned(), properties.get("sso_region").unwrap().to_owned(), properties.get("sso_account_id").unwrap().to_owned(), properties.get("sso_role_name").unwrap().to_owned(), )) } } } return profiles; } } fn register_client(oidc_url: &String, verbose: bool) -> RegisterClientResponse { let start = SystemTime::now(); let since_the_epoch = start .duration_since(UNIX_EPOCH) .expect("Time went backwards") .as_secs(); let register_resp = ureq::post(format!("{}{}", oidc_url, "/client/register").as_str()) .set("Content-type", "application/json") .set("Action", "RegisterClient") .set("Version", "2019-06-10") .send_json(serde_json::json!({ "clientName" : format!("rustSSO-{}", since_the_epoch).as_str(), "clientType" : "public" })) .into_json_deserialize::<RegisterClientResponse>(); if verbose { println!("{:#?}", register_resp); } let register_resp_un = register_resp.unwrap(); if verbose { println!("{:#?}", register_resp_un); } register_resp_un } fn device_auth( oidc_url: &String, start_url: &String, register_resp: &RegisterClientResponse, verbose: bool, ) -> StartDeviceAuthorizationResponse { let device_auth_resp = ureq::post(format!("{}{}", oidc_url, "/device_authorization").as_str()) .set("Content-type", "application/json") .set("Action", "StartDeviceAuthorization") .set("Version", "2019-06-10") .send_json(serde_json::json!( { "clientId" : register_resp.clientId, "clientSecret" : register_resp.clientSecret, "startUrl" : start_url })) .into_json_deserialize::<StartDeviceAuthorizationResponse>(); if verbose { println!("{:#?}", device_auth_resp); } let device_auth_resp_un = device_auth_resp.unwrap(); if verbose { println!("{:#?}", device_auth_resp_un); } device_auth_resp_un } fn create_token( oidc_url: &String, register_resp: &RegisterClientResponse, device_auth_resp: &StartDeviceAuthorizationResponse, verbose: bool, ) -> CreateTokenResponse { if webbrowser::open(device_auth_resp.verificationUriComplete.as_str()).is_err() { println!( "Go to {}", device_auth_resp.verificationUriComplete.as_str() ); } let sec = time::Duration::from_secs(1); let create_tok_resp_un = loop { thread::sleep(sec); let create_tok_resp = ureq::post(format!("{}{}", oidc_url, "/token").as_str()) .set("Content-type", "application/json") .set("Action", "CreateToken") .set("Version", "2019-06-10") .send_json(serde_json::json!({ "clientId": register_resp.clientId, "clientSecret": 
register_resp.clientSecret, "deviceCode": device_auth_resp.deviceCode, "grantType": GRANT_TYPE, })); if verbose { println!("{:#?}", create_tok_resp); } if create_tok_resp.ok() { break create_tok_resp .into_json_deserialize::<CreateTokenResponse>() .unwrap(); } else { if verbose { println!("{:#?}", create_tok_resp.into_json()); } } }; if verbose { println!("{:#?}", create_tok_resp_un); } create_tok_resp_un } fn get_role_credentials( sso_url: &String, sso_account_id: &String, sso_role_name: &String, create_token_resp: &CreateTokenResponse, verbose: bool, ) -> GetRoleCredsResponse { let get_role_creds = ureq::get(format!("{}{}", sso_url, "/federation/credentials").as_str()) .query("account_id", sso_account_id) .query("role_name", sso_role_name) .set( "x-amz-sso_bearer_token", format!("{}", create_token_resp.accessToken).as_str(), ) .call() .into_json_deserialize::<GetRoleCredsResponse>(); if verbose { println!("{:#?}", get_role_creds); } let get_role_creds_un = get_role_creds.unwrap(); if verbose { println!("{:#?}", get_role_creds_un); } get_role_creds_un } fn save_sso( profile: &str, sso_account_id: &String, sso_role_name: &String, get_role_creds: &GetRoleCredsResponse, home_dir: &String, save_as_profile_name: bool, verbose: bool, ) { let section_name = if save_as_profile_name { format!("{}_", profile) } else { format!("{}_{}", sso_account_id, sso_role_name) }; let mut aws_creds = Ini::load_from_file(format!("{}{}", home_dir, "/.aws/credentials")).unwrap(); if verbose { println!("Section name : {}", section_name); } aws_creds .with_section(Some(section_name)) .set( "aws_access_key_id", get_role_creds.roleCredentials.accessKeyId.to_owned(), ) .set( "aws_secret_access_key", get_role_creds.roleCredentials.secretAccessKey.to_owned(), ) .set( "aws_session_token", get_role_creds.roleCredentials.sessionToken.to_owned(), ); aws_creds .write_to_file(format!("{}{}", home_dir, "/.aws/credentials")) .unwrap(); } fn _list_accounts(sso_url: String, access_token: String) -> Response { ureq::get(format!("{}{}", sso_url, "/assignment/accounts").as_str()) .query("max_result", "100") .set( "x-amz-sso_bearer_token", format!("{}", access_token).as_str(), ) .call() }
use clap::{App, Arg}; use ini::Ini; use serde::Deserialize; use std::{thread, time}; use ureq::Response; use webbrowser; extern crate dirs; use std::time::{SystemTime, UNIX_EPOCH}; const GRANT_TYPE: &str = "urn:ietf:params:oauth:grant-type:device_code"; #[derive(Debug)] #[allow(non_snake_case)] struct SsoProfile { sso_profile_name : String, sso_start_url: String, sso_region : String, sso_account_id : String, sso_role_name : String } impl SsoProfile { fn new( sso_profile_name : String, sso_start_url: String, sso_region : String, sso_account_id : String, sso_role_name : String) -> SsoProfile { SsoProfile{ sso_profile_name, sso_start_url, sso_region, sso_account_id, sso_role_name } } } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct RegisterClientResponse { clientId: String, clientIdIssuedAt: i32, clientSecret: String, clientSecretExpiresAt: i32, authorizationEndpoint: Option<String>, tokenEndpoint: Option<String>, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct StartDeviceAuthorizationResponse { deviceCode: String, expiresIn: i32, interval: i32, userCode: String, verificationUri: Option<String>, verificationUriComplete: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct CreateTokenResponse { accessToken: String, expiresIn: i32, idToken: Option<String>, refreshToken: Option<String>, tokenType: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct RoleCreds { accessKeyId: String, expiration: i64, secretAccessKey: String, sessionToken: String, } #[derive(Debug, Deserialize)] #[allow(non_snake_case)] struct GetRoleCredsResponse { roleCredentials: RoleCreds, } fn main() { static VERSION: &'static str = include_str!(concat!("", "version")); let matches = App::new("AWS SSO, but better") .version(VERSION) .author("Damien Maier") .author("Saves your SSO login credentials into the credentials file, so it can be used with things like terraform") .arg(Arg::with_name("profile") .short("p") .long("profile") .takes_value(true) .required(true) .help("AWS profile set up for SSO")) .arg(Arg::with_name("save_as_profile_name") .short("s") .long("save_as_profile_name") .help("Whether to save the credentials under the profile name with an _ at the end or under <account_id>_<role_name>")) .arg(Arg::with_name("all") .short("a") .long("all") .help("Get credentials for all profiles with the same start url as the specified profile")) .arg(Arg::with_name("verbose") .short("v") .long("verbose") .help("Print verbose logging")) .get_matches(); let profile = matches.value_of("profile").unwrap().to_owned(); let verbose = matches.is_present("verbose"); let save_as_profile_name = matches.is_present("save_as_profile_name"); let all = matches.is_present("all"); let home = dirs::home_dir().unwrap().to_str().unwrap().to_owned(); let sso_profiles = get_sso_profiles(profile, &home, all); let oidc_url = format!("https://oidc.{}.amazonaws.com", sso_profiles[0].sso_region); let sso_url = format!("https://portal.sso.{}.amazonaws.com", sso_profiles[0].sso_region); let register_client_resp = register_client(&oidc_url, verbose); let device_auth_resp = device_auth(&oidc_url, &sso_profiles[0].sso_start_url, &register_client_resp, verbose); let create_token_resp = create_token(&oidc_url, &register_client_resp, &device_auth_resp, verbose); for sso_profile in sso_profiles { let get_role_creds_resp = get_role_credentials( &sso_url, &sso_profile.sso_account_id, &sso_profile.sso_role_name, &create_token_resp, verbose, ); save_sso( &sso_profile.sso_profile_name, 
&sso_profile.sso_account_id, &sso_profile.sso_role_name, &get_role_creds_resp, &home, save_as_profile_name, verbose, ); } } fn get_sso_profiles(profile_name : String, home : &String, all : bool) -> Vec<SsoProfile> { let aws_conf = Ini::load_from_file(format!("{}{}", home, "/.aws/config")).unwrap(); let sso_start_url = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_start_url") .unwrap().to_owned(); let sso_region = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_region") .unwrap().to_owned(); let sso_account_id = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_account_id") .unwrap().to_owned(); let sso_role_name = aws_conf .get_from(Some(format!("{}", profile_name)), "sso_role_name") .unwrap().to_owned(); if !all { return vec![SsoProfile::new(profile_name, sso_start_url, sso_region, sso_account_id, sso_role_name)] } else { let mut profiles = Vec::new(); for (section, properties) in aws_conf.iter() { if properties.contains_key("sso_start_url") { if properties.get("sso_start_url").unwrap() == sso_start_url { profiles.push(SsoProfile::new( section.unwrap().to_owned(), properties.get("sso_start_url").unwrap().to_owned(), properties.get("sso_region").unwrap().to_owned(), properties.get("sso_account_id").unwrap().to_owned(), properties.get("sso_role_name").unwrap().to_owned(), )) } } } return profiles; } } fn register_client(oidc_url: &String, verbose: bool) -> RegisterClientResponse { let start = SystemTime::now(); let since_the_epoch = start .duration_since(UNIX_EPOCH) .expect("Time went backwards") .as_secs(); let register_resp = ureq::post(format!("{}{}", oidc_url, "/client/register").as_str()) .set("Content-type", "application/json") .set("Action", "RegisterClient") .set("Version", "2019-06-10") .send_json(serde_json::json!({ "clientName" : format!("rustSSO-{}", since_the_epoch).as_str(), "clientType" : "public" })) .into_json_deserialize::<RegisterClientResponse>(); if verbose { println!("{:#?}", register_resp); } let register_resp_un = register_resp.unwrap(); if verbose { println!("{:#?}", register_resp_un); } register_resp_un } fn device_auth( oidc_url: &String, start_url: &String, register_resp: &RegisterClientResponse, verbose: bool, ) -> StartDeviceAuthorizationResponse { let device_auth_resp = ureq::post(format!("{}{}", oidc_url, "/device_authorization").as_str()) .set("Content-type", "application/json") .set("Action", "StartDeviceAuthorization") .set("Version", "2019-06-10") .send_json(serde_json::json!( { "clientId" : register_resp.clientId, "clientSecret" : register_resp.clientSecret, "startUrl" : start_url })) .into_json_deserialize::<StartDeviceAuthorizationResponse>(); if verbose { println!("{:#?}", device_auth_resp); } let device_auth_resp_un = device_auth_resp.unwrap(); if verbose { println!("{:#?}", device_auth_resp_un); } device_auth_resp_un } fn create_token( oidc_url: &String, register_resp: &RegisterClientResponse, device_auth_resp: &StartDeviceAuthorizationResponse, verbose: bool, ) -> CreateTokenResponse { if webbrowser::open(device_auth_resp.verificationUriComplete.as_str()).is_err() { println!( "Go to {}", device_auth_resp.verificationUriComplete.as_str() ); } let sec = time::Duration::from_secs(1); let create_tok_resp_un = loop { thread::sleep(sec); let create_tok_resp = ureq::post(format!("{}{}", oidc_url, "/token").as_str()) .set("Content-type", "application/json") .set("Action", "CreateToken") .set("Version", "2019-06-10") .send_json(serde_json::json!({ "clientId": register_resp.clientId, "clientSecret": 
register_resp.clientSecret, "deviceCode": device_auth_resp.deviceCode, "grantType": GRANT_TYPE, })); if verbose { println!("{:#?}", create_tok_resp); } if create_tok_resp.ok() { break create_tok_resp .into_json_deserialize::<CreateTokenResponse>() .unwrap(); } else { if verbose { println!("{:#?}", create_tok_resp.into_json()); } } }; if verbose { println!("{:#?}", create_tok_resp_un); } create_tok_resp_un } fn get_role_credentials( sso_url: &String, sso_account_id: &String, sso_role_name: &String, create_token_resp: &CreateTokenResponse, verbose: bool, ) -> GetRoleCredsResponse { let get_role_creds = ureq::get(format!("{}{}", sso_url, "/federation/credentials").as_str()) .query("account_id", sso_account_id) .query("role_name", sso_role_name) .set( "x-amz-sso_bearer_token", format!("{}", create_token_resp.accessToken).as_str(), ) .call() .into_json_deserialize::<GetRoleCredsResponse>(); if verbose { println!("{:#?}", get_role_creds); } let get_role_creds_un = get_role_creds.unwrap(); if verbose { println!("{:#?}", get_role_creds_un); } get_role_creds_un } fn save_sso( profile: &str, sso_account_id: &String, sso_role_name: &String, get_role_creds: &GetRoleCredsResponse, home_dir: &String, save_as_profile_name: bool, verbose: bool, ) { let section_name = i
fn _list_accounts(sso_url: String, access_token: String) -> Response {
    ureq::get(format!("{}{}", sso_url, "/assignment/accounts").as_str())
        .query("max_result", "100")
        .set(
            "x-amz-sso_bearer_token",
            format!("{}", access_token).as_str(),
        )
        .call()
}
f save_as_profile_name {
        format!("{}_", profile)
    } else {
        format!("{}_{}", sso_account_id, sso_role_name)
    };
    let mut aws_creds =
        Ini::load_from_file(format!("{}{}", home_dir, "/.aws/credentials")).unwrap();
    if verbose {
        println!("Section name : {}", section_name);
    }
    aws_creds
        .with_section(Some(section_name))
        .set(
            "aws_access_key_id",
            get_role_creds.roleCredentials.accessKeyId.to_owned(),
        )
        .set(
            "aws_secret_access_key",
            get_role_creds.roleCredentials.secretAccessKey.to_owned(),
        )
        .set(
            "aws_session_token",
            get_role_creds.roleCredentials.sessionToken.to_owned(),
        );
    aws_creds
        .write_to_file(format!("{}{}", home_dir, "/.aws/credentials"))
        .unwrap();
}
function_block-function_prefixed
[ { "content": "fn get_latest_version() -> String {\n\n let api_url = \"https://api.github.com/repos/thehamsterjam/better_aws_sso/releases/latest\";\n\n let resp = ureq::get(api_url)\n\n .call()\n\n .into_json()\n\n .unwrap();\n\n resp[\"tag_name\"].to_string()\n\n}", "file_path": "build.rs", "rank": 5, "score": 65925.5732385311 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let out_dir = \"./src\";\n\n let dest_path = Path::new(&out_dir).join(\"version\");\n\n let mut f = BufWriter::new(File::create(&dest_path)?);\n\n let version = get_latest_version().replace(\"\\\"\", \"\");\n\n write!(f, \"{}\", version)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 12, "score": 37433.3723783363 }, { "content": "[![Create Release](https://github.com/thehamsterjam/better_aws_sso/workflows/Create%20Release/badge.svg)](https://github.com/thehamsterjam/better_aws_sso/actions?query=workflow%3A%22Create+Release%22) \n\n[![Rust](https://github.com/thehamsterjam/better_aws_sso/workflows/Rust/badge.svg)](https://github.com/thehamsterjam/better_aws_sso/actions?query=workflow%3ARust)\n\n# (Slightly) better AWS sso login\n\n\n\nUsing AWS SSO with tools like terraform require you to go to the AWS SSO start url, click the account you want, click command line access, copy the text there and then save it to your AWS credentials file. \n\n\n\nThis tool skips all that fuss, set up your AWS SSO like you normally would (`aws sso configure`) ([more info](#Configuring-AWS-SSO-for-AWS-CLI)). And then just run\n\n\n\n```shell\n\n$ ssologin -p <aws_profile> \n\n```\n\n\n\nand it will save the credentials to your AWS credentials file directly.\n\n\n\nThere is an extended mode, which will collect all your credentials from a single start URL (meaning that you run `ssologin` once, and authenticate once in your browser). Use the `-a` flag. \n\n\n\n```shell\n\n$ ssologin -p <aws_profile> -a\n\n```\n\n\n\n## Installation\n\n\n\n### Linux machines\n\n\n\n#### Automatic Installation and Updates\n\nRun the below command to download and run the installer: \n\n\n\n```shell\n\n$ curl -LJ https://raw.githubusercontent.com/thehamsterjam/better_aws_sso/master/install/linux_install.sh | bash\n\n```\n\n\n\n#### Manual Installation\n\nThe installer installs to a default location `/usr/local/bin`. To change this, instead download the installer, and pass the desired path in. This path is preserved with all updates. \n\n\n\n```shell\n\n$ wget https://raw.githubusercontent.com/thehamsterjam/better_aws_sso/master/install/linux_install.sh\n\n```\n\n\n\n```shell\n\n$ chmod +x ./linux_install.sh\n\n$ ./linux_install.sh -p <desired_path>\n\n```\n\n\n\n### Windows and Mac Users\n\n\n\nPlease download the [latest release](https://github.com/thehamsterjam/better_aws_sso/releases/latest) for your OS.\n\n\n\n* Help wanted creating install scripts for Windows/Mac users\n\n\n", "file_path": "README.md", "rank": 16, "score": 29473.848441508762 }, { "content": "## Configuring AWS SSO for AWS CLI\n\n\n\nConfigure your [AWS CLI config file](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html), which is usually located at `~/.aws/config`, with the below snippet, filling in the fields with your specific information: \n\n\n\n```\n\n[my-dev-profile]\n\nsso_start_url = https://my-sso-portal.awsapps.com/start\n\nsso_region = us-east-1\n\nsso_account_id = 123456789011\n\nsso_role_name = readOnly\n\nregion = us-west-2\n\noutput = json\n\n```\n\n\n\nThen run `ssologin -p my-dev-profile`. 
This will add credentials to your `~/.aws/credentials` file in the following form:\n\n\n\n```\n\n[123456789011_readOnly]\n\naws_access_key_id=ASIAXYZ0123456789ABC\n\naws_secret_access_key=xyzABC123456789defGHIjklMN/xyzABC1234567\n\naws_session_token=XYZ\n", "file_path": "README.md", "rank": 17, "score": 29471.974168794386 }, { "content": "// CARGO BUILD SCRIPT\n\n\n\nuse std::{\n\n error::Error, fs::File, io::{BufWriter, Write}, path::Path\n\n};\n\nuse ureq;\n\n\n", "file_path": "build.rs", "rank": 24, "score": 3.1775298827359024 } ]
Rust
src/sol.rs
Aehmlo/sudoku
d2cb746ff47d427128f53f593efc555212cf32b5
use crate::sudoku::Grid; use crate::Element; use crate::Point; use crate::Sudoku; use crate::DIMENSIONS; use std::ops::{Index, IndexMut}; #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] pub enum Difficulty { #[doc(hidden)] Unplayable, Beginner, Easy, Intermediate, Difficult, Advanced, } impl From<usize> for Difficulty { fn from(score: usize) -> Self { use crate::Difficulty::*; match score { 0...49 => Unplayable, 50...150 => Beginner, 151...250 => Easy, 251...400 => Intermediate, 401...550 => Difficult, _ => Advanced, } } } #[derive(Clone, Debug)] #[allow(missing_copy_implementations)] pub enum Error { Unknown, #[doc(hidden)] __TestOther, } pub trait Solve: Sized { fn solution(&self) -> Result<Self, Error>; fn is_uniquely_solvable(&self) -> bool { self.solution().is_ok() } } pub trait Score: Solve { fn score(&self) -> Option<usize>; fn difficulty(&self) -> Option<Difficulty> { self.score().map(Into::into) } } #[derive(Clone, Copy, Debug, PartialEq)] pub struct PossibilitySet { pub values: u64, } impl PossibilitySet { pub fn new(order: u8) -> Self { let mut values = 0; for i in 1..=order.pow(2) as usize { values |= 1 << (i - 1); } Self { values } } pub fn eliminate(self, value: usize) -> Option<Self> { let values = self.values & !(1 << (value - 1)); match values { 0 => None, _ => Some(Self { values }), } } pub fn freedom(self) -> usize { let mut x = self.values; let mut n = 0; while x > 0 { x &= x - 1; n += 1; } n } pub fn contains(self, value: usize) -> bool { self.values | (1 << (value - 1)) == self.values } } #[derive(Debug)] pub struct PossibilityMap { possibilities: Vec<Option<PossibilitySet>>, order: u8, parent: Option<Sudoku>, } impl PossibilityMap { pub fn new(order: u8) -> Self { Self { possibilities: vec![ Some(PossibilitySet::new(order)); (order as usize).pow(2 + DIMENSIONS as u32) ], order, parent: None, } } pub fn eliminate(&mut self, index: Point, value: usize) { self[index] = self[index].and_then(|e| e.eliminate(value)); } pub fn next(&self) -> (Option<Point>, Option<PossibilitySet>) { let mut best = None; let mut best_index = None; let mut best_score = None; for index in self.points() { if let Some(element) = self[index] { if best_score.is_none() || best_score.unwrap() > element.freedom() { best = Some(element); best_index = Some(index); best_score = Some(element.freedom()); } } else if let Some(ref parent) = self.parent { if parent[index].is_none() { return (None, None); } } } (best_index, best) } } impl Index<Point> for PossibilityMap { type Output = Option<PossibilitySet>; fn index(&self, index: Point) -> &Self::Output { let index = index.fold(self.order); &self.possibilities[index] } } impl IndexMut<Point> for PossibilityMap { fn index_mut(&mut self, index: Point) -> &mut Option<PossibilitySet> { let index = index.fold(self.order); &mut self.possibilities[index] } } impl Grid for PossibilityMap { fn points(&self) -> Vec<Point> { (0..(self.order as usize).pow(2 + DIMENSIONS as u32)) .map(|p| Point::unfold(p, self.order)) .collect() } } impl From<Sudoku> for PossibilityMap { fn from(sudoku: Sudoku) -> Self { let order = sudoku.order; let mut map = PossibilityMap::new(order); for i in 0..(sudoku.order as usize).pow(2 + DIMENSIONS as u32) { let point = Point::unfold(i, order); if sudoku[point].is_some() { map[point] = None; } else { let groups = sudoku.groups(point); for group in &groups { let elements = group.elements(); for element in elements { if let Some(Element(value)) = element { map.eliminate(point, value as usize); } } } } } map.parent = Some(sudoku); map } } 
pub fn solve(puzzle: &Sudoku) -> Result<Sudoku, Error> { solve_and_score(puzzle).map(|(sol, _)| sol) } pub fn solve_and_score(puzzle: &Sudoku) -> Result<(Sudoku, usize), Error> { let mut context = Context { problem: puzzle.clone(), count: 0, solution: None, branch_score: 0, }; recurse(&mut context, 0); let s = context.branch_score; let c = calculate_c(puzzle) as isize; let e = count_empty(puzzle) as isize; context .solution .ok_or(Error::Unknown) .map(|sol| (sol, (s * c + e) as usize)) } struct Context { problem: Sudoku, count: usize, solution: Option<Sudoku>, branch_score: isize, } fn recurse(mut context: &mut Context, difficulty: isize) { let problem = context.problem.clone(); let map: PossibilityMap = problem.into(); match map.next() { (None, _) => { if context.problem.is_complete() { if context.count == 0 { context.branch_score = difficulty; context.solution = Some(context.problem.clone()); } context.count += 1; } return; } (Some(index), Some(set)) => { let branch_factor = set.freedom() as isize - 1; let possible = (1..=(context.problem.order as usize).pow(2)) .filter(|v| set.contains(*v)) .collect::<Vec<_>>(); let difficulty = difficulty + branch_factor.pow(DIMENSIONS as u32); for value in possible { context .problem .substitute(index, Some(Element(value as u8))); recurse(&mut context, difficulty); if context.count > 1 { return; } } context.problem.substitute(index, None); } _ => unreachable!(), } } fn count_empty(sudoku: &Sudoku) -> usize { sudoku.elements.iter().filter(|e| e.is_none()).count() } fn calculate_c(sudoku: &Sudoku) -> usize { let order = sudoku.order; 10.0_f64.powf(f64::from(order).powf(4.0).log10().ceil()) as usize } pub fn score(sudoku: &Sudoku) -> Option<usize> { solve_and_score(&sudoku).ok().map(|(_, s)| s) } #[cfg(test)] mod tests { use crate::sol::{calculate_c, Error, PossibilityMap, PossibilitySet, Solve}; use crate::Point; use crate::Sudoku; use crate::DIMENSIONS; struct DummyPuzzle(bool); impl DummyPuzzle { fn new(solvable: bool) -> Self { Self { 0: solvable } } } impl Solve for DummyPuzzle { fn solution(&self) -> Result<Self, Error> { if self.0 { Ok(Self { 0: true }) } else { Err(Error::__TestOther) } } } #[test] fn test_is_uniquely_solvable() { let solvable = DummyPuzzle::new(true); assert_eq!(solvable.is_uniquely_solvable(), true); let unsolvable = DummyPuzzle::new(false); assert_eq!(unsolvable.is_uniquely_solvable(), false); } #[test] fn test_calculate_c() { let sudoku = Sudoku::new(3); assert_eq!(calculate_c(&sudoku), 100); let sudoku = Sudoku::new(4); assert_eq!(calculate_c(&sudoku), 1_000); let sudoku = Sudoku::new(5); assert_eq!(calculate_c(&sudoku), 1_000); let sudoku = Sudoku::new(6); assert_eq!(calculate_c(&sudoku), 10_000); } #[test] fn test_map_new() { for order in 1..6 { let map = PossibilityMap::new(order); for i in 0..(order as usize).pow(DIMENSIONS as u32 + 2) { let index = Point::unfold(i, order); let set = PossibilitySet::new(order); assert_eq!(map[index], Some(set)); } } } #[test] fn test_map_from_sudoku() { let sudoku = Sudoku::new(3); let map: PossibilityMap = sudoku.into(); for p in map.possibilities { assert_eq!(p, Some(PossibilitySet::new(3))); } } #[test] fn test_set_new() { let set = PossibilitySet::new(3); for i in 1..10 { assert!(set.contains(i)); } } #[test] fn test_set_eliminate() { let mut set = PossibilitySet::new(3); for i in 1..9 { set = set.eliminate(i).unwrap(); assert!(!set.contains(i)); } assert_eq!(set.eliminate(9), None); } #[test] fn test_set_freedom() { let mut set = PossibilitySet::new(3); for i in 1..9 { set = 
set.eliminate(i).unwrap(); assert_eq!(set.freedom(), 9 - i); } } }
use crate::sudoku::Grid; use crate::Element; use crate::Point; use crate::Sudoku; use crate::DIMENSIONS; use std::ops::{Index, IndexMut}; #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] pub enum Difficulty { #[doc(hidden)] Unplayable, Beginner, Easy, Intermediate, Difficult, Advanced, } impl From<usize> for Difficulty { fn from(score: usize) -> Self { use crate::Difficulty::*; match score { 0...49 => Unplayable, 50...150 => Beginner, 151...250 => Easy, 251...400 => Intermediate, 401...550 => Difficult, _ => Advanced, } } } #[derive(Clone, Debug)] #[allow(missing_copy_implementations)] pub enum Error { Unknown, #[doc(hidden)] __TestOther, } pub trait Solve: Sized { fn solution(&self) -> Result<Self, Error>; fn is_uniquely_solvable(&self) -> bool { self.solution().is_ok() } } pub trait Score: Solve { fn score(&self) -> Option<usize>; fn difficulty(&self) -> Option<Difficulty> { self.score().map(Into::into) } } #[derive(Clone, Copy, Debug, PartialEq)] pub struct PossibilitySet { pub values: u64, } impl PossibilitySet { pub fn new(order: u8) -> Self { let mut values = 0; for i in 1..=order.pow(2) as usize { values |= 1 << (i - 1); } Self { values } } pub fn eliminate(self, value: usize) -> Option<Self> { let values = self.values & !(1 << (value - 1)); match values { 0 => None, _ => Some(Self { values }), } } pub fn freedom(self) -> usize { let mut x = self.values; let mut n = 0; while x > 0 { x &= x - 1; n += 1; } n } pub fn contains(self, value: usize) -> bool { self.values | (1 << (value - 1)) == self.values } } #[derive(Debug)] pub struct PossibilityMap { possibilities: Vec<Option<PossibilitySet>>, order: u8, parent: Option<Sudoku>, } impl PossibilityMap { pub fn new(order: u8) -> Sel
pub fn eliminate(&mut self, index: Point, value: usize) { self[index] = self[index].and_then(|e| e.eliminate(value)); } pub fn next(&self) -> (Option<Point>, Option<PossibilitySet>) { let mut best = None; let mut best_index = None; let mut best_score = None; for index in self.points() { if let Some(element) = self[index] { if best_score.is_none() || best_score.unwrap() > element.freedom() { best = Some(element); best_index = Some(index); best_score = Some(element.freedom()); } } else if let Some(ref parent) = self.parent { if parent[index].is_none() { return (None, None); } } } (best_index, best) } } impl Index<Point> for PossibilityMap { type Output = Option<PossibilitySet>; fn index(&self, index: Point) -> &Self::Output { let index = index.fold(self.order); &self.possibilities[index] } } impl IndexMut<Point> for PossibilityMap { fn index_mut(&mut self, index: Point) -> &mut Option<PossibilitySet> { let index = index.fold(self.order); &mut self.possibilities[index] } } impl Grid for PossibilityMap { fn points(&self) -> Vec<Point> { (0..(self.order as usize).pow(2 + DIMENSIONS as u32)) .map(|p| Point::unfold(p, self.order)) .collect() } } impl From<Sudoku> for PossibilityMap { fn from(sudoku: Sudoku) -> Self { let order = sudoku.order; let mut map = PossibilityMap::new(order); for i in 0..(sudoku.order as usize).pow(2 + DIMENSIONS as u32) { let point = Point::unfold(i, order); if sudoku[point].is_some() { map[point] = None; } else { let groups = sudoku.groups(point); for group in &groups { let elements = group.elements(); for element in elements { if let Some(Element(value)) = element { map.eliminate(point, value as usize); } } } } } map.parent = Some(sudoku); map } } pub fn solve(puzzle: &Sudoku) -> Result<Sudoku, Error> { solve_and_score(puzzle).map(|(sol, _)| sol) } pub fn solve_and_score(puzzle: &Sudoku) -> Result<(Sudoku, usize), Error> { let mut context = Context { problem: puzzle.clone(), count: 0, solution: None, branch_score: 0, }; recurse(&mut context, 0); let s = context.branch_score; let c = calculate_c(puzzle) as isize; let e = count_empty(puzzle) as isize; context .solution .ok_or(Error::Unknown) .map(|sol| (sol, (s * c + e) as usize)) } struct Context { problem: Sudoku, count: usize, solution: Option<Sudoku>, branch_score: isize, } fn recurse(mut context: &mut Context, difficulty: isize) { let problem = context.problem.clone(); let map: PossibilityMap = problem.into(); match map.next() { (None, _) => { if context.problem.is_complete() { if context.count == 0 { context.branch_score = difficulty; context.solution = Some(context.problem.clone()); } context.count += 1; } return; } (Some(index), Some(set)) => { let branch_factor = set.freedom() as isize - 1; let possible = (1..=(context.problem.order as usize).pow(2)) .filter(|v| set.contains(*v)) .collect::<Vec<_>>(); let difficulty = difficulty + branch_factor.pow(DIMENSIONS as u32); for value in possible { context .problem .substitute(index, Some(Element(value as u8))); recurse(&mut context, difficulty); if context.count > 1 { return; } } context.problem.substitute(index, None); } _ => unreachable!(), } } fn count_empty(sudoku: &Sudoku) -> usize { sudoku.elements.iter().filter(|e| e.is_none()).count() } fn calculate_c(sudoku: &Sudoku) -> usize { let order = sudoku.order; 10.0_f64.powf(f64::from(order).powf(4.0).log10().ceil()) as usize } pub fn score(sudoku: &Sudoku) -> Option<usize> { solve_and_score(&sudoku).ok().map(|(_, s)| s) } #[cfg(test)] mod tests { use crate::sol::{calculate_c, Error, PossibilityMap, PossibilitySet, 
Solve}; use crate::Point; use crate::Sudoku; use crate::DIMENSIONS; struct DummyPuzzle(bool); impl DummyPuzzle { fn new(solvable: bool) -> Self { Self { 0: solvable } } } impl Solve for DummyPuzzle { fn solution(&self) -> Result<Self, Error> { if self.0 { Ok(Self { 0: true }) } else { Err(Error::__TestOther) } } } #[test] fn test_is_uniquely_solvable() { let solvable = DummyPuzzle::new(true); assert_eq!(solvable.is_uniquely_solvable(), true); let unsolvable = DummyPuzzle::new(false); assert_eq!(unsolvable.is_uniquely_solvable(), false); } #[test] fn test_calculate_c() { let sudoku = Sudoku::new(3); assert_eq!(calculate_c(&sudoku), 100); let sudoku = Sudoku::new(4); assert_eq!(calculate_c(&sudoku), 1_000); let sudoku = Sudoku::new(5); assert_eq!(calculate_c(&sudoku), 1_000); let sudoku = Sudoku::new(6); assert_eq!(calculate_c(&sudoku), 10_000); } #[test] fn test_map_new() { for order in 1..6 { let map = PossibilityMap::new(order); for i in 0..(order as usize).pow(DIMENSIONS as u32 + 2) { let index = Point::unfold(i, order); let set = PossibilitySet::new(order); assert_eq!(map[index], Some(set)); } } } #[test] fn test_map_from_sudoku() { let sudoku = Sudoku::new(3); let map: PossibilityMap = sudoku.into(); for p in map.possibilities { assert_eq!(p, Some(PossibilitySet::new(3))); } } #[test] fn test_set_new() { let set = PossibilitySet::new(3); for i in 1..10 { assert!(set.contains(i)); } } #[test] fn test_set_eliminate() { let mut set = PossibilitySet::new(3); for i in 1..9 { set = set.eliminate(i).unwrap(); assert!(!set.contains(i)); } assert_eq!(set.eliminate(9), None); } #[test] fn test_set_freedom() { let mut set = PossibilitySet::new(3); for i in 1..9 { set = set.eliminate(i).unwrap(); assert_eq!(set.freedom(), 9 - i); } } }
f { Self { possibilities: vec![ Some(PossibilitySet::new(order)); (order as usize).pow(2 + DIMENSIONS as u32) ], order, parent: None, } }
function_block-function_prefixed
[ { "content": "/// Trait to generate a puzzle.\n\n///\n\n/// Requires that the puzzle be solvable (to ensure the desired difficulty is\n\n/// attained).\n\npub trait Generate: Score + Sized {\n\n /// Generates a puzzle of the desired order and difficulty.\n\n fn generate(order: u8, difficulty: Difficulty) -> Self;\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 2, "score": 153061.59434044335 }, { "content": "fn score(matches: &clap::ArgMatches) -> Option<usize> {\n\n puzzle(matches).ok().and_then(|p| p.score())\n\n}\n", "file_path": "src/main.rs", "rank": 6, "score": 121853.22984805351 }, { "content": "#[allow(clippy::needless_range_loop)]\n\nfn grid(order: u8) -> Option<Sudoku> {\n\n let mut puzzle = Sudoku::new(order);\n\n // TODO(#14): Revisit this block when NLL lands.\n\n {\n\n let mut first_box = (1..=order.pow(2))\n\n .map(|v| Some(Element(v)))\n\n .collect::<Vec<_>>();\n\n shuffle(&mut first_box);\n\n let order = order as usize;\n\n let axis = order.pow(2);\n\n for i in 0..axis {\n\n let index = i / order * axis + i % order;\n\n puzzle.elements[index] = first_box[i];\n\n }\n\n // TODO(#13): Reduce the number of cells that are filled with backtracking.\n\n // The rest\n\n recurse(puzzle)\n\n }\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 7, "score": 118144.05636159323 }, { "content": "fn solve(matches: &clap::ArgMatches) -> Result<Sudoku, Error> {\n\n puzzle(matches).and_then(|p| p.solution().map_err(Into::into))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 116680.90892344929 }, { "content": "/// Fetch a solid color by name.\n\npub trait Name: Sized {\n\n /// Returns the color associated with the name, if it exists.\n\n fn with_name(name: &str) -> Option<Self>;\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> Name for Rgb<T> {\n\n fn with_name(name: &str) -> Option<Self> {\n\n let components: Option<(u8, u8, u8)> = match name.to_lowercase().as_ref() {\n\n \"black\" => Some((0x00, 0x00, 0x00)),\n\n \"silver\" => Some((0xc0, 0xc0, 0xc0)),\n\n \"gray\" => Some((0x80, 0x80, 0x80)),\n\n \"white\" => Some((0xff, 0xff, 0xff)),\n\n \"maroon\" => Some((0x80, 0x00, 0x00)),\n\n \"red\" => Some((0xff, 0x00, 0x00)),\n\n \"purple\" => Some((0x80, 0x00, 0x80)),\n\n \"fuchsia\" => Some((0xff, 0x00, 0xff)),\n\n \"green\" => Some((0x00, 0x80, 0x00)),\n\n \"lime\" => Some((0x00, 0xff, 0x00)),\n\n \"olive\" => Some((0x80, 0x80, 0x00)),\n\n \"yellow\" => Some((0xff, 0xff, 0x00)),\n", "file_path": "src/ui/color.rs", "rank": 9, "score": 111899.8428500927 }, { "content": "#[derive(Debug)]\n\nenum Error {\n\n Solve(SolveError),\n\n Parse(ParseError),\n\n Io(IoError),\n\n}\n\n\n\nimpl From<ParseError> for Error {\n\n fn from(error: ParseError) -> Self {\n\n Error::Parse(error)\n\n }\n\n}\n\n\n\nimpl From<SolveError> for Error {\n\n fn from(error: SolveError) -> Self {\n\n Error::Solve(error)\n\n }\n\n}\n\n\n\nimpl From<IoError> for Error {\n\n fn from(error: IoError) -> Self {\n\n Error::Io(error)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 104861.62237819681 }, { "content": "/// Makes the sudoku harder to the desired level, modifying it in-place.\n\n///\n\n/// # Notes\n\n/// No validation is performed on the passed puzzle.\n\nfn harden(mut sudoku: &mut Sudoku, target: Difficulty) -> Result<(), ()> {\n\n let current = sudoku.score().unwrap();\n\n let mut points = sudoku.points();\n\n for _ in 0..MAX_HARDEN_ITERATIONS {\n\n if let (Some(one), Some(two)) = (take_random(&mut points), take_random(&mut points)) {\n\n let (one, two) = 
(one.fold(sudoku.order), two.fold(sudoku.order));\n\n let mut puzzle = sudoku.clone();\n\n // Faster than substituting twice.\n\n puzzle.elements[one] = None;\n\n puzzle.elements[two] = None;\n\n if let Some(score) = puzzle.score() {\n\n if score > current {\n\n let difficulty: Difficulty = score.into();\n\n if difficulty > target {\n\n // We overshot the target difficulty\n\n continue;\n\n }\n\n sudoku.elements[one] = None;\n\n sudoku.elements[two] = None;\n\n return if difficulty == target {\n", "file_path": "src/gen.rs", "rank": 12, "score": 101332.21688494812 }, { "content": "fn puzzle(matches: &clap::ArgMatches) -> Result<Sudoku, Error> {\n\n let mut reader: Box<Read> = if matches.is_present(\"INPUT\") {\n\n Box::new(File::open(matches.value_of(\"INPUT\").unwrap()).expect(\"File not found.\"))\n\n } else {\n\n Box::new(stdin())\n\n };\n\n let mut puzzle = String::new();\n\n reader.read_to_string(&mut puzzle)?;\n\n puzzle.parse().map_err(Into::into)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 96238.9682387525 }, { "content": "/// Includes information about puzzle difficulty and configuration.\n\npub trait Puzzle {\n\n /// The order of the puzzle.\n\n fn order(&self) -> u8;\n\n}\n", "file_path": "src/puzzle.rs", "rank": 14, "score": 95392.64993755164 }, { "content": "fn get_order(context: &Option<&Context>) -> u8 {\n\n context.map(|c| c.game.current.order).unwrap_or(3)\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 15, "score": 93934.89513470214 }, { "content": "/// Represents an *n*-dimensional grid of values, indexable via\n\n/// [`Point`](struct.Point.html).\n\npub trait Grid: Index<Point> {\n\n /// Returns all points in the grid.\n\n ///\n\n /// Useful for enumeration with `Iterator::zip`.\n\n fn points(&self) -> Vec<Point>;\n\n}\n\n\n\nimpl Sudoku {\n\n /// Constructs a new sudoku of the specified order.\n\n ///\n\n /// This method reserves space in memory for the puzzle's elements.\n\n ///\n\n /// # Notes\n\n /// This method **does not** generate a valid, uniquely solvable sudoku.\n\n /// If you wish to generate such a sudoku (which you likely do), use\n\n /// [`Sudoku::generate`](#method.generate).\n\n pub fn new(order: u8) -> Self {\n\n Self {\n\n order,\n\n elements: vec![None; (order as usize).pow(2 + DIMENSIONS as u32)],\n", "file_path": "src/sudoku.rs", "rank": 16, "score": 84990.40468406194 }, { "content": "#[rustfmt::skip]\n\nfn main() -> Result<(), Error> {\n\n let matches = clap_app!(ku =>\n\n (setting: clap::AppSettings::ArgRequiredElseHelp)\n\n (setting: clap::AppSettings::VersionlessSubcommands)\n\n (about: \"A sudoku generator/solver/manipulator.\")\n\n (@subcommand solve =>\n\n (about: \"Solves the given sudoku.\")\n\n (@arg INPUT: \"Sets the input file (defaults to stdin).\")\n\n )\n\n (@subcommand score =>\n\n (about: \"Scores the given sudoku.\")\n\n (@arg INPUT: \"Sets the input file (defaults to stdin).\")\n\n )\n\n (@subcommand generate =>\n\n (about: \"Generates a sudoku.\")\n\n (@arg ORDER: \"The order of sudoku to be generated (defaults to 3).\")\n\n )\n\n ).get_matches();\n\n if let Some(matches) = matches.subcommand_matches(\"solve\") {\n\n let solution = solve(&matches)?;\n", "file_path": "src/main.rs", "rank": 17, "score": 84247.67441789659 }, { "content": "fn take_random<T>(values: &mut Vec<T>) -> Option<T> {\n\n let mut indices = (0..values.len()).collect::<Vec<_>>();\n\n shuffle(&mut indices);\n\n indices.get(0).map(|index| values.remove(*index))\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 18, "score": 84076.99490887814 }, 
{ "content": "pub fn fill_box(\n\n ctx: &CanvasRenderingContext2d,\n\n context: &Context,\n\n point: Point,\n\n color: &'static str,\n\n) {\n\n let (left, top) = grid_origin(&Some(context));\n\n let axis = get_order(&Some(context)).pow(2);\n\n let length = grid_length();\n\n let spacing = length / (axis as f64);\n\n ctx.set_fill_style_color(color);\n\n ctx.fill_rect(\n\n left + point[0] as f64 * spacing,\n\n top + point[1] as f64 * spacing,\n\n spacing,\n\n spacing,\n\n );\n\n ctx.set_fill_style_color(BG);\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 19, "score": 82816.62766848656 }, { "content": "pub fn render(context: Option<&Context>) {\n\n let canvas: CanvasElement = get_canvas();\n\n canvas.set_width(window().inner_width() as u32);\n\n canvas.set_height(window().inner_height() as u32);\n\n let ctx = canvas.get_context::<CanvasRenderingContext2d>().unwrap();\n\n ctx.set_fill_style_color(BG);\n\n let width: f64 = canvas.width().into();\n\n let height: f64 = canvas.height().into();\n\n\n\n ctx.fill_rect(0.0, 0.0, width, height);\n\n ctx.set_stroke_style_color(GRID);\n\n\n\n let (left, top) = grid_origin(&context);\n\n let order = get_order(&context);\n\n let axis = order.pow(2);\n\n let length = grid_length();\n\n let spacing = length / (axis as f64);\n\n\n\n for i in 0..=axis {\n\n ctx.set_line_width(if i % order == 0 { 4.0 } else { 2.0 });\n", "file_path": "web/src/view.rs", "rank": 20, "score": 72332.86741289144 }, { "content": "#[cfg(feature = \"use_stdweb\")]\n\nfn shuffle<T>(vec: &mut Vec<T>) {\n\n let len = vec.len() as u32;\n\n for i in 0..len {\n\n let j = len - i;\n\n let index: u32 = js! { return Math.floor(Math.random() * @{j}); }\n\n .try_into()\n\n .unwrap();\n\n vec.swap(index as usize, (j - 1) as usize);\n\n }\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 21, "score": 72127.43790754346 }, { "content": "pub fn play(context: Rc<RefCell<Context>>) {\n\n render(Some(&context.borrow()));\n\n let resize_context = context.clone();\n\n let click_context = context.clone();\n\n let key_context = context.clone();\n\n window().add_event_listener(move |_: ResizeEvent| {\n\n let context = &resize_context;\n\n render(Some(&context.borrow()));\n\n });\n\n let canvas = get_canvas();\n\n document().add_event_listener(move |event: KeyDownEvent| {\n\n if let Ok(mut context) = key_context.try_borrow_mut() {\n\n if let Some(point) = context.focused {\n\n match event.key().as_str() {\n\n \"Backspace\" | \"Delete\" => {\n\n event.prevent_default();\n\n if context.game.is_mutable(point) {\n\n let _old = context.game.remove(point);\n\n render(Some(&context));\n\n }\n", "file_path": "web/src/view.rs", "rank": 22, "score": 67268.79214598176 }, { "content": "#[cfg_attr(feature = \"2D\", test)]\n\n#[cfg_attr(feature = \"2D\", ignore)]\n\n#[cfg(feature = \"2D\")]\n\nfn test_solve_o4_2d() {\n\n let puzzle: Sudoku = include_str!(\"../tests/sudokus/solvable/2D-O4.txt\")\n\n .parse()\n\n .unwrap();\n\n let solution = puzzle.solution();\n\n assert!(solution.is_ok());\n\n}\n\n\n", "file_path": "tests/sol.rs", "rank": 23, "score": 66008.7843338275 }, { "content": "#[cfg_attr(feature = \"2D\", test)]\n\n#[cfg(feature = \"2D\")]\n\nfn test_solve_o3_2d() {\n\n let puzzle: Sudoku = include_str!(\"../tests/sudokus/solvable/2D-O3.txt\")\n\n .parse()\n\n .unwrap();\n\n let solution = puzzle.solution();\n\n assert!(solution.is_ok());\n\n}\n\n\n", "file_path": "tests/sol.rs", "rank": 24, "score": 66008.7843338275 }, { "content": "/// A construct that can be treated as encoding a general color.\n\n///\n\n/// 
By nature, this requires the construct to encode an alpha value.\n\npub trait Color<T: NumCast + Bounded>: Into<Rgba<T>> + Into<Hsla<T>> {\n\n /// Converts the color to RGBA.\n\n fn to_rgba(self) -> Rgba<T> {\n\n self.into()\n\n }\n\n /// Converts the color to HSLA.\n\n fn to_hsla(self) -> Hsla<T> {\n\n self.into()\n\n }\n\n}\n\n\n", "file_path": "src/ui/color.rs", "rank": 25, "score": 61684.05842024529 }, { "content": "/// A construct that can be treated as encoding a solid color.\n\npub trait SolidColor<T: NumCast + Bounded>: Color<T> + Into<Rgb<T>> + Into<Hsl<T>> {\n\n /// Converts the color to RGB.\n\n fn to_rgb(self) -> Rgb<T> {\n\n self.into()\n\n }\n\n /// Converts the color to HSL.\n\n fn to_hsl(self) -> Hsl<T> {\n\n self.into()\n\n }\n\n /// Converts the color to RGBA, adding a full alpha value.\n\n fn to_rgba(self) -> Rgba<T> {\n\n let rgb: Rgb<T> = self.into();\n\n Rgba::<T>(rgb.0, rgb.1, rgb.2, T::max_value())\n\n }\n\n /// Converts the color to HSLA, adding a full alpha value.\n\n fn to_hsla(self) -> Hsla<T> {\n\n let hsl: Hsl<T> = self.into();\n\n Hsla::<T>(hsl.0, hsl.1, hsl.2, T::max_value())\n\n }\n\n}\n\n\n", "file_path": "src/ui/color.rs", "rank": 28, "score": 56144.53902145558 }, { "content": "#[inline(always)]\n\nfn _min<T: PartialOrd>(l: T, r: T) -> T {\n\n if r < l {\n\n r\n\n } else {\n\n l\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Rgb<T>> for Hsl<T> {\n\n fn from(other: Rgb<T>) -> Self {\n\n let triple = (\n\n other.0.to_f64().unwrap_or_default(),\n\n other.1.to_f64().unwrap_or_default(),\n\n other.2.to_f64().unwrap_or_default(),\n\n );\n\n let max = T::max_value().to_f64().unwrap_or(1.0);\n\n let (r, g, b) = (triple.0 / max, triple.1 / max, triple.2 / max);\n\n let max = _max(_max(r, g), b);\n\n let min = _min(_min(r, g), b);\n\n let delta = max - min;\n", "file_path": "src/ui/color.rs", "rank": 30, "score": 45585.419935398284 }, { "content": "#[inline(always)]\n\nfn _max<T: PartialOrd>(l: T, r: T) -> T {\n\n if r > l {\n\n r\n\n } else {\n\n l\n\n }\n\n}\n\n\n", "file_path": "src/ui/color.rs", "rank": 31, "score": 45585.419935398284 }, { "content": "fn main() {\n\n render(None);\n\n let context = Context::new(3, Difficulty::Advanced);\n\n let context = Rc::new(RefCell::new(context));\n\n play(context);\n\n}\n", "file_path": "web/src/main.rs", "rank": 32, "score": 41825.93874928728 }, { "content": "#[cfg_attr(feature = \"2D\", test)]\n\n#[cfg_attr(feature = \"2D\", ignore)]\n\n#[cfg(feature = \"2D\")]\n\nfn test_uniquely_solveable() {\n\n let puzzle: Sudoku = include_str!(\"../tests/sudokus/solvable/2D-O3.txt\")\n\n .parse()\n\n .unwrap();\n\n assert!(puzzle.is_uniquely_solvable());\n\n let puzzle: Sudoku = include_str!(\"../tests/sudokus/solvable/2D-O4.txt\")\n\n .parse()\n\n .unwrap();\n\n assert!(puzzle.is_uniquely_solvable());\n\n}\n\n\n", "file_path": "tests/sol.rs", "rank": 33, "score": 40565.40453002915 }, { "content": "#[cfg_attr(feature = \"2D\", test)]\n\n#[cfg(feature = \"2D\")]\n\nfn test_group_is_complete_and_is_valid() {\n\n let solution = include_str!(\"../tests/sudokus/solvable/2D-O3.txt\")\n\n .parse::<Sudoku>()\n\n .unwrap()\n\n .solution()\n\n .unwrap();\n\n for point in solution.points() {\n\n for group in solution.groups(point).iter() {\n\n assert!(group.is_complete());\n\n assert!(group.is_valid());\n\n }\n\n }\n\n}\n", "file_path": "tests/sol.rs", "rank": 34, "score": 39422.47907802504 }, { "content": "fn grid_length() -> f64 {\n\n let (width, height) = (\n\n window().inner_width() as f64,\n\n 
window().inner_height() as f64,\n\n );\n\n min(0.9 * width, 0.9 * height)\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 35, "score": 38232.8806055945 }, { "content": "fn get_canvas() -> CanvasElement {\n\n document()\n\n .get_element_by_id(\"canvas\")\n\n .unwrap()\n\n .try_into()\n\n .unwrap()\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 36, "score": 37191.83806157132 }, { "content": "fn recurse(puzzle: Sudoku) -> Option<Sudoku> {\n\n let map: PossibilityMap = puzzle.clone().into();\n\n match map.next() {\n\n (None, _) => {\n\n if puzzle.is_complete() {\n\n Some(puzzle)\n\n } else {\n\n None\n\n }\n\n }\n\n (Some(index), Some(set)) => {\n\n let mut possibilities = (1..=(puzzle.order as usize).pow(2))\n\n .filter(|v| set.contains(*v))\n\n .collect::<Vec<_>>();\n\n while let Some(candidate) = take_random(&mut possibilities) {\n\n let mut puzzle = puzzle.clone();\n\n puzzle.substitute(index, Some(Element(candidate as u8)));\n\n let solution = recurse(puzzle);\n\n if solution.is_some() {\n\n return solution;\n\n }\n\n }\n\n None\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n/// Creates a randomized sudoku grid of the specified order.\n", "file_path": "src/gen.rs", "rank": 37, "score": 34487.94043206886 }, { "content": "#[cfg_attr(rustfmt, rustfmt_skip)]\n\nfn min(l: f64, r: f64) -> f64 {\n\n if l > r { r } else { l }\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 38, "score": 33792.59303244494 }, { "content": "// https://stackoverflow.com/a/37682288\n\nfn clone_into_array<A, T>(slice: &[T]) -> A\n\nwhere\n\n A: Default + AsMut<[T]>,\n\n T: Clone,\n\n{\n\n let mut a = Default::default();\n\n <A as AsMut<[T]>>::as_mut(&mut a).clone_from_slice(slice);\n\n a\n\n}\n\n\n\nimpl Index<Point> for Sudoku {\n\n type Output = Option<Element>;\n\n fn index(&self, index: Point) -> &Self::Output {\n\n &self.elements[index.fold(self.order)]\n\n }\n\n}\n\n\n\nimpl Puzzle for Sudoku {\n\n fn order(&self) -> u8 {\n\n self.order\n", "file_path": "src/sudoku.rs", "rank": 39, "score": 33792.59303244494 }, { "content": "fn grid_origin(context: &Option<&Context>) -> (f64, f64) {\n\n let axis = get_order(&context).pow(2);\n\n\n\n let (width, height) = (\n\n window().inner_width() as f64,\n\n window().inner_height() as f64,\n\n );\n\n\n\n let center = (width / 2.0, height / 2.0);\n\n let length = grid_length();\n\n let spacing = length / (axis as f64);\n\n\n\n let left = center.0 - (axis as f64) * spacing / 2.0;\n\n let top = center.1 - (axis as f64) * spacing / 2.0;\n\n\n\n (left, top)\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 40, "score": 31160.59929322866 }, { "content": "fn point_for_click(context: &Context, click: &ClickEvent) -> Option<Point> {\n\n let origin = grid_origin(&Some(context));\n\n let length = grid_length();\n\n let max = (origin.0 + length, origin.1 + length);\n\n let locus = (click.client_x() as f64, click.client_y() as f64);\n\n if locus.0 < origin.0 || locus.0 > max.0 || locus.1 < origin.1 || locus.1 > max.1 {\n\n None\n\n } else {\n\n let order = context.game.current.order as f64;\n\n let axis = order.powf(2.0);\n\n let specific = length / axis;\n\n let x = ((locus.0 - origin.0) / specific).floor() as u8;\n\n let y = ((locus.1 - origin.1) / specific).floor() as u8;\n\n Some(Point([x, y]))\n\n }\n\n}\n\n\n", "file_path": "web/src/view.rs", "rank": 41, "score": 29105.51463677868 }, { "content": " impl Default for Behavior {\n\n fn default() -> Self {\n\n Self {\n\n allow_incorrect_answers: false,\n\n }\n\n }\n\n }\n\n\n\n /// Specifies puzzle generation 
behavior, such as the default sudoku\n\n /// difficulty and order.\n\n #[derive(Clone, Copy, Debug)]\n\n pub struct Generation {\n\n /// The default puzzle order.\n\n pub default_order: u8,\n\n /// The default puzzle difficulty.\n\n pub default_difficulty: Difficulty,\n\n }\n\n\n\n impl Default for Generation {\n\n fn default() -> Self {\n\n Self {\n\n default_order: 3,\n\n default_difficulty: Difficulty::Intermediate,\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/ui/model.rs", "rank": 45, "score": 22.145108532544953 }, { "content": "use crate::sol::{score, solve, Error as SolveError};\n\nuse crate::Puzzle;\n\nuse crate::Score;\n\nuse crate::Solve;\n\nuse crate::DIMENSIONS;\n\n\n\nuse std::{\n\n fmt,\n\n ops::{Index, IndexMut},\n\n str::FromStr,\n\n};\n\n\n\n/// Represents a single sudoku \"square.\"\n\n///\n\n/// The quantum of the sudoku.\n\n#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Element(pub u8);\n\n\n\n/// A subdivision of the main sudoku; the smallest grouping to which rules are\n\n/// applied.\n", "file_path": "src/sudoku.rs", "rank": 47, "score": 19.640358413185535 }, { "content": " println!(\"{}\", solution);\n\n } else if let Some(matches) = matches.subcommand_matches(\"score\") {\n\n if let Some(score) = score(&matches) {\n\n println!(\"Score: {}\", score);\n\n } else {\n\n println!(\"Couldn't score puzzle.\");\n\n }\n\n } else if let Some(matches) = matches.subcommand_matches(\"generate\") {\n\n let order = matches.value_of(\"ORDER\").and_then(|s: &str| s.parse().ok()).unwrap_or(3);\n\n println!(\"{:X}\", Sudoku::generate(order, Difficulty::Beginner));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 18.860093264036934 }, { "content": " }\n\n}\n\n\n\nimpl Solve for Sudoku {\n\n fn solution(&self) -> Result<Self, SolveError> {\n\n solve(self)\n\n }\n\n}\n\n\n\nimpl Score for Sudoku {\n\n fn score(&self) -> Option<usize> {\n\n score(self)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"2D\")]\n\nmacro_rules! 
sudoku_fmt {\n\n ($style:ident) => {\n\n impl fmt::$style for Sudoku {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/sudoku.rs", "rank": 50, "score": 17.73337129852326 }, { "content": " pub fn elements(&self) -> Vec<Option<Element>> {\n\n use self::Group::*;\n\n match self {\n\n Box(elements) | Stack(elements) | Band(elements) => elements.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Group {\n\n fn default() -> Self {\n\n Group::Box(vec![])\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\n/// A (partial) grid of [elements](struct.Element.html).\n\npub struct Sudoku {\n\n /// The [order](trait.Puzzle.html#method.order) of this sudoku.\n\n pub order: u8,\n\n /// The [elements](struct.Element.html) composing this sudoku.\n", "file_path": "src/sudoku.rs", "rank": 51, "score": 17.256621722576202 }, { "content": "mod sol;\n\nmod sudoku;\n\n\n\n#[cfg(feature = \"ui\")]\n\nextern crate num_traits;\n\n#[cfg(feature = \"ui\")]\n\npub mod ui;\n\n\n\npub use crate::gen::Generate;\n\npub use crate::puzzle::Puzzle;\n\npub use crate::sol::{Difficulty, Error as SolveError, Score, Solve};\n\npub use crate::sudoku::{Element, Grid, Group, ParseError, Point, Sudoku};\n\n\n\npub use crate::dimensions::DIMENSIONS;\n", "file_path": "src/lib.rs", "rank": 52, "score": 17.116614572601712 }, { "content": " Point([0; DIMENSIONS])\n\n }\n\n}\n\n\n\nimpl Index<usize> for Point {\n\n type Output = u8;\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.0[index]\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for Point {\n\n fn index_mut(&mut self, index: usize) -> &mut u8 {\n\n &mut self.0[index]\n\n }\n\n}\n\n\n\nimpl fmt::Display for Point {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"(\")?;\n\n for i in 0..DIMENSIONS - 1 {\n\n write!(f, \"{}, \", self[i])?;\n\n }\n\n write!(f, \"{})\", self[DIMENSIONS - 1])\n\n }\n\n}\n\n\n\n/// Represents an *n*-dimensional grid of values, indexable via\n\n/// [`Point`](struct.Point.html).\n", "file_path": "src/sudoku.rs", "rank": 53, "score": 16.77687852322415 }, { "content": "#[macro_use]\n\nextern crate stdweb;\n\nextern crate sudoku;\n\n\n\nuse sudoku::{ui::model::Game, Difficulty, Point};\n\n\n\nuse std::{cell::RefCell, rc::Rc};\n\n\n\n/// Represents the greater context of the current view state.\n\n// Because this will contain references that are platform-specific, this lives here, not in ku::ui.\n\npub struct Context {\n\n game: Game,\n\n focused: Option<Point>,\n\n}\n\n\n\nimpl Context {\n\n /// Constructs a context with a new game of the specified order and difficulty.\n\n pub fn new(order: u8, difficulty: Difficulty) -> Self {\n\n Self {\n\n game: Game::new(order, difficulty),\n\n focused: None,\n\n }\n\n }\n\n}\n\n\n\nmod view;\n\n\n\nuse view::{play, render};\n\n\n", "file_path": "web/src/main.rs", "rank": 54, "score": 16.58735957587689 }, { "content": "#[derive(Clone, Copy, Debug)]\n\npub enum ParseError {\n\n /// Represents a grid with differing width and height.\n\n UnequalDimensions,\n\n /// Represents the presence of a value too large for the puzzle's\n\n /// dimensions.\n\n ///\n\n /// The associated values are the large value and its would-be location in\n\n /// the puzzle.\n\n LargeValue(u8, Point),\n\n /// Represents a grid with a non-perfect-square axial length.\n\n NonSquareAxis,\n\n}\n\n\n\n// TODO((#7): Higher dimensions\n\n#[cfg(feature = \"2D\")]\n\nimpl FromStr for Sudoku {\n\n type Err = ParseError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut 
rows = s\n", "file_path": "src/sudoku.rs", "rank": 55, "score": 16.493680958722827 }, { "content": " Ok(())\n\n } else {\n\n harden(&mut sudoku, target)\n\n };\n\n }\n\n }\n\n }\n\n }\n\n Err(())\n\n}\n\n\n\nimpl Generate for Sudoku {\n\n fn generate(order: u8, difficulty: Difficulty) -> Self {\n\n let mut puzzle = grid(order).unwrap();\n\n let _ = harden(&mut puzzle, difficulty);\n\n puzzle\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/gen.rs", "rank": 56, "score": 16.47939642068903 }, { "content": " let mut sum = 0;\n\n for i in 0..DIMENSIONS {\n\n sum += usize::from(self[i]) * axis.pow(i as u32);\n\n }\n\n sum\n\n }\n\n\n\n /// Decompresses a single coordinate into an *n*-dimensional point.\n\n ///\n\n /// Inverse of [`Point::fold`](#method.fold).\n\n pub fn unfold(value: usize, order: u8) -> Self {\n\n let mut total = value;\n\n let axis = (order as usize).pow(2);\n\n let mut point = [0; DIMENSIONS];\n\n for i in 0..DIMENSIONS {\n\n let j = DIMENSIONS - i - 1;\n\n let discriminant = axis.pow(j as u32);\n\n let dim = total / discriminant;\n\n point[j] = dim as u8;\n\n total %= discriminant;\n", "file_path": "src/sudoku.rs", "rank": 57, "score": 16.395263697123386 }, { "content": " }\n\n Point(point)\n\n }\n\n\n\n /// Snaps a point to the grid (returns the upper-left corner of the box).\n\n pub fn snap(self, order: u8) -> Self {\n\n let mut point = self;\n\n for i in 0..DIMENSIONS {\n\n point[i] = self[i] - self[i] % order;\n\n }\n\n point\n\n }\n\n\n\n /// Creates a point with the given x-coordinate and all other coordinates\n\n /// zero.\n\n pub fn with_x(value: u8) -> Self {\n\n let mut point = [0; DIMENSIONS];\n\n point[0] = value;\n\n Point(point)\n\n }\n", "file_path": "src/sudoku.rs", "rank": 59, "score": 16.32482173817508 }, { "content": "#[macro_use]\n\nextern crate clap;\n\nextern crate sudoku;\n\n\n\nuse std::{\n\n fs::File,\n\n io::{stdin, Error as IoError, Read},\n\n};\n\n\n\nuse sudoku::{Difficulty, Generate, ParseError, Score, Solve, SolveError, Sudoku};\n\n\n\n#[derive(Debug)]\n", "file_path": "src/main.rs", "rank": 60, "score": 16.260269312562826 }, { "content": " let order = self.order;\n\n let axis = order.pow(2);\n\n for y in 0..axis {\n\n for x in 0..axis {\n\n let element = self[Point([x, y])];\n\n match element {\n\n Some(Element(mut value)) => {\n\n if value > 9 {\n\n value -= 1;\n\n if value == 9 {\n\n value = 0;\n\n }\n\n }\n\n value.fmt(f)?;\n\n }\n\n None => {\n\n write!(f, \"_\")?;\n\n }\n\n }\n\n if x != axis - 1 {\n", "file_path": "src/sudoku.rs", "rank": 64, "score": 14.624660625027554 }, { "content": "\n\n /// Creates a point with the given y-coordinate and all other coordinates\n\n /// zero.\n\n pub fn with_y(value: u8) -> Self {\n\n let mut point = [0; DIMENSIONS];\n\n point[1] = value;\n\n Point(point)\n\n }\n\n\n\n #[cfg(feature = \"3D\")]\n\n /// Creates a point with the given z-coordinate and all other coordinates\n\n /// zero.\n\n pub fn with_z(value: u8) -> Self {\n\n let mut point = [0; DIMENSIONS];\n\n point[2] = value;\n\n Point(point)\n\n }\n\n\n\n /// The point with all coordinates identically zero.\n\n pub fn origin() -> Self {\n", "file_path": "src/sudoku.rs", "rank": 66, "score": 14.461506653662987 }, { "content": " pub elements: Vec<Option<Element>>,\n\n}\n\n\n\n/// Specifies a sudoku element's location in space.\n\n///\n\n/// The point is fully specified in `DIMENSIONS` dimensions.\n\n///\n\n/// # Coordinate System\n\n/// The coordinate system used in this library sets the origin in the top-left\n\n/// corner, with increasing x to 
the right and increasing y downward.\n\n///\n\n/// Additional axes (if applicable) follow the right-hand rule.\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Point(pub [u8; DIMENSIONS]);\n\nimpl Point {\n\n /// Compresses an *n*-dimensional point to a single coordinate.\n\n ///\n\n /// Inverse of [`Point::unfold`](#method.unfold).\n\n pub fn fold(self, order: u8) -> usize {\n\n let axis = (order as usize).pow(2);\n", "file_path": "src/sudoku.rs", "rank": 67, "score": 14.315017841877435 }, { "content": "}\n\n\n\nimpl Game {\n\n /// Creates a new game with a sudoku of the specified order and difficulty.\n\n pub fn new(order: u8, difficulty: Difficulty) -> Self {\n\n let problem = Sudoku::generate(order, difficulty);\n\n let current = problem.clone();\n\n let solution = problem.solution().unwrap();\n\n Self {\n\n problem,\n\n current,\n\n solution,\n\n moves: 0,\n\n }\n\n }\n\n /// Returns the points relevant to the selection (for e.g. highlighting).\n\n ///\n\n /// The order of these points is intentionally left unspecified.\n\n pub fn relevant_points(&self, point: Point) -> Vec<Point> {\n\n self.problem.group_indices(point)\n", "file_path": "src/ui/model.rs", "rank": 68, "score": 14.206418806070463 }, { "content": "\n\n/// Tools for managing the user's preferences.\n\npub mod config {\n\n use crate::Difficulty;\n\n\n\n /// Monolithic struct containing all user-configurable preferences.\n\n #[derive(Clone, Copy, Debug, Default)]\n\n pub struct Preferences {\n\n behavior: Behavior,\n\n generation: Generation,\n\n }\n\n\n\n /// Specifies in-game behavior, such as what to do when the user answers\n\n /// incorrectly.\n\n #[derive(Clone, Copy, Debug)]\n\n pub struct Behavior {\n\n /// Whether the user should be allowed to answer incorrectly.\n\n pub allow_incorrect_answers: bool,\n\n }\n\n\n", "file_path": "src/ui/model.rs", "rank": 69, "score": 13.382900907373237 }, { "content": "//! 
Constructs relevant to implementating game logic.\n\n\n\nuse crate::Difficulty;\n\nuse crate::Element;\n\nuse crate::Generate;\n\nuse crate::Grid;\n\nuse crate::Point;\n\nuse crate::Solve;\n\nuse crate::Sudoku;\n\n\n\n/// Represents an in-progress game.\n\n#[derive(Debug)]\n\npub struct Game {\n\n problem: Sudoku,\n\n /// The current state of the game.\n\n pub current: Sudoku,\n\n /// The solution for this game.\n\n pub solution: Sudoku,\n\n /// The number of moves performed so far.\n\n pub moves: usize,\n", "file_path": "src/ui/model.rs", "rank": 70, "score": 13.270963806161868 }, { "content": "#[cfg(feature = \"use_rand\")]\n\nuse rand::{thread_rng, Rng};\n\n#[cfg(feature = \"use_stdweb\")]\n\nuse stdweb::{__js_raw_asm, _js_impl, js, unstable::TryInto};\n\n\n\nuse crate::sol::PossibilityMap;\n\nuse crate::Difficulty;\n\nuse crate::Element;\n\nuse crate::Grid;\n\nuse crate::Score;\n\nuse crate::Sudoku;\n\n\n\n/// The maximum number of times the hardening algorithm will try to make a\n\n/// harder puzzle in a single pass.\n\nconst MAX_HARDEN_ITERATIONS: u8 = 20;\n\n\n\n/// Trait to generate a puzzle.\n\n///\n\n/// Requires that the puzzle be solvable (to ensure the desired difficulty is\n\n/// attained).\n", "file_path": "src/gen.rs", "rank": 71, "score": 12.990892087682738 }, { "content": " for point in band {\n\n points.push(point);\n\n }\n\n }\n\n points.dedup();\n\n points\n\n }\n\n\n\n /// Places the specified value (or lack thereof) at the specified index,\n\n /// modifying in-place.\n\n pub fn substitute(&mut self, index: Point, value: Option<Element>) {\n\n self.elements[index.fold(self.order)] = value;\n\n }\n\n}\n\n\n\nimpl Grid for Sudoku {\n\n fn points(&self) -> Vec<Point> {\n\n (0..(self.order as usize).pow(2 + DIMENSIONS as u32))\n\n .map(|p| Point::unfold(p, self.order))\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "src/sudoku.rs", "rank": 72, "score": 12.775554768583309 }, { "content": " if row.len() != axis {\n\n return Err(ParseError::UnequalDimensions);\n\n }\n\n for (i, elem) in row.iter().enumerate().take(axis) {\n\n if let Some(&Element(value)) = elem.as_ref() {\n\n if value > axis as u8 {\n\n return Err(ParseError::LargeValue(value, Point([i as u8, j as u8])));\n\n }\n\n }\n\n elements.push(*elem);\n\n }\n\n }\n\n Ok(Sudoku {\n\n order: order as u8,\n\n elements,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/sudoku.rs", "rank": 73, "score": 12.08389522164098 }, { "content": " }\n\n key => {\n\n if let Ok(value) = key.parse::<u8>() {\n\n let order = get_order(&Some(&context));\n\n if value > 0 && value <= order.pow(2) {\n\n let element = Element(value);\n\n if context.game.insertion_is_correct(point, element)\n\n || cfg!(feature = \"allow_incorrect\")\n\n {\n\n context.game.insert(point, element);\n\n render(Some(&context));\n\n if context.game.current == context.game.solution {\n\n let congrats = format!(\n\n \"Sudoku solved in {} moves!\",\n\n context.game.moves\n\n );\n\n js! 
{ alert(@{congrats}); }\n\n context.game = Game::new(\n\n context.game.current.order,\n\n Difficulty::Advanced,\n", "file_path": "web/src/view.rs", "rank": 74, "score": 11.997603748895687 }, { "content": "}\n\n\n\n/// A color specified using a name.\n\n#[derive(Clone, Debug)]\n\npub struct Named(String);\n\n\n\n/// A color specified using red, green, and blue components.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct Rgb<T>(T, T, T);\n\n\n\n/// A color specified using red, green, blue, and alpha components.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct Rgba<T>(T, T, T, T);\n\n\n\n/// A color specified using hue, saturation, and lightness components.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct Hsl<T>(T, T, T);\n\n\n\n/// A color specified using hue, saturation, lightness, and alpha components.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n", "file_path": "src/ui/color.rs", "rank": 75, "score": 11.53936939382101 }, { "content": " }\n\n /// Whether the proposed change is correct (according to the stored\n\n /// solution).\n\n pub fn insertion_is_correct(&self, point: Point, value: Element) -> bool {\n\n self.solution[point] == Some(value)\n\n }\n\n /// Updates the game model to reflect the insertion.\n\n ///\n\n /// # Notes\n\n /// No validation of the insertion is made; use\n\n /// [`insertion_is_valid`](#method.insertion_is_valid) to double-check the\n\n /// change before insertion (and check whether invalid insertions\n\n /// should be allowed) before commiting.\n\n pub fn insert(&mut self, point: Point, value: Element) {\n\n self.current.substitute(point, Some(value));\n\n self.moves += 1;\n\n }\n\n /// Removes the indexed element from the puzzle, returning the old value\n\n /// (if applicable).\n\n pub fn remove(&mut self, point: Point) -> Option<Element> {\n", "file_path": "src/ui/model.rs", "rank": 76, "score": 10.449548074586026 }, { "content": " elements.sort();\n\n elements.dedup();\n\n elements.len() == len\n\n }\n\n /// Whether a group is complete.\n\n ///\n\n /// A group is considered complete if it contains every possible element\n\n /// value exactly once.\n\n pub fn is_complete(&self) -> bool {\n\n let elements = self.elements();\n\n let len = elements.len();\n\n let mut elements = elements\n\n .into_iter()\n\n .filter(Option::is_some)\n\n .collect::<Vec<_>>();\n\n elements.sort();\n\n elements.dedup();\n\n elements.len() == len\n\n }\n\n /// Returns an owned copy of the group's constituent elements.\n", "file_path": "src/sudoku.rs", "rank": 78, "score": 10.12420243164383 }, { "content": " .split('\\n')\n\n .map(|row| {\n\n row.split(' ')\n\n .map(|cell| cell.parse().ok().map(Element))\n\n .collect::<Vec<_>>()\n\n })\n\n .collect::<Vec<_>>();\n\n let order = (rows.len() as f64).sqrt() as usize;\n\n if rows.len() == order * order + 1 {\n\n let last = rows.pop().unwrap();\n\n if last.len() != 1 || last[0] != None {\n\n return Err(ParseError::NonSquareAxis);\n\n }\n\n }\n\n let axis = rows.len();\n\n if order * order != axis {\n\n return Err(ParseError::NonSquareAxis);\n\n }\n\n let mut elements = Vec::with_capacity(axis.pow(2));\n\n for (j, row) in rows.iter().enumerate().take(axis) {\n", "file_path": "src/sudoku.rs", "rank": 79, "score": 9.746073388922426 }, { "content": " self.moves += 1;\n\n let value = self.current[point];\n\n self.current.substitute(point, None);\n\n value\n\n }\n\n /// Returns all points associated with this game.\n\n pub fn points(&self) -> Vec<Point> {\n\n self.current.points()\n\n }\n\n /// Returns whether the value 
at a given point was inserted by the user\n\n /// (and is therefore mutable).\n\n ///\n\n /// In the case that there is no value at the given index, this method\n\n /// returns `true`. Thus, this method can be considered to return\n\n /// whether the original generated puzzle contained a supplied value at\n\n /// the given point.\n\n pub fn is_mutable(&self, point: Point) -> bool {\n\n self.problem[point].is_none()\n\n }\n\n}\n", "file_path": "src/ui/model.rs", "rank": 80, "score": 9.617963527195839 }, { "content": " /// ### Rule\n\n /// Each band may contain each element value only once.\n\n ///\n\n /// ### Dimensionality\n\n /// In *n* dimensions, `n - 1` bands apply to each element.\n\n /// Each is linearly independent from the others and from the relevant\n\n /// stack.\n\n Band(Vec<Option<Element>>),\n\n}\n\n\n\nimpl Group {\n\n /// Whether a group is valid (contains no errors).\n\n ///\n\n /// A group is considered valid if it contains only unique elements\n\n /// (ignoring empty elements).\n\n pub fn is_valid(&self) -> bool {\n\n let elements = self.elements();\n\n let elements = elements.iter().filter(|e| e.is_some()).collect::<Vec<_>>();\n\n let len = elements.len();\n\n let mut elements = elements.into_iter().collect::<Vec<_>>();\n", "file_path": "src/sudoku.rs", "rank": 81, "score": 9.246599143581065 }, { "content": "//! A sudoku crate.\n\n\n\n#![warn(\n\n missing_copy_implementations,\n\n missing_debug_implementations,\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates,\n\n unused_import_braces,\n\n unused_qualifications,\n\n unused_results\n\n)]\n\n#![deny(missing_docs)]\n\n\n\n#[cfg(all(feature = \"use_stdweb\", feature = \"use_rand\"))]\n\ncompile_error!(\"use_stdweb and use_rand are mutually exclusive.\");\n\n\n\nmod dimensions;\n\nmod gen;\n\nmod puzzle;\n", "file_path": "src/lib.rs", "rank": 83, "score": 8.395905645839912 }, { "content": " && Some(Element(value)) != focused_value\n\n {\n\n TEXT\n\n } else {\n\n &colors[(value - 1) as usize]\n\n };\n\n ctx.set_fill_style_color(color);\n\n ctx.fill_text(\n\n &format!(\"{}\", value),\n\n left + spacing * (x as f64 + 0.5),\n\n top + spacing * (y as f64 + 0.5),\n\n None,\n\n );\n\n }\n\n }\n\n }\n\n}\n", "file_path": "web/src/view.rs", "rank": 84, "score": 8.176420567622612 }, { "content": " }\n\n }\n\n\n\n /// Returns whether the puzzle is completely full of values.\n\n pub fn is_complete(&self) -> bool {\n\n for point in self.points() {\n\n if self[point].is_none() {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n /// Returns the relevant groups for checking a given element in the grid.\n\n ///\n\n /// The number of groups is always equal to the number of dimensions plus\n\n /// one.\n\n // This allow is here for higher dimensions\n\n #[allow(clippy::reverse_range_loop)]\n\n pub fn groups(&self, pos: Point) -> [Group; DIMENSIONS + 1] {\n", "file_path": "src/sudoku.rs", "rank": 85, "score": 7.859264541164377 }, { "content": "}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Hsl<T>> for Hsla<T> {\n\n fn from(other: Hsl<T>) -> Self {\n\n Hsla::<T>(other.0, other.1, other.2, T::max_value())\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Rgb<T>> for Hsla<T> {\n\n fn from(other: Rgb<T>) -> Self {\n\n let hsl: Hsl<T> = other.into();\n\n Hsla::<T>(hsl.0, hsl.1, hsl.2, T::max_value())\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Hsl<T>> for Rgba<T> {\n\n fn from(other: Hsl<T>) -> Self {\n\n let rgb: Rgb<T> = other.into();\n\n 
Rgba::<T>(rgb.0, rgb.1, rgb.2, T::max_value())\n\n }\n\n}\n\n\n", "file_path": "src/ui/color.rs", "rank": 86, "score": 7.7584882849235886 }, { "content": "use stdweb::{\n\n traits::{IEvent, IKeyboardEvent, IMouseEvent},\n\n unstable::TryInto,\n\n web::{\n\n document,\n\n event::{ClickEvent, KeyDownEvent, ResizeEvent},\n\n html_element::*,\n\n window, CanvasRenderingContext2d, IEventTarget, INonElementParentNode, TextAlign,\n\n TextBaseline,\n\n },\n\n};\n\n\n\nuse Context;\n\n\n\nuse sudoku::{ui::model::Game, Difficulty, Element, Point};\n\n\n\nuse std::{cell::RefCell, rc::Rc};\n\n\n\n#[cfg(not(feature = \"light_ui\"))]\n\nconst TEXT: &'static str = \"#fff\";\n", "file_path": "web/src/view.rs", "rank": 87, "score": 7.590189949159882 }, { "content": "impl<T: NumCast + Bounded + FromPrimitive + Default> From<Hsla<T>> for Rgba<T> {\n\n fn from(other: Hsla<T>) -> Self {\n\n let hsl = Hsl::<T>(other.0, other.1, other.2);\n\n let rgb: Rgb<T> = hsl.into();\n\n Rgba::<T>(rgb.0, rgb.1, rgb.2, other.3)\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Rgba<T>> for Hsla<T> {\n\n fn from(other: Rgba<T>) -> Self {\n\n let rgb = Rgb::<T>(other.0, other.1, other.2);\n\n let hsl: Hsl<T> = rgb.into();\n\n Hsla::<T>(hsl.0, hsl.1, hsl.2, other.3)\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Rgb<T>> for Rgba<T> {\n\n fn from(other: Rgb<T>) -> Self {\n\n Rgba::<T>(other.0, other.1, other.2, T::max_value())\n\n }\n", "file_path": "src/ui/color.rs", "rank": 90, "score": 6.456984647707209 }, { "content": " })\n\n .map(Group::Band)\n\n .collect::<Vec<_>>();\n\n let mut g = bands;\n\n g.insert(0, s);\n\n g.insert(0, b);\n\n // Here be dragons (not really, but update this when 1.27 gets stabilized)\n\n clone_into_array(&g[..=DIMENSIONS])\n\n }\n\n\n\n /// Returns the relevant group indices.\n\n // This allow is here for higher dimensions\n\n #[allow(clippy::reverse_range_loop)]\n\n pub fn group_indices(&self, pos: Point) -> Vec<Point> {\n\n for i in 0..DIMENSIONS {\n\n assert!(pos[i] < self.order.pow(2));\n\n }\n\n let top_left = pos.snap(self.order);\n\n let order = i32::from(self.order);\n\n let points = self.points();\n", "file_path": "src/sudoku.rs", "rank": 91, "score": 6.409484535773483 }, { "content": " assert_eq!(groups[1].elements().len(), 9);\n\n assert_eq!(groups[2].elements().len(), 9);\n\n }\n\n\n\n #[test]\n\n fn test_sudoku_groups_length_4_2d() {\n\n let sudoku = Sudoku::new(4);\n\n let groups = sudoku.groups(Point::origin());\n\n assert_eq!(groups[0].elements().len(), 4_usize.pow(DIMENSIONS as u32));\n\n assert_eq!(groups[1].elements().len(), 16);\n\n assert_eq!(groups[2].elements().len(), 16);\n\n }\n\n\n\n #[test]\n\n fn test_sudoku_new() {\n\n for order in 2..10usize {\n\n let sudoku = Sudoku::new(order as u8);\n\n assert_eq!(sudoku.elements.capacity(), order.pow(2 + DIMENSIONS as u32));\n\n }\n\n }\n", "file_path": "src/sudoku.rs", "rank": 92, "score": 6.40205063455827 }, { "content": " }\n\n \"Escape\" => {\n\n context.focused = None;\n\n render(Some(&context));\n\n }\n\n \"ArrowUp\" | \"w\" | \"k\" if cfg!(feature = \"vim_movement\") => {\n\n if point[1] > 0 {\n\n let mut new = point;\n\n new[1] -= 1;\n\n context.focused = Some(new);\n\n render(Some(&context));\n\n }\n\n event.prevent_default();\n\n }\n\n \"ArrowDown\" | \"s\" | \"j\" if cfg!(feature = \"vim_movement\") => {\n\n if point[1] < context.game.current.order.pow(2) - 1 {\n\n let mut new = point;\n\n new[1] += 1;\n\n context.focused = Some(new);\n\n render(Some(&context));\n", 
"file_path": "web/src/view.rs", "rank": 93, "score": 6.074289953605705 }, { "content": "pub const DIMENSIONS: usize = 2;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"3D\")]\n\npub const DIMENSIONS: usize = 3;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"4D\")]\n\npub const DIMENSIONS: usize = 4;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"5D\")]\n\npub const DIMENSIONS: usize = 5;\n\n/// The number of dimensions in which all sudoku methods will operate.\n", "file_path": "src/dimensions.rs", "rank": 94, "score": 6.017774654873064 }, { "content": "/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"9D\")]\n\npub const DIMENSIONS: usize = 9;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"10D\")]\n\npub const DIMENSIONS: usize = 10;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"11D\")]\n\npub const DIMENSIONS: usize = 11;\n\n/// The number of dimensions in which all sudoku methods will operate.\n\n///\n\n/// # Notes\n\n/// Some features are missing for higher-dimension sudokus.\n\n#[cfg(feature = \"12D\")]\n\npub const DIMENSIONS: usize = 12;\n", "file_path": "src/dimensions.rs", "rank": 95, "score": 5.957371049294807 }, { "content": "//! Provides tools to implement a UI presenting a puzzle for the user to solve,\n\n//! 
along with some other \"nice\" features.\n\n\n\npub mod color;\n\npub mod model;\n", "file_path": "src/ui/mod.rs", "rank": 96, "score": 5.935750535701731 }, { "content": " T::from_f64(T::max_value().to_f64().unwrap_or(1.0) * l).unwrap_or_default(),\n\n )\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> From<Hsl<T>> for Rgb<T> {\n\n fn from(other: Hsl<T>) -> Self {\n\n let triple = (\n\n other.0.to_f64().unwrap_or_default(),\n\n other.1.to_f64().unwrap_or_default(),\n\n other.2.to_f64().unwrap_or_default(),\n\n );\n\n let max = T::max_value().to_f64().unwrap_or(1.0);\n\n let (h, s, l) = (triple.0 / max, triple.1 / max, triple.2 / max);\n\n let c = (1.0 - (2.0 * l - 1.0).abs()) * s;\n\n let x = c * (1.0 - (((h * 6.0) % 2.0) - 1.0).abs());\n\n let m = l - c / 2.0;\n\n let f_h = h * 6.0;\n\n let (r, g, b) = if f_h >= 1.0 && f_h < 2.0 {\n\n (x, c, 0.0)\n", "file_path": "src/ui/color.rs", "rank": 97, "score": 5.872859562475497 }, { "content": " components.0 as f64,\n\n components.1 as f64,\n\n components.2 as f64,\n\n );\n\n Rgb::<T>(\n\n T::from_f64(T::max_value().to_f64().unwrap_or(1.0) * normalized.0)\n\n .unwrap_or_default(),\n\n T::from_f64(T::max_value().to_f64().unwrap_or(1.0) * normalized.1)\n\n .unwrap_or_default(),\n\n T::from_f64(T::max_value().to_f64().unwrap_or(1.0) * normalized.2)\n\n .unwrap_or_default(),\n\n )\n\n })\n\n }\n\n}\n\n\n\nimpl<T: NumCast + Bounded + FromPrimitive + Default> Name for Hsl<T> {\n\n fn with_name(name: &str) -> Option<Self> {\n\n Rgb::<T>::with_name(name).map(|c| c.into())\n\n }\n", "file_path": "src/ui/color.rs", "rank": 98, "score": 5.755815896487401 }, { "content": "//! Utilities for working with colors and color palettes.\n\n\n\nuse num_traits::{Bounded, FromPrimitive, NumCast};\n\nuse std::fmt;\n\n\n\n/// A construct that can be treated as encoding a solid color.\n", "file_path": "src/ui/color.rs", "rank": 99, "score": 5.677685145013741 } ]
Rust
flow-rs/src/channel/storage.rs
ysh329/MegFlow
778a4361a88af43f499528f4509f6523515ce0ee
/** * \file flow-rs/src/channel/storage.rs * MegFlow is Licensed under the Apache License, Version 2.0 (the "License") * * Copyright (c) 2019-2021 Megvii Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ use super::{inner, ChannelBase, Receiver, Sender, SenderRecord}; use crate::envelope::SealedEnvelope; use crate::rt::sync::Mutex; use std::collections::HashMap; use std::process; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; #[derive(Clone)] pub struct ChannelStorage { storage: Arc<inner::Channel<SealedEnvelope>>, receiver_epoch: Arc<AtomicUsize>, sender_epoch: Arc<AtomicUsize>, sender_record: SenderRecord, rx_counter: Arc<AtomicUsize>, tx_counter: Arc<AtomicUsize>, } impl ChannelBase for ChannelStorage { fn is_closed(&self) -> bool { self.storage.queue.is_closed() } fn is_none(&self) -> bool { false } } impl ChannelStorage { pub fn bound(cap: usize) -> ChannelStorage { ChannelStorage { storage: inner::bounded(cap), receiver_epoch: Arc::new(AtomicUsize::new(0)), sender_epoch: Arc::new(AtomicUsize::new(0)), sender_record: Arc::new(Mutex::new(HashMap::new())), rx_counter: Arc::new(AtomicUsize::new(0)), tx_counter: Arc::new(AtomicUsize::new(0)), } } pub fn unbound() -> ChannelStorage { ChannelStorage { storage: inner::unbounded(), receiver_epoch: Arc::new(AtomicUsize::new(0)), sender_epoch: Arc::new(AtomicUsize::new(0)), sender_record: Arc::new(Mutex::new(HashMap::new())), rx_counter: Arc::new(AtomicUsize::new(0)), tx_counter: Arc::new(AtomicUsize::new(0)), } } pub fn sender(&self) -> Sender { let count = self.storage.sender_count.fetch_add(1, Ordering::Relaxed); if count > usize::MAX / 2 { process::abort(); } Sender::new( inner::Sender { channel: self.storage.clone(), }, self.sender_epoch.clone(), self.sender_record.clone(), self.tx_counter.clone(), ) } pub fn receiver(&self) -> Receiver { let count = self.storage.receiver_count.fetch_add(1, Ordering::Relaxed); if count > usize::MAX / 2 { process::abort(); } Receiver::new( inner::Receiver { channel: self.storage.clone(), listener: None, }, self.receiver_epoch.clone(), self.rx_counter.clone(), ) } pub fn len(&self) -> usize { self.storage.queue.len() } pub fn is_almost_full(&self) -> bool { match self.storage.queue.capacity() { Some(capacity) => (0.9 * capacity as f64) <= self.len() as f64, None => false, } } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn close(&self) { self.storage.close(); } pub fn sender_count(&self) -> usize { self.storage.sender_count.load(Ordering::Relaxed) } pub fn receiver_count(&self) -> usize { self.storage.receiver_count.load(Ordering::Relaxed) } pub fn swap_tx_counter(&self) -> usize { self.tx_counter.swap(0, Ordering::Relaxed) } pub fn swap_rx_counter(&self) -> usize { self.rx_counter.swap(0, Ordering::Relaxed) } }
/** * \file flow-rs/src/channel/storage.rs * MegFlow is Licensed under the Apache License, Version 2.0 (the "License") * * Copyright (c) 2019-2021 Megvii Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ use super::{inner, ChannelBase, Receiver, Sender, SenderRecord}; use crate::envelope::SealedEnvelope; use crate::rt::sync::Mutex; use std::collections::HashMap; use std::process; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; #[derive(Clone)] pub struct ChannelStorage { storage: Arc<inner::Channel<SealedEnvelope>>, receiver_epoch: Arc<AtomicUsize>, sender_epoch: Arc<AtomicUsize>, sender_record: SenderRecord, rx_counter: Arc<AtomicUsize>, tx_counter: Arc<AtomicUsize>, } impl ChannelBase for ChannelStorage { fn is_closed(&self) -> bool { self.storage.
> ChannelStorage { ChannelStorage { storage: inner::bounded(cap), receiver_epoch: Arc::new(AtomicUsize::new(0)), sender_epoch: Arc::new(AtomicUsize::new(0)), sender_record: Arc::new(Mutex::new(HashMap::new())), rx_counter: Arc::new(AtomicUsize::new(0)), tx_counter: Arc::new(AtomicUsize::new(0)), } } pub fn unbound() -> ChannelStorage { ChannelStorage { storage: inner::unbounded(), receiver_epoch: Arc::new(AtomicUsize::new(0)), sender_epoch: Arc::new(AtomicUsize::new(0)), sender_record: Arc::new(Mutex::new(HashMap::new())), rx_counter: Arc::new(AtomicUsize::new(0)), tx_counter: Arc::new(AtomicUsize::new(0)), } } pub fn sender(&self) -> Sender { let count = self.storage.sender_count.fetch_add(1, Ordering::Relaxed); if count > usize::MAX / 2 { process::abort(); } Sender::new( inner::Sender { channel: self.storage.clone(), }, self.sender_epoch.clone(), self.sender_record.clone(), self.tx_counter.clone(), ) } pub fn receiver(&self) -> Receiver { let count = self.storage.receiver_count.fetch_add(1, Ordering::Relaxed); if count > usize::MAX / 2 { process::abort(); } Receiver::new( inner::Receiver { channel: self.storage.clone(), listener: None, }, self.receiver_epoch.clone(), self.rx_counter.clone(), ) } pub fn len(&self) -> usize { self.storage.queue.len() } pub fn is_almost_full(&self) -> bool { match self.storage.queue.capacity() { Some(capacity) => (0.9 * capacity as f64) <= self.len() as f64, None => false, } } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn close(&self) { self.storage.close(); } pub fn sender_count(&self) -> usize { self.storage.sender_count.load(Ordering::Relaxed) } pub fn receiver_count(&self) -> usize { self.storage.receiver_count.load(Ordering::Relaxed) } pub fn swap_tx_counter(&self) -> usize { self.tx_counter.swap(0, Ordering::Relaxed) } pub fn swap_rx_counter(&self) -> usize { self.rx_counter.swap(0, Ordering::Relaxed) } }
queue.is_closed() } fn is_none(&self) -> bool { false } } impl ChannelStorage { pub fn bound(cap: usize) -
random
[ { "content": "pub fn ident(lit: impl AsRef<str>) -> Ident {\n\n Ident::new(lit.as_ref(), Span::call_site())\n\n}\n", "file_path": "flow-derive/src/lit.rs", "rank": 0, "score": 153767.02012632263 }, { "content": "pub fn match_last_ty(ty: &syn::Type, wrapper: &str) -> bool {\n\n match ty {\n\n syn::Type::Path(ref p) => p\n\n .path\n\n .segments\n\n .last()\n\n .map(|seg| seg.ident == wrapper)\n\n .unwrap_or(false),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 1, "score": 142611.0890040362 }, { "content": "#[doc(hidden)]\n\npub fn export() {\n\n pyo3::prepare_freethreaded_python();\n\n pretty_env_logger::init();\n\n}\n", "file_path": "flow-plugins/src/lib.rs", "rank": 2, "score": 133578.0489385558 }, { "content": "pub fn extract_ports(\n\n data: &syn::Data,\n\n ty_name: &str,\n\n f: fn(ExtractParams) -> TokenStream,\n\n) -> Vec<TokenStream> {\n\n let port_func = lit::ident(ty_name.to_lowercase());\n\n fields(data)\n\n .filter(|field| {\n\n match_last_ty(&field.ty, ty_name)\n\n || (match_last_ty(&field.ty, \"DynPorts\")\n\n && match_last_ty(last_inner_ty(&field.ty).unwrap(), ty_name))\n\n || (match_last_ty(&field.ty, \"Vec\")\n\n && match_last_ty(last_inner_ty(&field.ty).unwrap(), ty_name))\n\n })\n\n .map(|field| (&port_func, &field.ident, &field.ty))\n\n .map(f)\n\n .collect()\n\n}\n", "file_path": "flow-derive/src/utils.rs", "rank": 3, "score": 131297.47858299836 }, { "content": "pub fn load_shared(\n\n cfg: &crate::config::interlayer::Node,\n\n ctx: Context,\n\n resources: ResourceCollection,\n\n) -> Result<SharedProxy> {\n\n let local_key = ctx.local_key;\n\n let (s, r) = unbounded();\n\n let shared = Shared::new(ctx.local_key, r, cfg)?.boxed();\n\n let handle = shared.start(ctx, resources);\n\n let (s2, r2) = oneshot::channel();\n\n s2.send(handle).ok();\n\n SharedStopNotify::registry_local()\n\n .get(local_key)\n\n .insert(cfg.entity.name.clone(), SharedStopNotify(r2));\n\n SharedProxy::new(local_key, s, cfg)\n\n}\n", "file_path": "flow-rs/src/node/shared.rs", "rank": 4, "score": 129150.00360415498 }, { "content": "/// A convenient function to load single graph\n\npub fn load_single_graph(\n\n option: Option<loader::LoaderConfig>,\n\n config: &str,\n\n) -> Result<graph::Graph> {\n\n let config: config::presentation::Graph = toml::from_str(config)?;\n\n let config = config::presentation::Config {\n\n main: config.name.clone(),\n\n graphs: vec![config],\n\n nodes: vec![],\n\n resources: vec![],\n\n };\n\n load_impl(option, config)\n\n}\n\n\n\nuse std::sync::atomic::{AtomicU64, Ordering};\n\n\n\nlazy_static::lazy_static!(\n\n static ref LOCAL_KEY: AtomicU64 = AtomicU64::new(0);\n\n);\n\n\n", "file_path": "flow-rs/src/lib.rs", "rank": 5, "score": 129150.00360415498 }, { "content": "pub fn translate_graph(\n\n local_key: u64,\n\n p: presentation::Graph,\n\n shared_nodes: &mut HashMap<String, interlayer::Node>,\n\n) -> Result<interlayer::Graph> {\n\n let mut nodes = HashMap::new();\n\n let mut resources = HashMap::new();\n\n let mut inputs = vec![];\n\n let mut outputs = vec![];\n\n let mut connections = HashMap::new();\n\n\n\n for node in p.nodes {\n\n nodes.insert(node.entity.name.clone(), translate_node(local_key, node)?);\n\n }\n\n for res in p.resources {\n\n resources.insert(res.name.clone(), res);\n\n }\n\n\n\n for (i, conn) in p.connections.into_iter().enumerate() {\n\n let name = format!(\"__{}__\", i);\n", "file_path": "flow-rs/src/config/mod.rs", "rank": 6, "score": 129150.00360415498 }, { "content": "pub fn add_field(item: &mut ItemStruct, 
field: TokenStream) -> Result<()> {\n\n match item.fields {\n\n Fields::Named(ref mut fields) => fields.named.push(Field::parse_named.parse2(field)?),\n\n Fields::Unnamed(ref mut fields) => fields.unnamed.push(Field::parse_unnamed.parse2(field)?),\n\n _ => unreachable!(),\n\n }\n\n Ok(())\n\n}\n\n\n\npub type ExtractParams<'a> = (&'a Ident, &'a Option<Ident>, &'a Type);\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 7, "score": 122852.73002406425 }, { "content": "pub fn hash(input: &TokenStream) -> u64 {\n\n let mut hasher = hash_map::DefaultHasher::new();\n\n hasher.write(input.to_string().as_bytes());\n\n hasher.finish()\n\n}\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 8, "score": 110101.37231550248 }, { "content": "pub fn dump(config: &Config) -> Result<()> {\n\n if let Ok(path) = std::env::var(\"MEGFLOW_DUMP\") {\n\n let dot = dump_dot(config);\n\n let path = PathBuf::from(path);\n\n let mut f = File::create(&path)?;\n\n write!(f, \"{}\", dot)?;\n\n let output = Command::new(\"dot\").arg(\"-Tpng\").arg(&path).output()?;\n\n std::fs::write(&path, output.stdout)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "flow-rs/src/config/graphviz.rs", "rank": 9, "score": 110101.37231550248 }, { "content": "fn load_impl(\n\n option: Option<loader::LoaderConfig>,\n\n config: config::presentation::Config,\n\n) -> Result<graph::Graph> {\n\n // init graph local resources\n\n let local_key = LOCAL_KEY.fetch_add(1, Ordering::Relaxed);\n\n registry::initialize(local_key);\n\n // load plugins\n\n if let Some(option) = option {\n\n loader::load(local_key, &option).unwrap();\n\n }\n\n // register global resources\n\n for res in &config.resources {\n\n let mut global_res = resource::GLOBAL_RESOURCES.write().unwrap();\n\n global_res.get_mut(&local_key).unwrap().insert(\n\n res.name.clone(),\n\n res.ty.as_str(),\n\n &res.args,\n\n );\n\n }\n", "file_path": "flow-rs/src/lib.rs", "rank": 10, "score": 108874.48434653824 }, { "content": "pub fn expand(input: DeriveInput) -> TokenStream {\n\n let ident = input.ident;\n\n type IterArgs<'a> = ExtractParams<'a>;\n\n fn set_f((port_func, ident, ty): IterArgs) -> TokenStream {\n\n if match_last_ty(ty, \"Vec\") {\n\n quote_spanned! {ident.span()=>\n\n if port_name == concat!('[', stringify!(#ident), ']') {\n\n if tag.is_none() {\n\n if let Some(p) = self.#ident.iter_mut().find(|p| p.is_none()) {\n\n *p = channel.#port_func();\n\n } else {\n\n self.#ident.push(channel.#port_func());\n\n }\n\n } else {\n\n let i = tag.unwrap() as usize;\n\n match self.#ident.len().cmp(&i) {\n\n std::cmp::Ordering::Greater => {\n\n debug_assert!(self.#ident[i].is_none());\n\n self.#ident[i] = channel.#port_func();\n\n }\n", "file_path": "flow-derive/src/node.rs", "rank": 11, "score": 108290.08447256399 }, { "content": "pub fn expand(input: DeriveInput) -> TokenStream {\n\n let ident = input.ident;\n\n let is_local = attr(&input.attrs, \"local\").is_some();\n\n let spawn_func = if is_local {\n\n lit::ident(\"spawn_local\")\n\n } else {\n\n lit::ident(\"spawn\")\n\n };\n\n\n\n fn send_empty_f((_, ident, ty): ExtractParams) -> TokenStream {\n\n if match_last_ty(ty, \"Vec\") {\n\n quote_spanned! {ident.span()=>\n\n for chan in &self.#ident {\n\n chan.send_any(flow_rs::envelope::DummyEnvelope{}.seal()).await.ok();\n\n }\n\n }\n\n } else if match_last_ty(ty, \"DynPorts\") {\n\n quote_spanned! 
(ident.span()=>\n\n for chan in self.#ident.cache().values() {\n\n chan.send_any(flow_rs::envelope::DummyEnvelope{}.seal()).await.ok();\n", "file_path": "flow-derive/src/actor.rs", "rank": 12, "score": 108290.08447256399 }, { "content": "#[doc(hidden)]\n\n#[proc_macro]\n\npub fn submit(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as internal::CollectionSlice<syn::Expr>);\n\n internal::submit_expand(input).into()\n\n}\n\n\n", "file_path": "flow-derive/src/lib.rs", "rank": 13, "score": 108290.08447256399 }, { "content": "#[doc(hidden)]\n\n#[proc_macro]\n\npub fn feature(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as internal::FeatureDeclare);\n\n internal::feature_expand(input).into()\n\n}\n\n\n", "file_path": "flow-derive/src/lib.rs", "rank": 14, "score": 108290.08447256399 }, { "content": "pub fn registry_expand(input: ResourceDefine) -> TokenStream {\n\n let name = &input.name;\n\n let ty = &input.ty;\n\n quote! {\n\n flow_rs::submit!(#name.to_owned(),\n\n flow_rs::node::ResourceSlice{\n\n cons: Box::new(|name: String, args: &toml::value::Table| Box::new(<#ty>::new(name, args))),\n\n }\n\n );\n\n }\n\n}\n", "file_path": "flow-derive/src/resource.rs", "rank": 15, "score": 106573.45569950955 }, { "content": "pub fn registry_expand(input: NodeDefine) -> TokenStream {\n\n let name = &input.name;\n\n let ty = &input.ty;\n\n quote! {\n\n flow_rs::submit!(#name.to_owned(),\n\n flow_rs::node::NodeSlice{\n\n cons: Box::new(|name: String, args: &toml::value::Table| Box::new(<#ty>::new(name, args))),\n\n info: flow_rs::node::NodeInfo {\n\n inputs: <#ty>::inputs_name(),\n\n outputs: <#ty>::outputs_name(),\n\n }\n\n }\n\n );\n\n }\n\n}\n\n\n", "file_path": "flow-derive/src/node.rs", "rank": 16, "score": 106573.45569950955 }, { "content": "#[proc_macro_derive(Node)]\n\npub fn node_derive(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n node::expand(input).into()\n\n}\n\n\n\n/// attributes(local): spawn actor by `flow_rs::rt::task::spawn_local`\n", "file_path": "flow-derive/src/lib.rs", "rank": 17, "score": 106573.45569950955 }, { "content": "#[proc_macro]\n\npub fn node_register(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as node::NodeDefine);\n\n node::registry_expand(input).into()\n\n}\n\n\n\n/// A proc macro used to register a resource. `resource_register!(\"ResourceType\", ResourceType)`\n", "file_path": "flow-derive/src/lib.rs", "rank": 18, "score": 106573.45569950955 }, { "content": "#[proc_macro]\n\npub fn resource_register(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as resource::ResourceDefine);\n\n resource::registry_expand(input).into()\n\n}\n\n\n", "file_path": "flow-derive/src/lib.rs", "rank": 19, "score": 106573.45569950955 }, { "content": "#[proc_macro_derive(Actor, attributes(local))]\n\npub fn actor_derive(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n actor::expand(input).into()\n\n}\n\n\n\n/// A proc macro used to register a node. 
`node_register!(\"NodeType\", NodeType)`\n", "file_path": "flow-derive/src/lib.rs", "rank": 20, "score": 106573.45569950955 }, { "content": "pub fn unbounded<T>() -> Arc<Channel<T>> {\n\n Arc::new(Channel {\n\n queue: ConcurrentQueue::unbounded(),\n\n send_ops: Event::new(),\n\n recv_ops: Event::new(),\n\n stream_ops: Event::new(),\n\n sender_count: AtomicUsize::new(0),\n\n receiver_count: AtomicUsize::new(0),\n\n })\n\n}\n\n\n\n/// The sending side of a channel.\n\n///\n\n/// Senders can be cloned and shared among threads. When all senders associated with a channel are\n\n/// dropped, the channel becomes closed.\n\n///\n\n/// The channel can also be closed manually by calling [`Sender::close()`].\n\npub struct Sender<T> {\n\n /// Inner channel state.\n\n pub(super) channel: Arc<Channel<T>>,\n", "file_path": "flow-rs/src/channel/inner.rs", "rank": 21, "score": 104870.87612104387 }, { "content": "pub fn utils_register(module: &PyModule) -> PyResult<()> {\n\n module.add_function(wrap_pyfunction!(yield_now, module)?)?;\n\n module.add_function(wrap_pyfunction!(sleep, module)?)?;\n\n module.add_function(wrap_pyfunction!(join, module)?)?;\n\n module.add_function(wrap_pyfunction!(create_future, module)?)?;\n\n module.add_class::<PyFuture>()?;\n\n module.add_class::<PyWaker>()?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[flow_rs::rt::test]\n\n async fn test_future() -> PyResult<()> {\n\n pyo3::prepare_freethreaded_python();\n\n let (mut fut, mut waker) = create_future(None);\n\n Python::with_gil(|py| -> PyResult<_> {\n\n waker.wake(py, 1usize.into_py(py))?;\n\n let ret: usize = fut.wait(py).extract(py)?;\n\n assert_eq!(ret, 1);\n\n Ok(())\n\n })?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "flow-rs/src/loader/python/utils.rs", "rank": 22, "score": 103395.96784231544 }, { "content": "pub fn envelope_register(module: &PyModule) -> PyResult<()> {\n\n module.add_class::<PyEnvelope>()?;\n\n Ok(())\n\n}\n", "file_path": "flow-rs/src/loader/python/envelope.rs", "rank": 23, "score": 103395.96784231544 }, { "content": "pub fn reject_cause<T>(err: T) -> Rejection\n\nwhere\n\n T: 'static + Debug + Send + Sync,\n\n{\n\n rweb::reject::custom(RejectCause::from(err))\n\n}\n", "file_path": "flow-plugins/src/utils/error.rs", "rank": 24, "score": 103154.24734798944 }, { "content": "pub fn in_expand(ports: &PortSequence) -> Vec<TokenStream> {\n\n expand(ports, type_name::IN_T)\n\n}\n\n\n", "file_path": "flow-derive/src/ports.rs", "rank": 25, "score": 103154.24734798944 }, { "content": "pub fn out_expand(ports: &PortSequence) -> Vec<TokenStream> {\n\n expand(ports, type_name::OUT_T)\n\n}\n\n\n", "file_path": "flow-derive/src/ports.rs", "rank": 26, "score": 103154.24734798944 }, { "content": "pub fn name_expand(ports: &PortSequence) -> Vec<LitStr> {\n\n ports\n\n .fields\n\n .iter()\n\n .map(|port| match port.ty {\n\n PortType::Dyn => string(format!(\"dyn@{}\", port.name)),\n\n PortType::List => string(format!(\"[{}]\", port.name)),\n\n PortType::Unit => string(&port.name),\n\n })\n\n .collect()\n\n}\n", "file_path": "flow-derive/src/ports.rs", "rank": 27, "score": 101525.04618909993 }, { "content": "pub fn feature_expand(mut input: FeatureDeclare) -> TokenStream {\n\n let name = &input.name;\n\n let args_type = lit::ident(format!(\"__args{}\", name));\n\n let args_name = lit::ident(format!(\"{}_args\", name));\n\n let init = lit::ident(format!(\"__init{}\", name));\n\n let fields = &mut input.fields;\n\n fields.iter_mut().for_each(|field| {\n\n field.vis = Visibility::Public(VisPublic 
{\n\n pub_token: Token![pub](field.span()),\n\n });\n\n });\n\n quote! {\n\n #[derive(serde::Deserialize, Default)]\n\n pub struct #args_type {\n\n #fields\n\n }\n\n pub static #name: feature::Feature = feature::Feature::new();\n\n lazy_static::lazy_static! {\n\n pub static ref #args_name: std::sync::RwLock<std::collections::HashMap<usize, #args_type>>\n\n = std::sync::RwLock::new(std::collections::HashMap::new());\n", "file_path": "flow-derive/src/internal.rs", "rank": 28, "score": 101525.04618909993 }, { "content": "pub fn string<T: Display>(lit: T) -> LitStr {\n\n LitStr::new(lit.to_string().as_str(), Span::call_site())\n\n}\n\n\n", "file_path": "flow-derive/src/lit.rs", "rank": 29, "score": 100243.18367721612 }, { "content": "#[proc_macro_attribute]\n\npub fn outputs(ports: TokenStream, input: TokenStream) -> TokenStream {\n\n let ports = parse_macro_input!(ports as ports::PortSequence);\n\n let fields = ports::out_expand(&ports);\n\n let names = ports::name_expand(&ports);\n\n let item_struct = parse_macro_input!(input as ItemStruct);\n\n ports_expand(\"outputs\", item_struct, fields, names).into()\n\n}\n\n\n", "file_path": "flow-derive/src/lib.rs", "rank": 30, "score": 97065.69582002203 }, { "content": "#[proc_macro_attribute]\n\npub fn inputs(ports: TokenStream, input: TokenStream) -> TokenStream {\n\n let ports = parse_macro_input!(ports as ports::PortSequence);\n\n let fields = ports::in_expand(&ports);\n\n let names = ports::name_expand(&ports);\n\n let item_struct = parse_macro_input!(input as ItemStruct);\n\n ports_expand(\"inputs\", item_struct, fields, names).into()\n\n}\n\n\n\n/// A proc macro used to define outputs of a node. `#[outputs(port_name[:port_type], ..)]`\n\n///\n\n/// | Annotation | Description |\n\n/// | :- | :- |\n\n/// | `port_name` | SimplePort, which is a Sender of a MPMC channel |\n\n/// | `port_name: []` | ListPort, which is `Vec<SimplePort>` |\n\n/// | `port_name: dyn` | DynamicPort, which is a port created dynamic, see `flow_rs::node::DynamicPort` for more detail |\n", "file_path": "flow-derive/src/lib.rs", "rank": 31, "score": 97065.69582002203 }, { "content": "pub fn bounded<T>(cap: usize) -> Arc<Channel<T>> {\n\n assert!(cap > 0, \"capacity cannot be zero\");\n\n\n\n Arc::new(Channel {\n\n queue: ConcurrentQueue::bounded(cap),\n\n send_ops: Event::new(),\n\n recv_ops: Event::new(),\n\n stream_ops: Event::new(),\n\n sender_count: AtomicUsize::new(0),\n\n receiver_count: AtomicUsize::new(0),\n\n })\n\n}\n\n\n", "file_path": "flow-rs/src/channel/inner.rs", "rank": 32, "score": 96105.6219469607 }, { "content": "pub fn last_inner_ty(ty: &syn::Type) -> Option<&syn::Type> {\n\n if let syn::Type::Path(ref p) = ty {\n\n if p.path.segments.is_empty() {\n\n return None;\n\n }\n\n\n\n if let syn::PathArguments::AngleBracketed(ref inner_ty) =\n\n p.path.segments.last().unwrap().arguments\n\n {\n\n if inner_ty.args.len() != 1 {\n\n return None;\n\n }\n\n\n\n let inner_ty = inner_ty.args.first().unwrap();\n\n if let syn::GenericArgument::Type(ref t) = inner_ty {\n\n return Some(t);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 33, "score": 94557.3352486561 }, { "content": "pub fn submit_expand<ID: ToTokens>(input: CollectionSlice<ID>) -> TokenStream {\n\n let name = &input.name;\n\n let expr = &input.expr;\n\n let init = Ident::new(&format!(\"__init{}\", hash(expr)), Span::call_site());\n\n quote! 
{\n\n #[allow(non_upper_case_globals)]\n\n #[flow_rs::ctor]\n\n fn #init() {\n\n flow_rs::registry::__submit_only_in_ctor(#name, #expr);\n\n }\n\n }\n\n}\n\n\n\npub struct FeatureDeclare {\n\n name: Ident,\n\n fields: Punctuated<Field, Token![,]>,\n\n}\n\n\n\nimpl Parse for FeatureDeclare {\n\n fn parse(input: ParseStream) -> Result<Self> {\n", "file_path": "flow-derive/src/internal.rs", "rank": 34, "score": 93084.08113416375 }, { "content": "pub fn with_context<F, R>(py: Python, f: F) -> R\n\nwhere\n\n F: FnOnce() -> R + Send,\n\n R: Send,\n\n{\n\n let id = CTX.with(|ctx| ctx.borrow_mut().store());\n\n let r = py.allow_threads(f);\n\n CTX.with(|ctx| ctx.borrow_mut().restore(id));\n\n r\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/context.rs", "rank": 35, "score": 92373.5215339803 }, { "content": "pub fn store(ts: *mut ffi::PyThreadState) -> PyThreadStateUnlimited {\n\n match VERSION.1 {\n\n 6 => {\n\n let ts = ts as *mut PyThreadStateUnlimited3_6;\n\n unsafe {\n\n let unlimited = PyThreadStateUnlimited {\n\n frame: (*ts).frame,\n\n recursion_depth: (*ts).recursion_depth,\n\n exc_type: (*ts).exc_type,\n\n exc_value: (*ts).exc_value,\n\n exc_traceback: (*ts).exc_traceback,\n\n ..Default::default()\n\n };\n\n (*ts).frame = std::ptr::null_mut();\n\n (*ts).recursion_depth = 0;\n\n (*ts).exc_type = std::ptr::null_mut();\n\n (*ts).exc_value = std::ptr::null_mut();\n\n (*ts).exc_traceback = std::ptr::null_mut();\n\n unlimited\n\n }\n", "file_path": "flow-rs/src/loader/python/unlimited.rs", "rank": 36, "score": 91572.02797145635 }, { "content": "pub fn fields(data: &syn::Data) -> syn::punctuated::Iter<syn::Field> {\n\n match data {\n\n syn::Data::Struct(ref item) => match item.fields {\n\n syn::Fields::Named(ref fields) => fields.named.iter(),\n\n syn::Fields::Unnamed(ref fields) => fields.unnamed.iter(),\n\n _ => unreachable!(),\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 37, "score": 90455.0941945141 }, { "content": "pub fn restore(limited: *mut ffi::PyThreadState, unlimited: &PyThreadStateUnlimited) {\n\n match VERSION.1 {\n\n 6 => {\n\n let ts = limited as *mut PyThreadStateUnlimited3_6;\n\n unsafe {\n\n (*ts).frame = unlimited.frame;\n\n (*ts).recursion_depth = unlimited.recursion_depth;\n\n (*ts).exc_type = unlimited.exc_type;\n\n (*ts).exc_value = unlimited.exc_value;\n\n (*ts).exc_traceback = unlimited.exc_traceback;\n\n }\n\n }\n\n 7 | 8 | 9 => {\n\n let ts = limited as *mut PyThreadStateUnlimited3_789;\n\n unsafe {\n\n (*ts).frame = unlimited.frame;\n\n (*ts).recursion_depth = unlimited.recursion_depth;\n\n (*ts).context = unlimited.context;\n\n (*ts).exc_state = unlimited.exc_state;\n\n (*ts).exc_info = unlimited.exc_info;\n", "file_path": "flow-rs/src/loader/python/unlimited.rs", "rank": 38, "score": 87841.94312511879 }, { "content": "pub fn translate_config(local_key: u64, p: presentation::Config) -> Result<interlayer::Config> {\n\n let mut graphs = vec![];\n\n let mut nodes = HashMap::new();\n\n let mut resources = HashMap::new();\n\n\n\n for node in p.nodes {\n\n nodes.insert(node.entity.name.clone(), translate_node(local_key, node)?);\n\n }\n\n\n\n for graph in p.graphs {\n\n graphs.push(translate_graph(local_key, graph, &mut nodes)?);\n\n }\n\n\n\n for res in p.resources {\n\n resources.insert(res.name.clone(), res);\n\n }\n\n\n\n Ok(interlayer::Config {\n\n graphs,\n\n resources,\n\n nodes,\n\n main: p.main,\n\n })\n\n}\n", "file_path": "flow-rs/src/config/mod.rs", "rank": 39, "score": 87578.29327690715 }, { 
"content": "pub fn attr<'a>(attrs: &'a [syn::Attribute], name: &str) -> Option<&'a syn::Attribute> {\n\n attrs.iter().find(|attr| {\n\n attr.path\n\n .segments\n\n .first()\n\n .map(|p| p.ident == name)\n\n .unwrap_or(false)\n\n })\n\n}\n\n\n", "file_path": "flow-derive/src/utils.rs", "rank": 40, "score": 85882.95436598676 }, { "content": "pub fn __submit_only_in_ctor<Q, ID, T: Collect<ID>>(key: Q, value: T)\n\nwhere\n\n ID: Eq + Hash + 'static,\n\n Q: Into<ID>,\n\n{\n\n T::registry_global().insert(key, value);\n\n}\n\n\n\n#[macro_export]\n\n#[doc(hidden)]\n\nmacro_rules! collect {\n\n ($id:ty, $ty:ty) => {\n\n impl $crate::registry::Collect<$id> for $ty {\n\n #[inline]\n\n fn registry_global() -> &'static $crate::registry::Registry<$id, Self> {\n\n lazy_static::lazy_static! {\n\n static ref REGISTRY: $crate::registry::Registry<$id, $ty> =\n\n $crate::registry::Registry::default();\n\n };\n\n &REGISTRY\n", "file_path": "flow-rs/src/registry.rs", "rank": 41, "score": 85879.64705582344 }, { "content": "/// A function to load graph with config\n\npub fn load(option: Option<loader::LoaderConfig>, config: &str) -> Result<graph::Graph> {\n\n load_impl(option, toml::from_str(config)?)\n\n}\n", "file_path": "flow-rs/src/lib.rs", "rank": 42, "score": 85879.64705582344 }, { "content": "pub fn uget_slice<'a, T: Element>(py: Python, pyobject: &PyAny) -> PyResult<SliceGuard<'a, T>> {\n\n let array: &PyArrayDyn<T> = pyobject.extract()?;\n\n unsafe {\n\n let slice = array\n\n .as_slice()\n\n .map_err(|_| pyo3::exceptions::PyTypeError::new_err(\"not contiguous\"))?;\n\n let slice = std::slice::from_raw_parts(slice.as_ptr(), slice.len());\n\n Ok(SliceGuard {\n\n slice,\n\n _ref: pyobject.into_py(py),\n\n })\n\n }\n\n}\n", "file_path": "flow-rs/src/helper.rs", "rank": 43, "score": 80389.2865174211 }, { "content": "pub fn toml2dict<'a>(py: Python<'a>, args: &toml::value::Table) -> PyResult<&'a PyDict> {\n\n fn append_list(py: Python, value: &toml::Value, list: &PyList) -> PyResult<()> {\n\n match value {\n\n toml::Value::String(s) => list.append(s),\n\n toml::Value::Float(f) => list.append(f),\n\n toml::Value::Integer(i) => list.append(i),\n\n toml::Value::Boolean(b) => list.append(b),\n\n toml::Value::Datetime(t) => list.append(t.to_string()),\n\n toml::Value::Array(l) => {\n\n let pylist = PyList::empty(py);\n\n for e in l {\n\n append_list(py, e, pylist)?;\n\n }\n\n list.append(pylist)\n\n }\n\n toml::Value::Table(d) => {\n\n let pydict = PyDict::new(py);\n\n for (key, value) in d {\n\n fill_dict(py, key, value, pydict)?;\n\n }\n", "file_path": "flow-rs/src/loader/python/node.rs", "rank": 44, "score": 80389.2865174211 }, { "content": "#[inputs(inp)]\n\n#[outputs(out)]\n\n#[derive(Node, Actor)]\n\nstruct Transport {\n\n _name: String,\n\n}\n\n\n\nimpl Transport {\n\n fn new(name: String, _: &toml::value::Table) -> Transport {\n\n Transport {\n\n _name: name,\n\n inp: Default::default(),\n\n out: Default::default(),\n\n }\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n if let Ok(envelope) = self.inp.recv::<u32>().await {\n\n self.out.send(envelope).await.ok();\n\n }\n", "file_path": "flow-rs/examples/graph.rs", "rank": 45, "score": 72297.3609961551 }, { "content": "struct Port {\n\n name: syn::Ident,\n\n ty: PortType,\n\n}\n\n\n\nimpl Parse for Port {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n let name: syn::Ident = input.parse()?;\n\n Ok(if input.peek(Token![:]) {\n\n 
input.parse::<Token![:]>()?;\n\n if input.peek(syn::token::Dyn) {\n\n input.parse::<syn::token::Dyn>()?;\n\n Port {\n\n name,\n\n ty: PortType::Dyn,\n\n }\n\n } else if input.peek(syn::token::Bracket) {\n\n let _content;\n\n bracketed!(_content in input);\n\n Port {\n", "file_path": "flow-derive/src/ports.rs", "rank": 46, "score": 72297.3609961551 }, { "content": "type NotifySender = Sender<SealedEnvelope>;\n", "file_path": "flow-rs/src/broker.rs", "rank": 47, "score": 72256.52904914015 }, { "content": "type SubSender = Sender<SealedEnvelope>;\n\n\n\n#[derive(Default)]\n\npub struct Broker {\n\n subs: HashMap<String, (NotifySender, NotifyReceiver, Vec<SubSender>)>,\n\n running: Arc<AtomicBool>,\n\n}\n\n\n\npub struct BrokerClient {\n\n id: usize,\n\n notify: NotifySender,\n\n self_notify: SubSender,\n\n sub: SubReceiver,\n\n topic: String,\n\n running: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Broker {\n\n pub fn new() -> Broker {\n\n Broker {\n", "file_path": "flow-rs/src/broker.rs", "rank": 48, "score": 72256.52904914015 }, { "content": "#[flow_rs::ctor]\n\nfn init() {\n\n let mut initialize = crate::registry::INITIALIZE.write().unwrap();\n\n initialize.push(Box::new(|id| {\n\n let mut resources = GLOBAL_RESOURCES.write().unwrap();\n\n resources\n\n .entry(id)\n\n .or_insert_with(|| ResourceCollection::new(id));\n\n }));\n\n let mut finalize = crate::registry::FINALIZE.write().unwrap();\n\n finalize.push(Box::new(|id| {\n\n let mut resources = GLOBAL_RESOURCES.write().unwrap();\n\n resources.remove(&id);\n\n }));\n\n}\n\n\n\nimpl ResourceCollection {\n\n pub(crate) fn new(local_key: u64) -> ResourceCollection {\n\n ResourceCollection {\n\n local_key,\n\n resources: Default::default(),\n", "file_path": "flow-rs/src/resource.rs", "rank": 49, "score": 71731.84600430155 }, { "content": "#[derive(Clone)]\n\nstruct State {\n\n ty: RespTy,\n\n mapping: Arc<Mutex<Mapping>>,\n\n out: Sender,\n\n counter: Arc<AtomicU64>,\n\n}\n\n\n\n#[post(\"/analyze/{extra_data}\")]\n\n#[openapi(summary = \"analyze an image\")]\n\nasync fn analyze(\n\n #[data] state: State,\n\n img: Image,\n\n extra_data: String,\n\n) -> Result<Either<Image, impl Reply>, Rejection> {\n\n let img = img.into_bgr8();\n\n let id = state.id();\n\n\n\n let pyobject: PyObject = Python::with_gil(|py| -> PyResult<_> {\n\n let data = img.as_raw();\n\n let ndarray =\n", "file_path": "flow-plugins/src/image_server.rs", "rank": 50, "score": 71079.85381864759 }, { "content": "#[inputs(inp)]\n\n#[outputs(out:[])]\n\n#[derive(Node, Actor, Default)]\n\nstruct Demux {}\n\n\n\nimpl Demux {\n\n fn new(_: String, _: &Table) -> Demux {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n if let Ok(msg) = self.inp.recv_any().await {\n\n let id = msg\n\n .info()\n\n .to_addr\n\n .expect(\"the envelope has no destination address\");\n\n let id = id as usize;\n\n self.out[id].send_any(msg).await.ok();\n\n }\n\n Ok(())\n", "file_path": "flow-rs/src/node/demux.rs", "rank": 51, "score": 71079.85381864759 }, { "content": "#[derive(Clone)]\n\nstruct State {\n\n mapping: Arc<Mutex<HashMap<u64, (VideoDescp, Sender)>>>,\n\n sender: flow_rs::rt::channel::Sender<(u64, String, oneshot::Sender<RwebResult>)>,\n\n counter: Arc<AtomicU64>,\n\n}\n\n\n", "file_path": "flow-plugins/src/video_server.rs", "rank": 52, "score": 71079.85381864759 }, { "content": "#[inputs(inp)]\n\n#[outputs(out)]\n\n#[derive(Node, Actor, Default)]\n\nstruct 
Reorder {\n\n cache: BTreeMap<u64, SealedEnvelope>,\n\n seq_id: u64,\n\n}\n\n\n\nimpl Reorder {\n\n fn new(_: String, _: &Table) -> Reorder {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n if let Ok(msg) = self.inp.recv_any().await {\n\n let id = msg\n\n .info()\n\n .partial_id\n\n .expect(\"partial_id required by reorder\");\n\n assert!(id >= self.seq_id);\n", "file_path": "flow-rs/src/node/reorder.rs", "rank": 53, "score": 71079.85381864759 }, { "content": "#[derive(Node)]\n\nstruct Shared {\n\n nodes: Vec<Box<dyn Actor>>,\n\n rx: ReceiverT<SharedConns>,\n\n inputs: HashMap<String, Arc<Sender>>,\n\n outputs: HashMap<String, Receiver>,\n\n}\n\n\n\npub struct SharedStopNotify(pub oneshot::Receiver<rt::task::JoinHandle<()>>);\n\n// is safe because we visit it only in main thread\n\nunsafe impl Sync for SharedStopNotify {}\n\ncrate::collect!(String, SharedStopNotify);\n\n\n\n#[derive(Clone)]\n\npub struct SharedProxy {\n\n ty: String,\n\n conns: SharedConns,\n\n tx: SenderT<SharedConns>,\n\n inputs: Vec<String>,\n\n outputs: Vec<String>,\n\n}\n", "file_path": "flow-rs/src/node/shared.rs", "rank": 54, "score": 71079.85381864759 }, { "content": "struct Messages {\n\n mapping: HashMap<u64, Vec<String>>,\n\n}\n\n\n", "file_path": "flow-plugins/src/video_server.rs", "rank": 55, "score": 71079.85381864759 }, { "content": "#[inputs(inp)]\n\n#[outputs(out)]\n\n#[derive(Node, Actor, Default)]\n\nstruct Transform {}\n\n\n\nimpl Transform {\n\n fn new(_name: String, _args: &Table) -> Transform {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n if let Ok(msg) = self.inp.recv_any().await {\n\n self.out.send_any(msg).await.ok();\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nnode_register!(\"Transform\", Transform);\n\n\n", "file_path": "flow-rs/src/node/transform.rs", "rank": 56, "score": 71079.85381864759 }, { "content": "fn ports_expand(\n\n prefix: &str,\n\n mut item_struct: ItemStruct,\n\n fields: Vec<proc_macro2::TokenStream>,\n\n names: Vec<syn::LitStr>,\n\n) -> proc_macro2::TokenStream {\n\n for field in fields {\n\n utils::add_field(&mut item_struct, field).unwrap();\n\n }\n\n let ident = &item_struct.ident;\n\n let func_ident = lit::ident(format!(\"{}_name\", prefix));\n\n let (imp_g, ty_g, where_g) = item_struct.generics.split_for_impl();\n\n quote! 
{\n\n #item_struct\n\n impl#imp_g #ident#ty_g #where_g {\n\n fn #func_ident() -> Vec<String> {\n\n vec![#(#names),*].into_iter().map(|n: &str| n.to_owned()).collect()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "flow-derive/src/lib.rs", "rank": 57, "score": 70520.07322806458 }, { "content": "fn main() {}\n", "file_path": "flow-derive/examples/node_derive.rs", "rank": 58, "score": 70520.07322806458 }, { "content": "#[derive(Clone)]\n\nstruct DynConns {\n\n name: u64,\n\n inputs: HashMap<String, Sender>,\n\n outputs: HashMap<String, Receiver>,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct DynPorts<V> {\n\n local_key: u64,\n\n target: String,\n\n cap: usize,\n\n broker: Option<BrokerClient>,\n\n cache: HashMap<u64, V>,\n\n _v_holder: PhantomData<V>,\n\n}\n\n\n\nimpl<V> DynPorts<V> {\n\n pub fn new(local_key: u64, target: String, cap: usize, broker: BrokerClient) -> DynPorts<V> {\n\n DynPorts {\n\n local_key,\n", "file_path": "flow-rs/src/node/port.rs", "rank": 59, "score": 69935.53675741263 }, { "content": "#[inputs(inp)]\n\n#[outputs(out:dyn)]\n\n#[derive(Node, Actor, Default)]\n\nstruct DynDemux {\n\n tasks: HashMap<u64, JoinHandle<()>>,\n\n resources: Option<ResourceCollection>,\n\n}\n\n\n\nimpl DynDemux {\n\n fn new(_: String, _: &Table) -> DynDemux {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, resources: ResourceCollection) {\n\n self.resources = Some(resources);\n\n }\n\n async fn finalize(&mut self) {\n\n for (_, task) in std::mem::take(&mut self.tasks) {\n\n task.await;\n\n }\n\n }\n\n\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n", "file_path": "flow-rs/src/node/demux.rs", "rank": 60, "score": 69935.53675741263 }, { "content": "#[inputs(inp)]\n\n#[outputs(out:dyn)]\n\n#[derive(Node, Default)]\n\nstruct DynOutTransform {}\n\n\n\nimpl DynOutTransform {\n\n fn new(_name: String, _args: &Table) -> DynOutTransform {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl Actor for DynOutTransform {\n\n fn start(mut self: Box<Self>, _: Context, _: ResourceCollection) -> rt::task::JoinHandle<()> {\n\n let (s, r) = rt::channel::unbounded();\n\n rt::task::spawn(async move {\n\n while let Ok((_, out)) = self.out.fetch().await {\n\n let inp = self.inp.clone();\n\n s.send(rt::task::spawn(async move {\n\n let mut empty_n = 0;\n\n loop {\n\n while let Ok(msg) = inp.recv_any().await {\n\n out.send_any(msg).await.ok();\n\n }\n", "file_path": "flow-rs/src/node/transform.rs", "rank": 61, "score": 69935.53675741263 }, { "content": "struct Context {\n\n thread: *mut ffi::PyThreadState,\n\n ctx: PyThreadStateUnlimited,\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/context.rs", "rank": 62, "score": 69935.53675741263 }, { "content": "#[inputs(inp: dyn)]\n\n#[outputs(out)]\n\n#[derive(Node, Default)]\n\nstruct DynInTransform {}\n\n\n\nimpl DynInTransform {\n\n fn new(_name: String, _args: &Table) -> DynInTransform {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl Actor for DynInTransform {\n\n fn start(self: Box<Self>, _: Context, _: ResourceCollection) -> rt::task::JoinHandle<()> {\n\n let (s, r) = rt::channel::unbounded();\n\n rt::task::spawn(async move {\n\n while let Ok((_, inp)) = self.inp.fetch().await {\n\n let out = self.out.clone();\n\n s.send(rt::task::spawn(async move {\n\n while !inp.is_closed() {\n\n while let Ok(msg) = inp.recv_any().await {\n\n out.send_any(msg).await.ok();\n\n }\n\n assert!(\n", "file_path": "flow-rs/src/node/transform.rs", "rank": 63, "score": 69935.53675741263 }, { "content": "#[inputs(inp:dyn)]\n\n#[outputs(out:dyn)]\n\n#[derive(Node, Actor, 
Default)]\n\nstruct VideoServer {\n\n port: u16,\n\n resources: Option<ResourceCollection>,\n\n}\n\n\n", "file_path": "flow-plugins/src/video_server.rs", "rank": 64, "score": 69935.53675741263 }, { "content": "#[inputs]\n\n#[outputs(out)]\n\n#[derive(Node, Actor, Default)]\n\nstruct NoopProducer {}\n\n\n\nimpl NoopProducer {\n\n fn new(_name: String, _args: &Table) -> NoopProducer {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\nnode_register!(\"NoopProducer\", NoopProducer);\n\n\n", "file_path": "flow-rs/src/node/noop.rs", "rank": 65, "score": 69935.53675741263 }, { "content": "#[inputs]\n\n#[outputs(out)]\n\n#[derive(Node, Actor, Default)]\n\nstruct NeverOpr {}\n\n\n\nimpl NeverOpr {\n\n fn new(_name: String, _: &Table) -> NeverOpr {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, ctx: &Context) -> Result<()> {\n\n ctx.wait().await;\n\n Ok(())\n\n }\n\n}\n\n\n\nnode_register!(\"NeverOpr\", NeverOpr);\n", "file_path": "flow-rs/tests/nodes_ext.rs", "rank": 66, "score": 69935.53675741263 }, { "content": "#[inputs(inp)]\n\n#[outputs(out)]\n\n#[derive(Node, Actor)]\n\nstruct ImageServer {\n\n port: u16,\n\n ty: RespTy,\n\n}\n\n\n", "file_path": "flow-plugins/src/image_server.rs", "rank": 67, "score": 69935.53675741263 }, { "content": "#[inputs(inp)]\n\n#[outputs]\n\n#[derive(Node, Actor, Default)]\n\nstruct NoopConsumer {}\n\n\n\nimpl NoopConsumer {\n\n fn new(_name: String, _args: &Table) -> NoopConsumer {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n self.inp.recv_any().await.ok();\n\n Ok(())\n\n }\n\n}\n\n\n\nnode_register!(\"NoopConsumer\", NoopConsumer);\n", "file_path": "flow-rs/src/node/noop.rs", "rank": 68, "score": 69935.53675741263 }, { "content": "#[inputs(inp)]\n\n#[outputs(out)]\n\n#[derive(Node, Actor, Default)]\n\nstruct ErrorOpr {}\n\n\n\nimpl ErrorOpr {\n\n fn new(_name: String, _: &Table) -> ErrorOpr {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n Err(anyhow::anyhow!(\"error\"))\n\n }\n\n}\n\n\n\nnode_register!(\"ErrorOpr\", ErrorOpr);\n\n\n", "file_path": "flow-rs/tests/nodes_ext.rs", "rank": 69, "score": 69935.53675741263 }, { "content": "#[derive(Clone)]\n\nstruct SharedConns {\n\n inputs: HashMap<String, Receiver>,\n\n outputs: HashMap<String, Sender>,\n\n}\n\n\n", "file_path": "flow-rs/src/node/shared.rs", "rank": 70, "score": 69935.53675741263 }, { "content": "#[inputs(a, b)]\n\n#[outputs(c)]\n\n#[derive(Node, Actor, Default)]\n\nstruct BinaryOpr {}\n\n\n\nimpl BinaryOpr {\n\n fn new(_name: String, _args: &Table) -> BinaryOpr {\n\n Default::default()\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n let mut recv_a = FuturesUnordered::new();\n\n recv_a.push(self.a.recv_any());\n\n let mut recv_b = FuturesUnordered::new();\n\n recv_b.push(self.b.recv_any());\n\n loop {\n\n select! 
{\n\n a = recv_a.select_next_some() => {\n\n if let Ok(a) = a {\n\n self.c.send_any(a).await.ok();\n\n recv_a.push(self.a.recv_any());\n", "file_path": "flow-rs/tests/nodes_ext.rs", "rank": 71, "score": 69935.53675741263 }, { "content": "struct PortUtility {\n\n ty: interlayer::PortTy,\n\n mapping: fn(&str) -> String,\n\n}\n\nstatic MAPPING: &[PortUtility] = &[\n\n PortUtility {\n\n ty: interlayer::PortTy::Unit,\n\n mapping: |p| p.to_owned(),\n\n },\n\n PortUtility {\n\n ty: interlayer::PortTy::List,\n\n mapping: |p| format!(\"[{}]\", p),\n\n },\n\n PortUtility {\n\n ty: interlayer::PortTy::Dyn,\n\n mapping: |p| format!(\"dyn@{}\", p),\n\n },\n\n];\n\n\n", "file_path": "flow-rs/src/config/mod.rs", "rank": 72, "score": 69935.53675741263 }, { "content": "#[derive(Serialize)]\n\nstruct VideoDescp {\n\n id: u64,\n\n url: String,\n\n}\n\n\n", "file_path": "flow-plugins/src/video_server.rs", "rank": 73, "score": 69935.53675741263 }, { "content": "#[allow(dead_code)]\n\n#[inputs(first, second:dyn, third:[])]\n\n#[outputs(front, last:dyn)]\n\n#[derive(Node, Actor, Default)]\n\nstruct SubNode {}\n\n\n\nimpl SubNode {\n\n fn new(_: String, _args: &toml::value::Table) -> Self {\n\n SubNode {\n\n ..Default::default()\n\n }\n\n }\n\n\n\n async fn initialize(&mut self, _: ResourceCollection) {}\n\n async fn finalize(&mut self) {}\n\n\n\n async fn exec(&mut self, _: &Context) -> Result<()> {\n\n let _: Envelope<f32> = self.first.recv::<f32>().await.unwrap();\n\n Ok(())\n\n }\n\n}\n\n\n\nnode_register!(\"sub\", SubNode);\n\n\n", "file_path": "flow-derive/examples/node_derive.rs", "rank": 74, "score": 69935.53675741263 }, { "content": "#[derive(Serialize)]\n\nstruct NodeQps {\n\n name: String,\n\n qps: HashMap<String, (usize, usize)>, // size, qps\n\n is_block: bool,\n\n}\n\n\n\nimpl Graph {\n\n pub(super) fn dmon(&self) -> JoinHandle<()> {\n\n let conns: Vec<_> = self.conns.values().cloned().collect();\n\n let graph_ty = self.ctx.ty.clone();\n\n let mut first = true;\n\n crate::rt::task::spawn(async move {\n\n loop {\n\n while !QPS.enable.load(Ordering::Relaxed) {\n\n first = true;\n\n QPS.wait().await;\n\n }\n\n let args = QPS_args\n\n .read()\n\n .unwrap()\n", "file_path": "flow-rs/src/graph/debug.rs", "rank": 75, "score": 69935.53675741263 }, { "content": "fn translate_conn(\n\n name: &str,\n\n p: presentation::Connection,\n\n nodes: &mut HashMap<String, interlayer::Node>,\n\n shared_nodes: &mut HashMap<String, interlayer::Node>,\n\n) -> Result<interlayer::Connection> {\n\n let mut rx = vec![];\n\n let mut tx = vec![];\n\n\n\n if p.ports.is_empty() {\n\n return Err(anyhow!(\"encountered an unused connections\"));\n\n }\n\n\n\n for port_s in p.ports {\n\n let ((n, p), tag) = interlayer::Port::parse(port_s.as_str())?;\n\n let mut parse = |node: &mut interlayer::Node| {\n\n let mut find = false;\n\n for utility in MAPPING {\n\n let p = (utility.mapping)(p);\n\n let port = interlayer::Port {\n", "file_path": "flow-rs/src/config/mod.rs", "rank": 76, "score": 69381.14584611122 }, { "content": "fn decode_video(\n\n id: u64,\n\n path: impl AsRef<Path>,\n\n sender: &Sender,\n\n) -> Result<(), ffmpeg_next::Error> {\n\n ONCE_INIT.call_once(|| {\n\n ffmpeg_next::init().unwrap();\n\n });\n\n\n\n let mut ictx = input(&path)?;\n\n\n\n let input = ictx\n\n .streams()\n\n .best(Type::Video)\n\n .ok_or(ffmpeg_next::Error::StreamNotFound)?;\n\n\n\n let video_stream_index = input.index();\n\n\n\n let mut decoder = input.codec().decoder().video()?;\n\n\n", "file_path": "flow-plugins/src/video_server.rs", "rank": 77, 
"score": 69381.14584611122 }, { "content": "struct NodePlugin {\n\n local_key: u64,\n\n params: RegistryNodeParams,\n\n}\n\n\n\nimpl NodePlugin {\n\n fn new(local_key: u64, params: RegistryNodeParams) -> NodePlugin {\n\n NodePlugin { local_key, params }\n\n }\n\n\n\n fn boxed(self) -> Box<dyn Plugin> {\n\n Box::new(self)\n\n }\n\n}\n\n\n\nimpl Plugin for NodePlugin {\n\n fn submit(&self) {\n\n let params = self.params.clone();\n\n crate::node::NodeSlice::registry_local()\n\n .get(self.local_key)\n", "file_path": "flow-rs/src/loader/python/mod.rs", "rank": 78, "score": 68858.00268753519 }, { "content": "struct PythonLoader;\n\nconst ERR_MSG: &str = \"python plugin parse fault\";\n\n\n\nimpl Loader for PythonLoader {\n\n fn load(\n\n &self,\n\n local_key: u64,\n\n module_path: &Path,\n\n plugin_path: &Path,\n\n ) -> Result<Vec<Box<dyn Plugin>>> {\n\n pyo3::prepare_freethreaded_python();\n\n let mut plugins = vec![];\n\n\n\n let module_name = path_to_module(module_path, plugin_path)?;\n\n\n\n Python::with_gil(|py| -> PyResult<_> {\n\n let module_path = module_path.display().to_string();\n\n let syspath: &PyList = py.import(\"sys\")?.getattr(\"path\")?.try_into()?;\n\n if !syspath\n\n .iter()\n", "file_path": "flow-rs/src/loader/python/mod.rs", "rank": 79, "score": 68858.00268753519 }, { "content": "struct ResourcePlugin {\n\n local_key: u64,\n\n name: String,\n\n res: PyObject,\n\n}\n\n\n\nimpl ResourcePlugin {\n\n fn boxed(self) -> Box<dyn Plugin> {\n\n Box::new(self)\n\n }\n\n}\n\n\n\nimpl Plugin for ResourcePlugin {\n\n fn submit(&self) {\n\n let res = self.res.clone();\n\n crate::resource::ResourceSlice::registry_local()\n\n .get(self.local_key)\n\n .insert(\n\n self.name.clone(),\n\n crate::resource::ResourceSlice {\n", "file_path": "flow-rs/src/loader/python/mod.rs", "rank": 80, "score": 68858.00268753519 }, { "content": "#[pyclass(name = \"Waker\")]\n\nstruct PyWaker {\n\n chan: Option<oneshot::Sender<PyObject>>,\n\n callback: Option<Py<PyFunction>>,\n\n}\n\n\n\n#[pymethods]\n\nimpl PyFuture {\n\n fn wait(&mut self, py: Python) -> PyObject {\n\n if let Some(chan) = std::mem::take(&mut self.chan) {\n\n with_context(py, || wait(chan)).unwrap()\n\n } else {\n\n py.None()\n\n }\n\n }\n\n\n\n fn cancel(&mut self) {\n\n self.chan = None;\n\n }\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/utils.rs", "rank": 81, "score": 68858.00268753519 }, { "content": "struct ContextPool {\n\n pool: Vec<Context>,\n\n freelist: Vec<usize>,\n\n}\n\n\n\nimpl ContextPool {\n\n fn store(&mut self) -> usize {\n\n let id = self.freelist.pop().unwrap_or(self.pool.len());\n\n if id == self.pool.len() {\n\n self.pool.push(Context {\n\n thread: std::ptr::null_mut(),\n\n ctx: Default::default(),\n\n })\n\n }\n\n unsafe {\n\n let context = self.pool.get_unchecked_mut(id);\n\n context.thread = ffi::PyThreadState_Get();\n\n context.ctx = unlimited::store(context.thread);\n\n }\n\n id\n", "file_path": "flow-rs/src/loader/python/context.rs", "rank": 82, "score": 68858.00268753519 }, { "content": "#[pyclass(name = \"Future\")]\n\nstruct PyFuture {\n\n chan: Option<oneshot::Receiver<PyObject>>,\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/utils.rs", "rank": 83, "score": 68858.00268753519 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\nstruct PyThreadStateUnlimited3_789 {\n\n ob_base: ffi::PyObject,\n\n interp: *mut ffi::PyInterpreterState,\n\n frame: *mut ffi::PyFrameObject,\n\n recursion_depth: i32,\n\n overflowed: i8,\n\n recursion_critical: i8,\n\n stackcheck_counter: i32,\n\n tracing: i32,\n\n 
use_tracing: i32,\n\n c_profilefunc: *mut c_void,\n\n c_tracefunc: *mut c_void,\n\n c_profileobj: *mut ffi::PyObject,\n\n c_traceobj: *mut ffi::PyObject,\n\n curexc_type: *mut ffi::PyObject,\n\n curexc_value: *mut ffi::PyObject,\n\n curexc_traceback: *mut ffi::PyObject,\n\n exc_state: PyErrStackItem,\n\n exc_info: *mut PyErrStackItem,\n\n dict: *mut ffi::PyObject,\n", "file_path": "flow-rs/src/loader/python/unlimited.rs", "rank": 84, "score": 66885.60425492079 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\nstruct PyThreadStateUnlimited3_10 {\n\n ob_base: ffi::PyObject,\n\n interp: *mut ffi::PyInterpreterState,\n\n frame: *mut ffi::PyFrameObject,\n\n recursion_depth: i32,\n\n recursion_headroom: i32,\n\n stackcheck_counter: i32,\n\n\n\n tracing: i32,\n\n cframe: *mut CFrame,\n\n\n\n c_profilefunc: *mut c_void,\n\n c_tracefunc: *mut c_void,\n\n c_profileobj: *mut ffi::PyObject,\n\n c_traceobj: *mut ffi::PyObject,\n\n curexc_type: *mut ffi::PyObject,\n\n curexc_value: *mut ffi::PyObject,\n\n curexc_traceback: *mut ffi::PyObject,\n\n exc_state: PyErrStackItem,\n\n exc_info: *mut PyErrStackItem,\n", "file_path": "flow-rs/src/loader/python/unlimited.rs", "rank": 85, "score": 66885.60425492079 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\nstruct PyThreadStateUnlimited3_6 {\n\n ob_base: ffi::PyObject,\n\n interp: *mut ffi::PyInterpreterState,\n\n frame: *mut ffi::PyFrameObject,\n\n recursion_depth: i32,\n\n tracing: i32,\n\n use_tracing: i32,\n\n c_profilefunc: *mut c_void,\n\n c_tracefunc: *mut c_void,\n\n c_profileobj: *mut ffi::PyObject,\n\n c_traceobj: *mut ffi::PyObject,\n\n curexc_type: *mut ffi::PyObject,\n\n curexc_value: *mut ffi::PyObject,\n\n curexc_traceback: *mut ffi::PyObject,\n\n exc_type: *mut ffi::PyObject,\n\n exc_value: *mut ffi::PyObject,\n\n exc_traceback: *mut ffi::PyObject,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n", "file_path": "flow-rs/src/loader/python/unlimited.rs", "rank": 86, "score": 66885.60425492079 }, { "content": "/// Trait for interactiving with graph, which can be derived by `#[derive(Node)]`.\n\npub trait Node {\n\n fn set_port(&mut self, port_name: &str, tag: Option<u64>, channel: &ChannelStorage);\n\n fn set_port_dynamic(\n\n &mut self,\n\n local_key: u64,\n\n port_name: &str,\n\n target: String,\n\n cap: usize,\n\n broker: BrokerClient,\n\n );\n\n fn close(&mut self);\n\n fn is_allinp_closed(&self) -> bool;\n\n}\n\n\n", "file_path": "flow-rs/src/node/mod.rs", "rank": 87, "score": 65774.19027825573 }, { "content": "# MegFlow is Licensed under the Apache License, Version 2.0 (the \"License\")\n\n#\n\n# Copyright (c) 2019-2021 Megvii Inc. 
All rights reserved.\n\n#\n\n# Unless required by applicable law or agreed to in writing,\n\n# software distributed under the License is distributed on an\n\n# \"AS IS\" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n\n\n#!/usr/bin/env python\n\n# coding=utf-8\n\nimport inspect\n\nimport ast\n\nimport types\n\nimport os\n\nfrom collections import Iterable\n\n\n\ndef __register(name, inputs, outputs, exclusive, func):\n\n params = {}\n\n\n\n params['name'] = name\n\n params['code'] = func\n\n params['inputs'] = inputs\n\n params['outputs'] = outputs\n\n params['exclusive'] = exclusive\n\n\n\n return params\n\n\n\n\n\ndef __res_register(name, func):\n\n params = {}\n\n params['name'] = name\n\n params['code'] = func\n\n\n\n return params\n\n\n\n\n\n__NODES_PLUGINS = 'nodes'\n\n__RESOURCE_PLUGINS = 'resources'\n\n__PLUGINS_DEFAULT = {\n\n __NODES_PLUGINS: [],\n\n __RESOURCE_PLUGINS: [],\n\n}\n\n_PLUGINS_REGISTRY = __PLUGINS_DEFAULT.copy()\n\n\n\ndef register(name=None, inputs=[], outputs=[], exclusive=False):\n\n def decorator(func):\n\n nonlocal name\n\n global _PLUGINS_REGISTRY\n\n if name is None:\n\n name = func.__name__\n\n _PLUGINS_REGISTRY[__NODES_PLUGINS].append(__register(name, inputs, outputs, exclusive, func))\n\n return func\n\n\n\n return decorator\n\n\n\n\n\ndef res_register(name=None):\n\n def decorator(func):\n\n nonlocal name\n\n global _PLUGINS_REGISTRY\n\n if name is None:\n\n name = func.__name__\n\n _PLUGINS_REGISTRY[__RESOURCE_PLUGINS].append(__res_register(name, func))\n\n return func\n\n\n\n return decorator\n\n\n\n\n\ndef collect():\n\n global _PLUGINS_REGISTRY\n\n plugins = _PLUGINS_REGISTRY.copy()\n\n _PLUGINS_REGISTRY = __PLUGINS_DEFAULT.copy()\n\n return plugins\n", "file_path": "flow-python/megflow/registry.py", "rank": 88, "score": 65389.464562486515 }, { "content": "#[derive(Debug)]\n\nstruct RejectCause<T>\n\nwhere\n\n T: 'static + Debug + Send + Sync,\n\n{\n\n err: T,\n\n}\n\n\n\nimpl<T> rweb::reject::Reject for RejectCause<T> where T: 'static + Debug + Send + Sync {}\n\n\n\nimpl<T> From<T> for RejectCause<T>\n\nwhere\n\n T: 'static + Debug + Send + Sync,\n\n{\n\n fn from(err: T) -> Self {\n\n RejectCause { err }\n\n }\n\n}\n\n\n", "file_path": "flow-plugins/src/utils/error.rs", "rank": 89, "score": 64955.159960023724 }, { "content": "pub trait ChannelBase {\n\n fn is_closed(&self) -> bool;\n\n fn is_none(&self) -> bool;\n\n}\n\n\n\npub use error::*;\n\npub use receiver::*;\n\npub use sender::*;\n\npub use storage::*;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::prelude::DummyEnvelope;\n\n use crate::rt;\n\n\n\n #[rt::test]\n\n async fn test_dummy_msg() {\n\n let chan = ChannelStorage::unbound();\n\n assert_eq!(chan.sender_count(), 0);\n", "file_path": "flow-rs/src/channel/mod.rs", "rank": 90, "score": 64699.17423474154 }, { "content": "#!/usr/bin/env python\n\n# coding=utf-8\n\nimport os\n\nimport sys\n\nimport subprocess\n\nimport pkg_resources\n\n\n\n\n\ndef main():\n\n bin_exist = pkg_resources.resource_exists('megflow', 'run_with_plugins_inner')\n\n if not bin_exist:\n\n print('cannot find run_with_plugins, exit!')\n\n sys.exit(-1)\n\n bin_path = pkg_resources.resource_filename('megflow', 'run_with_plugins_inner')\n\n\n\n sys.argv[0] = bin_path\n\n ret = subprocess.Popen(sys.argv)\n\n ret.wait()\n\n\n\n\n\nif __name__ == '__main__':\n\n main()\n", "file_path": "flow-python/megflow/command_line.py", "rank": 91, "score": 64367.34835737875 }, { "content": "# MegFlow is Licensed under the Apache 
License, Version 2.0 (the \"License\")\n\n#\n\n# Copyright (c) 2019-2021 Megvii Inc. All rights reserved.\n\n#\n\n# Unless required by applicable law or agreed to in writing,\n\n# software distributed under the License is distributed on an\n\n# \"AS IS\" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n\n\n#!/usr/bin/env python\n\n# coding=utf-8\n\n\n\nfrom .registry import register, collect, res_register\n", "file_path": "flow-python/megflow/__init__.py", "rank": 92, "score": 63727.70229860163 }, { "content": "struct RegistryInner<ID, T> {\n\n elems: HashMap<ID, T>,\n\n}\n\npub struct Registry<ID, T> {\n\n inner: RwLock<RegistryInner<ID, Arc<T>>>,\n\n}\n\npub struct RegistryMap<ID, T> {\n\n inner: RwLock<HashMap<u64, Arc<Registry<ID, T>>>>,\n\n}\n\n\n\nimpl<ID: Eq + Hash, T> Default for Registry<ID, T> {\n\n fn default() -> Registry<ID, T> {\n\n Registry {\n\n inner: RwLock::new(RegistryInner {\n\n elems: HashMap::new(),\n\n }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "flow-rs/src/registry.rs", "rank": 93, "score": 61877.65041270181 }, { "content": "#[pyfunction]\n\nfn yield_now(py: Python) {\n\n with_context(py, || wait(crate::rt::task::yield_now()))\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/utils.rs", "rank": 94, "score": 59441.28808414452 }, { "content": "fn dump_dot(config: &Config) -> String {\n\n let mut buf = vec![];\n\n let mut rename = HashMap::new();\n\n let mut flatten_ports = HashMap::new();\n\n let mut dc = 0;\n\n let mut nc = 0;\n\n let mut mapping = |name| {\n\n let id = rename.entry(name).or_insert(nc);\n\n if id == &nc {\n\n nc += 1;\n\n }\n\n *id\n\n };\n\n let is_graph = |name| config.graphs.iter().any(|x| &x.name == name);\n\n // flatten subgraph i/o ports\n\n for graph in &config.graphs {\n\n for pname in graph.inputs.iter().chain(graph.outputs.iter()) {\n\n let conn = graph.connections.get(pname).unwrap();\n\n flatten_ports.insert(\n\n format!(\"{}:{}\", graph.name, pname),\n", "file_path": "flow-rs/src/config/graphviz.rs", "rank": 95, "score": 56981.03474043119 }, { "content": "pub trait Collect<ID>: Sized + 'static {\n\n fn registry_global() -> &'static Registry<ID, Self>;\n\n fn registry_local() -> &'static RegistryMap<ID, Self>;\n\n}\n", "file_path": "flow-rs/src/registry.rs", "rank": 96, "score": 56109.2768463736 }, { "content": "/// Trait for interactiving with schedule, which can be derived by `#[derive(Actor)]`.\n\npub trait Actor: Node + Send + 'static {\n\n /// Run the actor\n\n fn start(self: Box<Self>, _: Context, _: ResourceCollection) -> JoinHandle<()> {\n\n unimplemented!()\n\n }\n\n}\n\n\n\npub(crate) fn load(\n\n local_key: u64,\n\n config: &crate::config::interlayer::Node,\n\n) -> Result<Vec<Box<dyn Actor>>> {\n\n if let Some(node) = NodeSlice::registry_local()\n\n .get(local_key)\n\n .get(&config.entity.ty)\n\n {\n\n Ok((0..config.cloned.unwrap_or(1))\n\n .into_iter()\n\n .map(|_| (node.cons)(config.entity.name.clone(), &config.entity.args))\n\n .collect())\n\n } else if let Some(graph) = GraphSlice::registry_local()\n", "file_path": "flow-rs/src/node/mod.rs", "rank": 97, "score": 55151.138048189925 }, { "content": "pub trait AnyEnvelope: Any + DynClone + 'static {\n\n fn is_some(&self) -> bool;\n\n fn is_none(&self) -> bool;\n\n fn info(&self) -> &EnvelopeInfo;\n\n fn info_mut(&mut self) -> &mut EnvelopeInfo;\n\n}\n\nclone_trait_object!(AnyEnvelope);\n\n\n\nimpl dyn AnyEnvelope {\n\n pub fn is<T: AnyEnvelope>(&self) -> bool {\n\n let t = TypeId::of::<T>();\n\n let boxed = self.type_id();\n\n\n\n t == 
boxed\n\n }\n\n\n\n pub fn downcast_ref<T: AnyEnvelope>(&self) -> Option<&T> {\n\n if self.is::<T>() {\n\n unsafe { Some(&*(self as *const dyn AnyEnvelope as *const T)) }\n\n } else {\n", "file_path": "flow-rs/src/envelope/any_envelope.rs", "rank": 98, "score": 55151.138048189925 }, { "content": "#[pyfunction]\n\nfn sleep(py: Python, dur: u64) {\n\n with_context(py, || {\n\n wait(crate::rt::task::sleep(Duration::from_millis(dur)))\n\n })\n\n}\n\n\n", "file_path": "flow-rs/src/loader/python/utils.rs", "rank": 99, "score": 54045.668742335525 } ]
Rust
src/external_service/spread_sheet/token_manager.rs
tacogips/api-everywhere
02224c875afee72dc0ad82707e1ef1a0d7eff51a
use arc_swap::ArcSwap;
use chrono::{Duration, Local};
use hyper;
use log;
use once_cell::sync::OnceCell;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
use tokio::sync::broadcast;
use tokio::task::{JoinError, JoinHandle};
use tokio::time::timeout;
use yup_oauth2::{
    self as oauth,
    authenticator::{Authenticator, DefaultHyperClient, HyperClientBuilder},
    AccessToken, Error as OauthError,
};

type Result<T> = std::result::Result<T, GoogleTokenManagerError>;

#[derive(Error, Debug)]
pub enum GoogleTokenManagerError {
    #[error("oauth error:{0}")]
    OauthError(#[from] OauthError),
    #[error("failed to load service account file:{0}, {1}")]
    ServiceAccountFileLoadError(PathBuf, std::io::Error),
    #[error("invalid service account file:{0}, {1}")]
    InvalidServiceAccountFileError(PathBuf, std::io::Error),
    #[error("async task join error:{0}")]
    JoinError(#[from] JoinError),
}

static TOKEN_BUFFER_DURATION_TO_EXPIRE: OnceCell<Duration> = OnceCell::new();
fn get_token_buffer_duraiton_to_expire() -> &'static Duration {
    TOKEN_BUFFER_DURATION_TO_EXPIRE.get_or_init(|| Duration::minutes(2))
}

#[allow(dead_code)]
pub struct TokenManager<HttpConnector> {
    authenticator: Arc<Authenticator<HttpConnector>>,
    scopes: &'static [&'static str],
    inner_current_token: Arc<ArcSwap<AccessToken>>,
    token_refreshing_loop_jh: JoinHandle<()>,
}

impl<HttpConnector> TokenManager<HttpConnector>
where
    HttpConnector: hyper::client::connect::Connect + Clone + Send + Sync + 'static,
{
    pub async fn start(
        authenticator: Authenticator<HttpConnector>,
        scopes: &'static [&'static str],
        stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
        token_refresh_period: Option<Duration>,
    ) -> Result<Self> {
        let access_token = authenticator.token(scopes.as_ref()).await?;
        let current_token = Arc::new(ArcSwap::from(Arc::new(access_token)));
        let authenticator = Arc::new(authenticator);
        let token_refreshing_loop_jh = Self::periodically_refreshing_token(
            authenticator.clone(),
            current_token.clone(),
            scopes,
            stop_refreshing_notifyer_rx,
            token_refresh_period,
        )
        .await;

        let result = Self {
            authenticator,
            scopes,
            inner_current_token: current_token,
            token_refreshing_loop_jh,
        };
        Ok(result)
    }

    async fn periodically_refreshing_token(
        authenticator: Arc<Authenticator<HttpConnector>>,
        shared_token: Arc<ArcSwap<AccessToken>>,
        scopes: &'static [&'static str],
        mut stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
        token_refresh_period: Option<Duration>,
    ) -> JoinHandle<()> {
        let shared_token_current = shared_token.clone();
        let refresh_token_loop_jh = tokio::spawn(async move {
            let refresh_period = token_refresh_period
                .map(|p| p.to_std().unwrap())
                .unwrap_or_else(|| std::time::Duration::from_secs(30));
            loop {
                let has_stop_notified =
                    timeout(refresh_period, stop_refreshing_notifyer_rx.recv()).await;
                if has_stop_notified.is_ok() {
                    log::info!("exiting from auth token refreshing loop");
                    break;
                }

                let current_token = shared_token_current.load();
                let need_refresh = (**current_token)
                    .expiration_time()
                    .map(|expiration_time| {
                        expiration_time - *get_token_buffer_duraiton_to_expire() <= Local::now()
                    })
                    .unwrap_or(false);

                if need_refresh {
                    let new_token = Self::get_new_token(&authenticator, &scopes).await;
                    match new_token {
                        Ok(access_token) => shared_token.store(Arc::new(access_token)),
                        Err(e) => {
                            log::error!("failed to refresh token :{}", e);
                        }
                    }
                }
            }
            log::info!("exit from refreshing token loop")
        });
        refresh_token_loop_jh
    }

    #[allow(dead_code)]
    pub fn authenticator(&self) -> Arc<Authenticator<HttpConnector>> {
        Arc::clone(&self.authenticator)
    }

    #[allow(dead_code)]
    pub async fn force_refresh_token(&mut self) -> Result<()> {
        let new_token = Self::get_new_token(&self.authenticator, &self.scopes).await;
        match new_token {
            Ok(access_token) => {
                self.current_token().store(Arc::new(access_token));
                Ok(())
            }
            Err(e) => {
                log::error!("failed to refresh token :{}", e);
                return Err(e);
            }
        }
    }

    async fn get_new_token(
        authenticator: &Authenticator<HttpConnector>,
        scopes: &'static [&'static str],
    ) -> Result<AccessToken> {
        let new_token = authenticator.force_refreshed_token(scopes).await?;
        Ok(new_token)
    }

    pub async fn wait_until_refreshing_finished(self: Self) -> Result<()> {
        self.token_refreshing_loop_jh.await?;
        Ok(())
    }
}

impl<ANY> TokenManager<ANY> {
    pub fn current_token(&self) -> Arc<ArcSwap<AccessToken>> {
        Arc::clone(&self.inner_current_token)
    }
}

pub async fn token_manager_from_service_account_file(
    scopes: &'static [&'static str],
    service_account_cred_file: PathBuf,
    stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
    token_refresh_period: Option<Duration>,
) -> Result<TokenManager<<DefaultHyperClient as HyperClientBuilder>::Connector>> {
    let sa_key = oauth::read_service_account_key(&service_account_cred_file)
        .await
        .map_err(|e| {
            GoogleTokenManagerError::ServiceAccountFileLoadError(
                service_account_cred_file.clone(),
                e,
            )
        })?;

    let authenticator = oauth::ServiceAccountAuthenticator::builder(sa_key)
        .build()
        .await
        .map_err(|e| {
            GoogleTokenManagerError::InvalidServiceAccountFileError(service_account_cred_file, e)
        })?;

    TokenManager::start(
        authenticator,
        scopes,
        stop_refreshing_notifyer_rx,
        token_refresh_period,
    )
    .await
}

#[cfg(all(test, feature = "test-using-sa"))]
mod test {
    use super::super::scopes;
    use super::super::test::load_test_sa_file_path;
    use super::token_manager_from_service_account_file;
    use tokio::sync::broadcast;

    #[tokio::test]
    async fn load_token_manager_test() {
        let (_, rx) = broadcast::channel(1);
        let token_manager = token_manager_from_service_account_file(
            scopes::SHEET_READ_ONLY,
            load_test_sa_file_path(),
            rx,
            None,
        )
        .await;
        assert!(token_manager.is_ok());
        let token_manager = token_manager.unwrap();
        let token = token_manager.current_token();
        assert_ne!("", token.load().as_str());
    }
}
use arc_swap::ArcSwap;
use chrono::{Duration, Local};
use hyper;
use log;
use once_cell::sync::OnceCell;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
use tokio::sync::broadcast;
use tokio::task::{JoinError, JoinHandle};
use tokio::time::timeout;
use yup_oauth2::{
    self as oauth,
    authenticator::{Authenticator, DefaultHyperClient, HyperClientBuilder},
    AccessToken, Error as OauthError,
};

type Result<T> = std::result::Result<T, GoogleTokenManagerError>;

#[derive(Error, Debug)]
pub enum GoogleTokenManagerError {
    #[error("oauth error:{0}")]
    OauthError(#[from] OauthError),
    #[error("failed to load service account file:{0}, {1}")]
    ServiceAccountFileLoadError(PathBuf, std::io::Error),
    #[error("invalid service account file:{0}, {1}")]
    InvalidServiceAccountFileError(PathBuf, std::io::Error),
    #[error("async task join error:{0}")]
    JoinError(#[from] JoinError),
}

static TOKEN_BUFFER_DURATION_TO_EXPIRE: OnceCell<Duration> = OnceCell::new();
fn get_token_buffer_duraiton_to_expire() -> &'static Duration {
    TOKEN_BUFFER_DURATION_TO_EXPIRE.get_or_init(|| Duration::minutes(2))
}

#[allow(dead_code)]
pub struct TokenManager<HttpConnector> {
    authenticator: Arc<Authenticator<HttpConnector>>,
    scopes: &'static [&'static str],
    inner_current_token: Arc<ArcSwap<AccessToken>>,
    token_refreshing_loop_jh: JoinHandle<()>,
}

impl<HttpConnector> TokenManager<HttpConnector>
where
    HttpConnector: hyper::client::connect::Connect + Clone + Send + Sync + 'static,
{
    pub async fn start(
        authenticator: Authenticator<HttpConnector>,
        scopes: &'static [&'static str],
        stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
        token_refresh_period: Option<Duration>,
    ) -> Result<Self> {
        let access_token = authenticator.token(scopes.as_ref()).await?;
        let current_token = Arc::new(ArcSwap::from(Arc::new(access_token)));
        let authenticator = Arc::new(authenticator);
        let token_refreshing_loop_jh = Self::periodically_refreshing_token(
            authenticator.clone(),
            current_token.clone(),
            scopes,
            stop_refreshing_notifyer_rx,
            token_refresh_period,
        )
        .await;

        let result = Self {
            authenticator,
            scopes,
            inner_current_token: curre
        })?;

    let authenticator = oauth::ServiceAccountAuthenticator::builder(sa_key)
        .build()
        .await
        .map_err(|e| {
            GoogleTokenManagerError::InvalidServiceAccountFileError(service_account_cred_file, e)
        })?;

    TokenManager::start(
        authenticator,
        scopes,
        stop_refreshing_notifyer_rx,
        token_refresh_period,
    )
    .await
}

#[cfg(all(test, feature = "test-using-sa"))]
mod test {
    use super::super::scopes;
    use super::super::test::load_test_sa_file_path;
    use super::token_manager_from_service_account_file;
    use tokio::sync::broadcast;

    #[tokio::test]
    async fn load_token_manager_test() {
        let (_, rx) = broadcast::channel(1);
        let token_manager = token_manager_from_service_account_file(
            scopes::SHEET_READ_ONLY,
            load_test_sa_file_path(),
            rx,
            None,
        )
        .await;
        assert!(token_manager.is_ok());
        let token_manager = token_manager.unwrap();
        let token = token_manager.current_token();
        assert_ne!("", token.load().as_str());
    }
}
nt_token,
            token_refreshing_loop_jh,
        };
        Ok(result)
    }

    async fn periodically_refreshing_token(
        authenticator: Arc<Authenticator<HttpConnector>>,
        shared_token: Arc<ArcSwap<AccessToken>>,
        scopes: &'static [&'static str],
        mut stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
        token_refresh_period: Option<Duration>,
    ) -> JoinHandle<()> {
        let shared_token_current = shared_token.clone();
        let refresh_token_loop_jh = tokio::spawn(async move {
            let refresh_period = token_refresh_period
                .map(|p| p.to_std().unwrap())
                .unwrap_or_else(|| std::time::Duration::from_secs(30));
            loop {
                let has_stop_notified =
                    timeout(refresh_period, stop_refreshing_notifyer_rx.recv()).await;
                if has_stop_notified.is_ok() {
                    log::info!("exiting from auth token refreshing loop");
                    break;
                }

                let current_token = shared_token_current.load();
                let need_refresh = (**current_token)
                    .expiration_time()
                    .map(|expiration_time| {
                        expiration_time - *get_token_buffer_duraiton_to_expire() <= Local::now()
                    })
                    .unwrap_or(false);

                if need_refresh {
                    let new_token = Self::get_new_token(&authenticator, &scopes).await;
                    match new_token {
                        Ok(access_token) => shared_token.store(Arc::new(access_token)),
                        Err(e) => {
                            log::error!("failed to refresh token :{}", e);
                        }
                    }
                }
            }
            log::info!("exit from refreshing token loop")
        });
        refresh_token_loop_jh
    }

    #[allow(dead_code)]
    pub fn authenticator(&self) -> Arc<Authenticator<HttpConnector>> {
        Arc::clone(&self.authenticator)
    }

    #[allow(dead_code)]
    pub async fn force_refresh_token(&mut self) -> Result<()> {
        let new_token = Self::get_new_token(&self.authenticator, &self.scopes).await;
        match new_token {
            Ok(access_token) => {
                self.current_token().store(Arc::new(access_token));
                Ok(())
            }
            Err(e) => {
                log::error!("failed to refresh token :{}", e);
                return Err(e);
            }
        }
    }

    async fn get_new_token(
        authenticator: &Authenticator<HttpConnector>,
        scopes: &'static [&'static str],
    ) -> Result<AccessToken> {
        let new_token = authenticator.force_refreshed_token(scopes).await?;
        Ok(new_token)
    }

    pub async fn wait_until_refreshing_finished(self: Self) -> Result<()> {
        self.token_refreshing_loop_jh.await?;
        Ok(())
    }
}

impl<ANY> TokenManager<ANY> {
    pub fn current_token(&self) -> Arc<ArcSwap<AccessToken>> {
        Arc::clone(&self.inner_current_token)
    }
}

pub async fn token_manager_from_service_account_file(
    scopes: &'static [&'static str],
    service_account_cred_file: PathBuf,
    stop_refreshing_notifyer_rx: broadcast::Receiver<()>,
    token_refresh_period: Option<Duration>,
) -> Result<TokenManager<<DefaultHyperClient as HyperClientBuilder>::Connector>> {
    let sa_key = oauth::read_service_account_key(&service_account_cred_file)
        .await
        .map_err(|e| {
            GoogleTokenManagerError::ServiceAccountFileLoadError(
                service_account_cred_file.clone(),
                e,
            )
random
[ { "content": "/// \"A\" -> 0\n\n/// \"Z\" -> 25\n\n/// \"AA\"-> 26\n\npub fn col_alphabet_to_num(n: &str) -> Result<usize> {\n\n let chars: Vec<char> = n.chars().into_iter().collect();\n\n let mut digit = chars.len();\n\n let mut result = 0usize;\n\n for each in chars {\n\n let num_at_digit = alpha_to_num(each)?;\n\n\n\n if digit == 1 {\n\n result += num_at_digit;\n\n } else {\n\n result += (num_at_digit + 1) * 26;\n\n }\n\n\n\n digit -= 1;\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n\npub(crate) fn prepend_vec<T>(v: T, vs: &mut Vec<T>) {\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 0, "score": 173146.3526886949 }, { "content": "type Result<T> = std::result::Result<T, RangeError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum RangeError {\n\n #[error(\"column alphabet out of range:{0}\")]\n\n ColumnAlphabetOutOfRange(char),\n\n\n\n #[error(\"invalid range:{0}\")]\n\n InvalidRangeString(String),\n\n\n\n #[error(\n\n \"invalid range direction:start cell must be at upper-left or same position as end cell:{0}\"\n\n )]\n\n InvalidRangeDirection(String),\n\n\n\n #[error(\"invalid sheet name:{0}\")]\n\n InvalidSheetName(String),\n\n\n\n #[error(\"invalid cell ref:{0}\")]\n\n InvalidCellRefString(String),\n\n\n\n #[error(\"invalid range ref row index:{0}\")]\n\n InvalidRangeRefRow(String),\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 2, "score": 140377.2357533895 }, { "content": "type Result<T> = std::result::Result<T, CellError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum CellError {\n\n #[error(\"invalid cell value:{0}\")]\n\n InvalidCellValue(String),\n\n}\n\n\n\npub(crate) fn valid_quote_cell_value_regex() -> &'static Regex {\n\n VALID_HEADER_STR_VALUE.get_or_init(|| {\n\n let r = Regex::new(r#\"\"(?P<VALUE>.*)\"\"#).unwrap();\n\n r\n\n })\n\n}\n\n\n\npub struct Cell;\n\nimpl Cell {\n\n pub fn sanitize_str_value(s: &str) -> Result<String> {\n\n let re = valid_quote_cell_value_regex();\n\n\n", "file_path": "src/external_service/spread_sheet/cell.rs", "rank": 3, "score": 140377.2357533895 }, { "content": "type Result<T> = std::result::Result<T, HeaderError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum HeaderError {\n\n #[error(\"failed to fetch sheet info:{0}\")]\n\n FetchSheetInfoError(String),\n\n\n\n #[error(\"failed to find sheet name:{0}\")]\n\n FetchSheetNameError(String),\n\n\n\n #[error(\"spread sheet not found:{0}\")]\n\n SpreadSheetNotFound(String),\n\n\n\n #[error(\"multiple header not supported:{0}\")]\n\n UnsupportedMultipleHeader(String),\n\n\n\n #[error(\"failed to fetch header values from api:{0}\")]\n\n FetchHeaderApiError(String),\n\n\n\n #[error(\"failed to fetch header values ranges from api:{0}\")]\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 4, "score": 140377.2357533895 }, { "content": "type Result<T> = std::result::Result<T, ValueError>;\n\n\n\n#[cfg(feature = \"restricted\")]\n\npub const MAX_ROW_NUMBER_TO_READ_AT_ONCE: usize = 100;\n\n\n\npub const DEFAULT_ROW_NUMBER_TO_READ_AT_ONCE: usize = 100;\n\n\n\n#[cfg(not(feature = \"restricted\"))]\n\npub const MAX_ROW_NUMBER_TO_READ_AT_ONCE: usize = 10000;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum ValueError {\n\n #[error(\"failed to fetch values from api:{0}\")]\n\n FetchValueApiError(String),\n\n\n\n #[error(\"spread sheet not found:{0}\")]\n\n SpreadSheetNotFound(String),\n\n\n\n #[error(\"invalid row number start:{0} end:{1}\")]\n\n InvalidRowNumber(usize, usize),\n", "file_path": 
"src/external_service/spread_sheet/value.rs", "rank": 5, "score": 140377.2357533895 }, { "content": "type Result<T> = std::result::Result<T, SheetMetaError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum SheetMetaError {\n\n #[error(\"invalid spread sheet id:{0}\")]\n\n InvalidSheetId(String),\n\n\n\n #[error(\"invalid spread sheet tab id:{0}\")]\n\n InvalidTabId(String),\n\n\n\n #[error(\"invalid spread sheet url:{0}\")]\n\n InvalidSheetUrl(String),\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 6, "score": 138084.38078246397 }, { "content": "type Result<T> = std::result::Result<T, SpreadSheetError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum SpreadSheetError {\n\n #[error(\"header error :{0}\")]\n\n HeaderError(#[from] HeaderError),\n\n\n\n #[error(\"value error :{0}\")]\n\n ValueError(#[from] ValueError),\n\n}\n\n\n\nimpl SpreadSheetError {\n\n pub fn is_not_found(&self) -> bool {\n\n if let SpreadSheetError::HeaderError(e) = self {\n\n e.is_not_found()\n\n } else if let SpreadSheetError::ValueError(e) = self {\n\n e.is_not_found()\n\n } else {\n\n false\n\n }\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 7, "score": 138084.38078246397 }, { "content": "fn sanitize_sheet_name(sheet_name: &str) -> Result<Option<String>> {\n\n if sheet_name.is_empty() {\n\n Ok(None)\n\n } else {\n\n let re = quoted_sheet_name();\n\n re.captures(sheet_name).map_or_else(\n\n || {\n\n if sheet_name.contains(\"'\") {\n\n Err(RangeError::InvalidSheetName(sheet_name.to_string()))\n\n } else {\n\n Ok(Some(sheet_name.to_string()))\n\n }\n\n },\n\n |capture| {\n\n let sheet_name = &capture[\"SHEET_NAME\"];\n\n Ok(Some(sheet_name.replace(\"''\", \"'\")))\n\n },\n\n )\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 8, "score": 129386.20870086458 }, { "content": "pub fn is_col_range_overflow(col_num: usize) -> Result<(), usize> {\n\n // 130:A to DZ\n\n if col_num > MAX_COL {\n\n Err(MAX_COL)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/restricted.rs", "rank": 9, "score": 129329.65811719873 }, { "content": "pub fn is_row_range_overflow(row_num: usize) -> Result<(), usize> {\n\n if row_num > MAX_ROW {\n\n Err(MAX_ROW)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/restricted.rs", "rank": 10, "score": 129329.65811719873 }, { "content": "type Result<T> = std::result::Result<T, AppError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum AppError {\n\n #[error(\"invalid spread sheet id:{0}\")]\n\n ConfigError(#[from] config::ConfigError),\n\n\n\n #[error(\"google token managerr error \")]\n\n GoogleTokenManagerError,\n\n\n\n #[error(\"invalid token manager reference\")]\n\n InvalidTokenManagerReferenceError,\n\n\n\n #[error(\"hyper error\")]\n\n HyperError,\n\n}\n\n\n\n#[derive(Debug, Parser)]\n\npub struct Arg {\n\n #[clap(short, long, default_value = \"127.0.0.1\")]\n", "file_path": "src/main.rs", "rank": 11, "score": 129208.74583206826 }, { "content": "type Result<T> = std::result::Result<T, ConfigError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum ConfigError {\n\n #[error(\"no env vr {0}\")]\n\n NoEnvVar(String),\n\n\n\n #[error(\"no service account file\")]\n\n NoServiceAccountFile,\n\n\n\n #[error(\"service account load error\")]\n\n ServiceAccountLoadError(String),\n\n}\n\n\n\nmacro_rules! 
env_value {\n\n ($env_key:expr) => {\n\n env::var($env_key).map_err(|_| ConfigError::NoEnvVar($env_key.to_string()))\n\n };\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 12, "score": 127037.75066987218 }, { "content": "type Result<T> = std::result::Result<T, JsonStructureError>;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\npub enum JsonStructureError {\n\n #[error(\"invalid json structure definition :{0}\")]\n\n InvalidJsonStructureDef(String),\n\n\n\n #[error(\"invalid key :{0}\")]\n\n InvalidKey(String),\n\n\n\n #[error(\"value out of range. key:{0}, idx:{1}\")]\n\n ValueOutOfRange(String, usize),\n\n\n\n #[error(\"invalid structure state {0}\")]\n\n InvalidStructureState(String),\n\n}\n\n\n\n///\n\n/// [\"col1\",\"col2\",\"col3\"]\n\n/// =>\n", "file_path": "src/json_structure/mod.rs", "rank": 14, "score": 123007.30275850667 }, { "content": "#[derive(Debug)]\n\nstruct ColAlphabet<'a>(&'a str);\n\n\n\nimpl Display for CellRef {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n\n write!(\n\n f,\n\n \"{}{}\",\n\n num_to_alphabet_base_number(self.col_index),\n\n self.row_index + 1\n\n )\n\n }\n\n}\n\n\n\nimpl CellRef {\n\n pub fn new(col_index: usize, row_index: usize) -> Self {\n\n Self {\n\n col_index,\n\n row_index,\n\n }\n\n }\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 15, "score": 111634.86457007984 }, { "content": "fn reqwest_client() -> &'static ReqClient {\n\n REQWEST_CLIENT.get_or_init(|| ReqClient::new())\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 16, "score": 107755.40807944193 }, { "content": "fn quoted_sheet_name() -> &'static Regex {\n\n VALID_SHEET_NAME_RE.get_or_init(|| {\n\n let r = Regex::new(r\"'(?P<SHEET_NAME>.*)'\").unwrap();\n\n r\n\n })\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]\n\npub struct CellRef {\n\n pub col_index: usize, //zero_base\n\n pub row_index: usize, //zero_base\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 17, "score": 107755.40807944193 }, { "content": "fn valid_cell_ref_regex() -> &'static Regex {\n\n VALID_CELL_REF_RE.get_or_init(|| {\n\n let r = Regex::new(r\"(?P<RANGE_COL>[A-Z]+)(?P<RANGE_ROW>[0-9]+)\").unwrap();\n\n r\n\n })\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 18, "score": 105693.34036060609 }, { "content": "fn valid_sheet_id_regex() -> &'static Regex {\n\n VALID_SHEET_ID_RE.get_or_init(|| {\n\n let r = Regex::new(r\"^[A-Za-z0-9]+$\").unwrap();\n\n r\n\n })\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 19, "score": 105693.34036060609 }, { "content": "fn valid_sheet_url_regex() -> &'static Regex {\n\n VALID_SHEET_URL_RE.get_or_init(|| {\n\n let r =\n\n Regex::new(r\"^https://docs.google.com/spreadsheets/d/(?P<SHEET_ID>[A-Za-z0-9]+)/?.*$\")\n\n .unwrap();\n\n r\n\n })\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 20, "score": 105693.34036060609 }, { "content": "fn valid_sheet_url_with_tab_id_regex() -> &'static Regex {\n\n VALID_SHEET_URL_WITH_TAB_ID_RE.get_or_init(|| {\n\n let r = Regex::new(\n\n r\"^https://docs.google.com/spreadsheets/d/(?P<SHEET_ID>[A-Za-z0-9]+)/?.*gid=(?P<TAB_ID>[0-9]+)$\",\n\n )\n\n .unwrap();\n\n r\n\n })\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize)]\n\npub struct SheetIdOrName {\n\n pub tab_sheet_id: Option<u32>,\n\n pub tab_sheet_name: Option<String>,\n\n}\n\n\n\nimpl SheetIdOrName {\n\n pub fn is_need_get_sheet_name_by_id(&self) -> Option<u32> {\n\n if 
self.tab_sheet_name.is_none() {\n\n if let Some(sheet_id) = self.tab_sheet_id {\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 21, "score": 101900.99962660862 }, { "content": "/// 0 -> \"A\"\n\n/// 25 -> \"Z\"\n\n/// 26 -> \"AA\"\n\npub fn num_to_alphabet_base_number(mut n: usize) -> String {\n\n let mut result: Vec<char> = Vec::new();\n\n\n\n loop {\n\n let m = n % 26;\n\n let d = n / 26;\n\n let c = ALPHABET[m];\n\n prepend_vec(c, &mut result);\n\n if d == 0 {\n\n return result.iter().collect();\n\n }\n\n n = d - 1\n\n }\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 22, "score": 94410.29857095843 }, { "content": "type Key<'a> = &'a str;\n\n\n", "file_path": "src/json_structure/mod.rs", "rank": 23, "score": 93956.9622487404 }, { "content": "pub fn quit_signal_handler() -> (iterator::backend::Handle, SignalsInfo) {\n\n let signals = Signals::new(&[SIGHUP, SIGTERM, SIGINT, SIGQUIT]).unwrap();\n\n (signals.handle(), signals)\n\n}\n\n\n\npub async fn handle_quit_signals(signals: Signals) {\n\n let mut signals = signals.fuse();\n\n while let Some(signal) = signals.next().await {\n\n match signal {\n\n SIGHUP | SIGTERM | SIGINT | SIGQUIT => {\n\n // Shutdown the system;\n\n log::info!(\"shutdown signal has receipt\");\n\n break;\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "src/web/mod.rs", "rank": 24, "score": 77883.23808232154 }, { "content": "fn key_seq_to_key_str<'a>(keys: &[Key<'a>]) -> String {\n\n keys.join(\".\")\n\n}\n\n\n", "file_path": "src/json_structure/mod.rs", "rank": 25, "score": 65223.513255844206 }, { "content": "fn key_seq_to_sub_key_str<'a>(keys: &[Key<'a>], end_idx: usize) -> String {\n\n keys[0..end_idx].join(\".\")\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Object<'a> {\n\n pub keys: Vec<Key<'a>>,\n\n pub values: HashMap<Key<'a>, Structure<'a>>,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub fn new() -> Self {\n\n Self {\n\n keys: Vec::new(),\n\n values: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn from_strs(strs: &'a [&str]) -> Result<Object<'a>> {\n\n let mut obj = Self::new();\n", "file_path": "src/json_structure/mod.rs", "rank": 26, "score": 58059.38709049514 }, { "content": "fn default_header_range(sheet_name: Option<&SheetName>) -> RangeRef {\n\n RangeRef::new(\n\n sheet_name.map(|e| e.clone().into_inner()),\n\n CellRef::new(0, 0), //A1\n\n CellRef::new(25, 0), //Z1\n\n )\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct RecordHeader(String);\n\n\n\nimpl RecordHeader {\n\n fn new(s: &str) -> Result<Self> {\n\n let s = Cell::sanitize_str_value(s)?;\n\n Ok(Self(s))\n\n }\n\n\n\n pub fn as_str(&self) -> &str {\n\n &self.0\n\n }\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 27, "score": 55078.96636421329 }, { "content": "fn build_json<'a>(\n\n sheet_response: &'a mut SheetValueResponse,\n\n as_single_obj: bool,\n\n) -> Result<JsonValue, json_structure::JsonStructureError> {\n\n let headers: Vec<&str> = sheet_response\n\n .headers\n\n .values\n\n .iter()\n\n .map(|header_value| header_value.as_str())\n\n .collect();\n\n let strcuture_obj = json_structure::Object::from_strs(headers.as_slice())?;\n\n let structure_obj = json_structure::Structure::new_obj(strcuture_obj);\n\n\n\n if as_single_obj {\n\n // its confirmed that sheet_response is not empty\n\n let first_row = sheet_response.row_values.values.get(0).unwrap();\n\n let first_row: Vec<&JsonValue> = first_row.iter().map(|v| v.as_inner()).collect();\n\n let response_json = 
structure_obj.build_json(first_row.as_slice())?;\n\n Ok(response_json.into_json_value())\n\n } else {\n", "file_path": "src/web/spread_sheet_handler.rs", "rank": 28, "score": 48142.05038058222 }, { "content": "(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[820],{4977:function(n,_,u){(window.__NEXT_P=window.__NEXT_P||[]).push([\"/_error\",function(){return u(9185)}])}},function(n){n.O(0,[774,888,179],(function(){return _=4977,n(n.s=_);var _}));var _=n.O();_N_E=_}]);", "file_path": "src/playground_html/_next/static/chunks/pages/_error-2280fa386d040b66.js", "rank": 29, "score": 40231.392083112856 }, { "content": "pub mod spread_sheet;\n", "file_path": "src/external_service/mod.rs", "rank": 30, "score": 26300.40971296439 }, { "content": "\n\n let expected = RawHeaders {\n\n range: RangeRef::from_str(\"many headers!A1:CC1\").unwrap(),\n\n values,\n\n };\n\n\n\n assert_eq!(result, expected)\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_headers_from_sheet_4_invalid() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 31, "score": 24518.074134923267 }, { "content": " range: RangeRef::from_str(\"header escape!B2:D2\").unwrap(),\n\n values,\n\n };\n\n\n\n assert_eq!(result, expected)\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_headers_from_sheet_3() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 32, "score": 24517.745287549365 }, { "content": "use once_cell::sync::OnceCell;\n\nuse regex::Regex;\n\n\n\nstatic VALID_HEADER_STR_VALUE: OnceCell<Regex> = OnceCell::new();\n\n\n\nuse thiserror::Error;\n\n\n", "file_path": "src/external_service/spread_sheet/cell.rs", "rank": 33, "score": 24517.614319160726 }, { "content": "\n\nimpl Default for RowValues {\n\n fn default() -> Self {\n\n Self::new(vec![])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {}\n\n\n\n#[cfg(all(test, feature = \"test-using-sa\"))]\n\nmod cloud_test {\n\n\n\n use super::super::scopes;\n\n use super::super::test::*;\n\n use super::super::token_manager_from_service_account_file;\n\n use super::*;\n\n use tokio::sync::broadcast;\n\n\n\n use reqwest::Client;\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 34, "score": 24516.969167942163 }, { "content": " return Err(HeaderError::EmptyHeaderValueRanges(format!(\n\n \"latest range {}\",\n\n &range_str\n\n )))\n\n }\n\n };\n\n\n\n let result = Self::convert_from(value_ranges, specified_range)?;\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {}\n\n\n\n#[cfg(all(test, feature = \"test-using-sa\"))]\n\nmod cloud_test {\n\n\n\n use super::super::api::*;\n\n use super::super::scopes;\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 35, "score": 24516.429060661023 }, { "content": " None,\n\n get_expected_sheet_info(),\n\n );\n\n\n\n assert_eq!(expected, result.unwrap());\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_header_test_invalid_1() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n 
rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 36, "score": 24516.154155223197 }, { "content": "use super::*;\n\nuse reqwest::Client as ReqClient;\n\nuse std::str::FromStr;\n\nuse std::sync::Arc;\n\nuse thiserror::Error;\n\n\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 37, "score": 24515.88866024737 }, { "content": "mod api;\n\nmod cell;\n\nmod header;\n\nmod range;\n\nmod restricted;\n\nmod sheet;\n\nmod token_manager;\n\nmod value;\n\n\n\npub use api::*;\n\npub use cell::*;\n\npub use header::*;\n\nuse once_cell::sync::OnceCell;\n\npub use range::*;\n\nuse reqwest::Client as ReqClient;\n\n#[cfg(feature = \"restricted\")]\n\nuse restricted::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value as JsonValue;\n\npub use sheet::*;\n\nuse std::sync::Arc;\n\nuse thiserror::Error;\n\npub use token_manager::*;\n\npub use value::*;\n\n\n\nstatic REQWEST_CLIENT: OnceCell<ReqClient> = OnceCell::new();\n\n\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 38, "score": 24515.883923807258 }, { "content": "use super::*;\n\nuse once_cell::sync::OnceCell;\n\nuse regex::Regex;\n\nuse std::fmt::{Display, Formatter, Result as FmtResult};\n\nuse std::ptr;\n\nuse std::str::FromStr;\n\n\n\nconst ALPHABET: [char; 26] = [\n\n 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',\n\n 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',\n\n];\n\n\n\n// 'A' -> to index\n\npub(crate) fn alpha_to_num(c: char) -> Result<usize> {\n\n if (c as u8) < 65 || (c as u8) > 90 {\n\n return Err(RangeError::ColumnAlphabetOutOfRange(c));\n\n }\n\n Ok(c as usize - 65)\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 39, "score": 24515.67317595939 }, { "content": " assert_eq!(expected, result.unwrap());\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_header_test_valid_3() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 40, "score": 24515.043014103907 }, { "content": " let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n let sheet_not_exist_tag_id = 12345;\n\n\n\n let sheet_mata = SheetMeta::new(sheet_id.to_string(), Some(sheet_not_exist_tag_id), None);\n\n let result = HeaderSearchCondition::create(&client, token_manager, sheet_mata, None).await;\n\n assert!(result.is_err());\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_headers_from_sheet_1() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 41, "score": 24514.997908467663 }, { "content": " Self { values }\n\n }\n\n\n\n pub fn empty() -> Self {\n\n Self { values: vec![] }\n\n }\n\n\n\n pub fn push(&mut self, row: Vec<CellValue>) {\n\n self.values.push(row)\n\n }\n\n\n\n pub async fn read_values<HttpConnector>(\n\n client: &ReqClient,\n\n token_manager: Arc<TokenManager<HttpConnector>>,\n\n option: 
&ReadValueOption,\n\n ) -> Result<RowValues> {\n\n option.validate()?;\n\n\n\n let (start_col, end_col) = option.col_range;\n\n if start_col > end_col {\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 42, "score": 24514.64836197488 }, { "content": "\n\nimpl FromStr for RangeRef {\n\n type Err = RangeError;\n\n fn from_str(range_str: &str) -> std::result::Result<RangeRef, Self::Err> {\n\n let re = valid_range_regex();\n\n re.captures(range_str).map_or_else(\n\n || Err(RangeError::InvalidRangeString(range_str.to_string())),\n\n |capture| {\n\n let sheet_name = &capture.name(\"SHEET_NAME\");\n\n let sheet_name = match sheet_name.map(|name| name.as_str()) {\n\n Some(\"\") => None,\n\n Some(s @ _) => sanitize_sheet_name(s)?,\n\n None => None,\n\n };\n\n\n\n let start = {\n\n let start_range_col = &capture[\"START_RANGE_COL\"];\n\n let start_range_row = &capture[\"START_RANGE_ROW\"];\n\n let col_alpha = ColAlphabet(start_range_col);\n\n CellRef::from_row_and_col(&col_alpha, start_range_row)?\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 43, "score": 24514.534851549637 }, { "content": " let result = HeaderSearchCondition::create(&client, token_manager, sheet_mata, None).await;\n\n assert!(result.is_ok());\n\n\n\n let expected = HeaderSearchCondition::new(\n\n SpreadSheetId::new(TEST_SHEET1_ID.to_string()),\n\n Some(SheetName::new(\"empty_sheet\".to_string())),\n\n None,\n\n get_expected_sheet_info(),\n\n );\n\n\n\n assert_eq!(expected, result.unwrap());\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_header_test_valid_2() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 44, "score": 24514.438546782963 }, { "content": " fn from_str(cell_ref_str: &str) -> std::result::Result<CellRef, Self::Err> {\n\n let re = valid_cell_ref_regex();\n\n re.captures(cell_ref_str).map_or_else(\n\n || Err(RangeError::InvalidCellRefString(cell_ref_str.to_string())),\n\n |capture| {\n\n let start_range_col = &capture[\"RANGE_COL\"];\n\n let start_range_row = &capture[\"RANGE_ROW\"];\n\n let col_alpha = ColAlphabet(start_range_col);\n\n Ok(CellRef::from_row_and_col(&col_alpha, start_range_row)?)\n\n },\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 45, "score": 24514.35263355645 }, { "content": " }\n\n}\n\n\n\npub mod scopes {\n\n pub const SHEET_READ_ONLY: &[&'static str] =\n\n &[\"https://www.googleapis.com/auth/spreadsheets.readonly\"];\n\n}\n\n\n\npub struct FetchRowCondition {\n\n specific_row_idx: Option<usize>,\n\n pagination: Option<Pagination>,\n\n}\n\n\n\nimpl FetchRowCondition {\n\n pub fn with_specific_row_idx(row_idx: usize) -> Self {\n\n Self {\n\n specific_row_idx: Some(row_idx),\n\n pagination: None,\n\n }\n\n }\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 46, "score": 24514.17348033272 }, { "content": "use once_cell::sync::OnceCell;\n\nuse regex::Regex;\n\nuse serde::Serialize;\n\nuse thiserror::Error;\n\n\n\nstatic VALID_SHEET_ID_RE: OnceCell<Regex> = OnceCell::new();\n\nstatic VALID_SHEET_URL_RE: OnceCell<Regex> = OnceCell::new();\n\nstatic VALID_SHEET_URL_WITH_TAB_ID_RE: OnceCell<Regex> = OnceCell::new();\n\n\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 47, "score": 24513.926759630045 }, { "content": "\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\npub 
struct RangeRef {\n\n pub sheet_name: Option<String>,\n\n pub start: CellRef,\n\n pub end: CellRef,\n\n}\n\n\n\nimpl RangeRef {\n\n pub fn new(sheet_name: Option<String>, start: CellRef, end: CellRef) -> Self {\n\n Self {\n\n sheet_name,\n\n start,\n\n end,\n\n }\n\n }\n\n\n\n pub fn next_row_index(&self) -> usize {\n\n self.end.row_index + 1\n\n }\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 48, "score": 24513.80624974136 }, { "content": " sheet_name: Option<SheetName>,\n\n specified_cell_range: Option<(CellRef, CellRef)>,\n\n sheet_info: Sheet,\n\n ) -> Self {\n\n Self {\n\n spread_sheet_id,\n\n sheet_name,\n\n specified_cell_range,\n\n sheet_info,\n\n }\n\n }\n\n\n\n pub fn as_range(&self) -> Option<RangeRef> {\n\n match &self.specified_cell_range {\n\n Some((start, end)) => Some(RangeRef {\n\n sheet_name: self\n\n .sheet_name\n\n .as_ref()\n\n .map(|name| name.as_str().to_string()),\n\n start: start.clone(),\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 49, "score": 24513.320075316828 }, { "content": " let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(sheet_id.to_string(), Some(0), None);\n\n let result = HeaderSearchCondition::create(&client, token_manager, sheet_mata, None).await;\n\n assert!(result.is_ok());\n\n\n\n let expected = HeaderSearchCondition::new(\n\n SpreadSheetId::new(TEST_SHEET1_ID.to_string()),\n\n None,\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 50, "score": 24512.99879198708 }, { "content": "\n\n fn from_row_and_col<'a>(col_alpha: &ColAlphabet<'a>, row_num_str: &str) -> Result<Self> {\n\n let col_index = col_alphabet_to_num(&col_alpha.0)?;\n\n let row_num = row_num_str.parse::<usize>().map_err(|_e| {\n\n RangeError::InvalidCellRefString(Self::invalid_cell_error_msg(col_alpha, row_num_str))\n\n })?;\n\n\n\n let row_index = if row_num == 0 {\n\n return Err(RangeError::InvalidCellRefString(\n\n Self::invalid_cell_error_msg(col_alpha, row_num_str),\n\n ));\n\n } else {\n\n row_num - 1\n\n };\n\n\n\n Ok(Self {\n\n col_index,\n\n row_index,\n\n })\n\n }\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 51, "score": 24512.937975719484 }, { "content": "\n\n #[tokio::test]\n\n async fn get_values_test_valid_1() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let option =\n\n ReadValueOption::new(SpreadSheetId::new(sheet_id.to_string()), None, (0, 9), 1, 2);\n\n\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 52, "score": 24512.846867007152 }, { "content": " spread_sheet_id: SpreadSheetId,\n\n sheet_name: Option<SheetName>,\n\n col_range: (usize, usize),\n\n start_row_idx: usize,\n\n end_row_idx: usize,\n\n ) -> Self {\n\n Self {\n\n spread_sheet_id,\n\n sheet_name,\n\n col_range,\n\n start_row_idx,\n\n end_row_idx,\n\n }\n\n }\n\n pub fn validate(&self) -> Result<()> {\n\n let row_num = self.end_row_idx as i64 - self.start_row_idx as i64;\n\n if row_num < 0 {\n\n return 
Err(ValueError::InvalidRowNumber(\n\n self.start_row_idx,\n\n self.end_row_idx,\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 53, "score": 24512.805973770108 }, { "content": " }\n\n\n\n #[tokio::test]\n\n async fn get_header_test_valid_1() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(sheet_id.to_string(), Some(TEST_SHEET1_EMPTY_TAG_ID), None);\n\n\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 54, "score": 24512.742649163338 }, { "content": "\n\n pub fn with_pagination(offset: Option<usize>, limit: Option<usize>) -> Self {\n\n Self {\n\n specific_row_idx: None,\n\n pagination: Some(Pagination::new(offset, limit)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Pagination {\n\n offset: Option<usize>,\n\n limit: Option<usize>,\n\n}\n\n\n\nimpl Pagination {\n\n pub fn new(offset: Option<usize>, limit: Option<usize>) -> Self {\n\n Self { offset, limit }\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 55, "score": 24512.665732413574 }, { "content": " )\n\n }\n\n };\n\n sheet_meta.and_then(|sheet_meta| sheet_meta.validate())\n\n }\n\n\n\n pub fn validate(self) -> Result<Self> {\n\n let re = valid_sheet_id_regex();\n\n if !re.is_match(&self.spread_sheet_id) {\n\n return Err(SheetMetaError::InvalidSheetId(self.spread_sheet_id));\n\n };\n\n\n\n Ok(self)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 56, "score": 24512.465809919653 }, { "content": " }\n\n\n\n pub fn validate(&self) -> Result<()> {\n\n if !self.start.is_at_left_upper_or_same_as(&self.end) {\n\n return Err(RangeError::InvalidRangeDirection(format!(\n\n \"start cell is not at left right of end cell {}:{}\",\n\n self.start, self.end,\n\n )));\n\n }\n\n Ok(())\n\n }\n\n}\n\nimpl Display for RangeRef {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n\n match self.sheet_name.as_ref() {\n\n Some(sheet_name) => write!(f, \"'{}'!{}:{}\", sheet_name, self.start, self.end),\n\n None => write!(f, \"{}:{}\", self.start, self.end),\n\n }\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 57, "score": 24512.44322450341 }, { "content": " use super::super::test::*;\n\n use super::super::token_manager_from_service_account_file;\n\n use super::*;\n\n use reqwest::Client;\n\n use tokio::sync::broadcast;\n\n\n\n fn get_expected_sheet_info() -> Sheet {\n\n Sheet {\n\n spreadsheet_id: TEST_SHEET1_ID.to_string(),\n\n sheets: vec![\n\n SheetProperty {\n\n properties: SheetPropertyData {\n\n sheet_id: 0,\n\n title: \"grouping\".to_string(),\n\n index: 0,\n\n sheet_type: \"GRID\".to_string(),\n\n grid_properties: GridProperties {\n\n row_count: 1000,\n\n column_count: 28,\n\n frozen_row_count: None,\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 58, "score": 24512.35863068969 }, { "content": "use super::TokenManager;\n\nuse super::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::sync::Arc;\n\nuse thiserror::Error;\n\n\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 59, "score": 24512.34507393363 }, { "content": " if let Err(e) = 
is_col_range_overflow(header_range.end_col_index()) {\n\n return Err(HeaderError::ColIndexOutOfRescription(e));\n\n }\n\n\n\n if max_col_count_of_grid <= header_range.start_col_index() {\n\n break;\n\n }\n\n range_str = header_range.as_string();\n\n\n\n let sheet_values = get_sheet_value(\n\n &client,\n\n token_manager.clone(),\n\n &condition.spread_sheet_id,\n\n &range_str,\n\n None,\n\n None,\n\n None,\n\n )\n\n .await\n\n .map_err(|e| {\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 60, "score": 24512.137555776535 }, { "content": " self.end.col_index = other.end.col_index\n\n }\n\n\n\n if other.end.row_index > self.end.row_index {\n\n self.end.row_index = other.end.row_index\n\n }\n\n }\n\n\n\n pub fn col_range_size(&self) -> usize {\n\n self.end.col_index - self.start.col_index + 1\n\n }\n\n\n\n pub fn as_string(&self) -> String {\n\n format!(\"{}\", self)\n\n }\n\n\n\n pub fn shift_in_col(&mut self, shift: i32) -> Result<()> {\n\n let start_col_index = (self.start.col_index as i32) + shift;\n\n if start_col_index < 0 {\n\n return Err(RangeError::InvalidRangeRefRow(format!(\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 61, "score": 24511.968313759076 }, { "content": " end: end.clone(),\n\n }),\n\n\n\n None => None,\n\n }\n\n }\n\n\n\n pub async fn create<HttpConnector>(\n\n client: &ReqClient,\n\n token_manager: Arc<TokenManager<HttpConnector>>,\n\n meta: SheetMeta,\n\n specified_cell_range: Option<(CellRef, CellRef)>,\n\n ) -> Result<HeaderSearchCondition> {\n\n let spread_sheet_id = SpreadSheetId::new(meta.spread_sheet_id);\n\n\n\n let sheet_info = api::get_sheet(client, token_manager, &spread_sheet_id)\n\n .await\n\n .map_err(|e| {\n\n if e.is_not_found() {\n\n HeaderError::SpreadSheetNotFound(format!(\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 62, "score": 24511.672427779857 }, { "content": " pub fn new(\n\n spread_sheet_id: String,\n\n tab_sheet_id: Option<u32>,\n\n tab_sheet_name: Option<String>,\n\n ) -> Self {\n\n let sheet_id_or_name = SheetIdOrName {\n\n tab_sheet_id,\n\n tab_sheet_name,\n\n };\n\n\n\n Self {\n\n spread_sheet_id,\n\n sheet_id_or_name,\n\n }\n\n }\n\n\n\n pub fn from_url(url: &str) -> Result<SheetMeta> {\n\n let re = valid_sheet_url_with_tab_id_regex();\n\n let sheet_meta = re.captures(url).map_or_else(\n\n || Err(SheetMetaError::InvalidSheetUrl(url.to_string())),\n", "file_path": "src/external_service/spread_sheet/sheet.rs", "rank": 63, "score": 24511.487979167938 }, { "content": " let row_values =\n\n RowValues::read_values(&client, token_manager.clone(), &value_option).await?;\n\n\n\n return Ok(SheetValueResponse {\n\n headers,\n\n row_values,\n\n pagination: pagination_in_response,\n\n });\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[cfg(feature = \"test-using-sa\")]\n\n use std::path::PathBuf;\n\n\n\n pub const TEST_SHEET1 :&str= \"https://docs.google.com/spreadsheets/d/1HA4munsvl5UUlb9DKmJvhrwfGlSQ97hSQZf13M3ZO4Y/edit#gid=0\";\n\n pub const TEST_SHEET1_ID: &str = \"1HA4munsvl5UUlb9DKmJvhrwfGlSQ97hSQZf13M3ZO4Y\";\n\n\n\n pub const TEST_SHEET1_WITH_TAG_ID :&str= \"https://docs.google.com/spreadsheets/d/1HA4munsvl5UUlb9DKmJvhrwfGlSQ97hSQZf13M3ZO4Y/edit#gid=2089556915\";\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 64, "score": 24511.118568008795 }, { "content": "\n\npub async fn create_header_condition_from_sheet_meta<HttpConnector>(\n\n token_manager: Arc<TokenManager<HttpConnector>>,\n\n sheet_meta: SheetMeta,\n\n specified_cell_range: 
Option<(CellRef, CellRef)>,\n\n) -> Result<HeaderSearchCondition> {\n\n //TODO(tacogips) restriction\n\n let client = reqwest_client();\n\n let header_condition =\n\n HeaderSearchCondition::create(client, token_manager, sheet_meta, specified_cell_range)\n\n .await?;\n\n Ok(header_condition)\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct SheetValueResponse {\n\n pub headers: RawHeaders,\n\n pub row_values: RowValues,\n\n pub pagination: Option<Pagination>,\n\n}\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 65, "score": 24511.063445068005 }, { "content": " assert!(result.is_err());\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_range_ref_from_str() {\n\n {\n\n let result = RangeRef::from_str(\"A1:A2\");\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n assert_eq!(\n\n RangeRef {\n\n sheet_name: None,\n\n start: CellRef::new(0, 0),\n\n end: CellRef::new(0, 1),\n\n },\n\n result\n\n );\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 66, "score": 24510.841878929295 }, { "content": " ));\n\n }\n\n\n\n if row_num as usize > MAX_ROW_NUMBER_TO_READ_AT_ONCE {\n\n return Err(ValueError::TooManyRowNumber(\n\n MAX_ROW_NUMBER_TO_READ_AT_ONCE,\n\n row_num as usize,\n\n ));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\npub struct RowValues {\n\n pub values: Vec<Vec<CellValue>>,\n\n}\n\n\n\nimpl RowValues {\n\n pub fn new(values: Vec<Vec<CellValue>>) -> Self {\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 67, "score": 24510.808941462834 }, { "content": " }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet name 1'!A1:A2\");\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n assert_eq!(\n\n RangeRef {\n\n sheet_name: Some(\"sheet name 1\".to_string()),\n\n start: CellRef::new(0, 0),\n\n end: CellRef::new(0, 1),\n\n },\n\n result\n\n );\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet name 1'!A1:B2\");\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 68, "score": 24510.59206174244 }, { "content": " return Err(ValueError::InvalidColRange(start_col, end_col));\n\n }\n\n #[cfg(feature = \"restricted\")]\n\n if let Err(e) = is_row_range_overflow(end_col) {\n\n return Err(ValueError::RowIndexOutOfRescription(e));\n\n }\n\n\n\n let col_size = end_col - start_col + 1;\n\n\n\n let sheet_name = option.sheet_name.clone().map(|v| v.into_inner());\n\n\n\n let start = CellRef::new(start_col, option.start_row_idx);\n\n\n\n let end = CellRef::new(end_col, option.end_row_idx);\n\n\n\n let value_range = RangeRef::new(sheet_name.clone(), start, end);\n\n\n\n let sheet_values = get_sheet_value(\n\n &client,\n\n token_manager.clone(),\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 69, "score": 24510.582981038773 }, { "content": " sheet_mata,\n\n Some((\n\n CellRef::from_str(\"B2\").unwrap(),\n\n CellRef::from_str(\"D2\").unwrap(),\n\n )),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n let result = RawHeaders::read_raw_headers(&client, token_manager, &condition).await;\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n let values: Vec<RecordHeader> = vec![\n\n RecordHeader(\"\".to_string()),\n\n RecordHeader(r#\"\"\"#.to_string()),\n\n RecordHeader(\"\".to_string()),\n\n ];\n\n let expected = RawHeaders {\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 70, "score": 24510.55528679638 }, { "content": "\n\nimpl SheetValueResponse {\n\n pub fn 
is_empty(&self) -> bool {\n\n self.row_values.values.is_empty()\n\n }\n\n}\n\n\n\npub async fn fetch_sheet_value<HttpConnector>(\n\n token_manager: Arc<TokenManager<HttpConnector>>,\n\n header_search_condition: &HeaderSearchCondition,\n\n row_serach_condition: &FetchRowCondition,\n\n) -> Result<SheetValueResponse> {\n\n //TODO(tacogips) restriction\n\n let client = reqwest_client();\n\n\n\n let headers =\n\n RawHeaders::read_raw_headers(&client, token_manager.clone(), header_search_condition)\n\n .await?;\n\n\n\n let value_col_range = headers.range.col_range_indices();\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 71, "score": 24510.511110785363 }, { "content": "\n\n pub fn col_range_indices(&self) -> (usize, usize) {\n\n (self.start.col_index, self.end.col_index)\n\n }\n\n\n\n pub fn contains(&mut self, other: &RangeRef) -> bool {\n\n self.start.col_index <= other.start.col_index\n\n && self.start.row_index <= other.start.row_index\n\n && self.end.col_index >= other.end.col_index\n\n && self.end.row_index >= other.end.row_index\n\n }\n\n\n\n pub fn set_end_col_index(&mut self, col_index: usize) -> Result<()> {\n\n self.end.col_index = col_index;\n\n self.validate()?;\n\n Ok(())\n\n }\n\n\n\n pub fn start_col_index(&mut self) -> usize {\n\n self.start.col_index\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 72, "score": 24510.44175919413 }, { "content": " assert_eq!(\n\n RangeRef {\n\n sheet_name: Some(\"sheet name 1\".to_string()),\n\n start: CellRef::new(0, 0),\n\n end: CellRef::new(1, 1),\n\n },\n\n result\n\n );\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet name 1'!B2:B2\");\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n assert_eq!(\n\n RangeRef {\n\n sheet_name: Some(\"sheet name 1\".to_string()),\n\n start: CellRef::new(1, 1),\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 73, "score": 24510.190615435127 }, { "content": " log::debug!(\"fetching header range :{}\", header_range);\n\n\n\n let mut all_sheet_values: Option<SheetValues> = None;\n\n\n\n let max_col_count_of_grid = {\n\n let sheet_name = condition.sheet_name.as_ref().map(|s| s.as_str());\n\n match condition.sheet_info.find_property_by_name(sheet_name) {\n\n None => {\n\n return Err(HeaderError::UnknwonError(format!(\n\n \"sheet info not found:{:?}\",\n\n sheet_name\n\n )))\n\n }\n\n Some(property) => property.properties.grid_properties.column_count,\n\n }\n\n };\n\n\n\n let mut range_str: String = \"\".to_string();\n\n loop {\n\n #[cfg(feature = \"restricted\")]\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 74, "score": 24510.08713754317 }, { "content": "\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n assert_eq!(\n\n RangeRef {\n\n sheet_name: Some(\"sheet name 1\".to_string()),\n\n start: CellRef::new(0, 0),\n\n end: CellRef::new(1, 1),\n\n },\n\n result\n\n );\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"sheet name 1!A1:B2\");\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 75, "score": 24510.042382882424 }, { "content": " end: CellRef::new(1, 1),\n\n },\n\n result\n\n );\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet ''name 1'!B2:B2\");\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n assert_eq!(\n\n RangeRef {\n\n sheet_name: Some(\"sheet 'name 1\".to_string()),\n\n start: CellRef::new(1, 1),\n\n end: CellRef::new(1, 
1),\n\n },\n\n result\n\n );\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 76, "score": 24510.00018957381 }, { "content": " RecordHeader(\"favorite\".to_string()),\n\n RecordHeader(\"favorite\".to_string()),\n\n RecordHeader(\"favorite\".to_string()),\n\n RecordHeader(\"address.city.name\".to_string()),\n\n RecordHeader(\"address.zipcode\".to_string()),\n\n RecordHeader(\"address.city.code\".to_string()),\n\n RecordHeader(\"link title note\".to_string()),\n\n ];\n\n let expected = RawHeaders {\n\n range: RangeRef::from_str(\"grouping!A1:J1\").unwrap(),\n\n values,\n\n };\n\n\n\n assert_eq!(result, expected)\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_headers_from_sheet_2() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 77, "score": 24509.881331725115 }, { "content": "\n\n pub async fn read_raw_headers<HttpConnector>(\n\n client: &ReqClient,\n\n token_manager: Arc<TokenManager<HttpConnector>>,\n\n condition: &HeaderSearchCondition,\n\n ) -> Result<RawHeaders> {\n\n let specified_range = condition.specified_cell_range.is_some();\n\n let mut header_range = match condition.as_range() {\n\n Some(range) => range,\n\n None => default_header_range(condition.sheet_name.as_ref()),\n\n };\n\n\n\n if !header_range.is_one_line_row() {\n\n log::warn!(\"header range is multiple line :{}\", header_range);\n\n return Err(HeaderError::UnsupportedMultipleHeader(format!(\n\n \"{}\",\n\n header_range\n\n )));\n\n }\n\n\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 78, "score": 24509.728825771825 }, { "content": " scopes::SHEET_READ_ONLY,\n\n load_test_sa_file_path(),\n\n rx,\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(\n\n sheet_id.to_string(),\n\n None,\n\n Some(\"header escape\".to_string()),\n\n );\n\n let condition = HeaderSearchCondition::create(\n\n &client,\n\n token_manager.clone(),\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 79, "score": 24509.670690750387 }, { "content": " CellValue(\"34\".into()),\n\n CellValue(\"male\".into()),\n\n CellValue(\"shopping\".into()),\n\n CellValue(\"\".into()),\n\n CellValue(\"fishing\".into()),\n\n CellValue(\"tokyo\".into()),\n\n CellValue(\"111222\".into()),\n\n CellValue(\"TK\".into()),\n\n CellValue(\"\".into()),\n\n ],\n\n ]);\n\n\n\n assert_eq!(row_values, expected);\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_values_test_valid_2() {\n\n let (_, rx) = broadcast::channel(1);\n\n let token_manager = token_manager_from_service_account_file(\n\n scopes::SHEET_READ_ONLY,\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 80, "score": 24509.595119204932 }, { "content": " pub const TEST_SHEET1_EMPTY_TAG_ID: u32 = 2089556915;\n\n\n\n #[cfg(feature = \"test-using-sa\")]\n\n pub fn load_test_sa_file_path() -> PathBuf {\n\n let mut p = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n p.push(\"dev-secret/test-sa-key.json\");\n\n p\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 81, "score": 24509.249086433934 }, { "content": "\n\n #[error(\"invalid col range start:{0} end:{1}\")]\n\n InvalidColRange(usize, usize),\n\n\n\n #[error(\"too many row number to read. 
max is {0}, passed {1} \")]\n\n TooManyRowNumber(usize, usize),\n\n\n\n #[cfg(feature = \"restricted\")]\n\n #[error(\"row index out of restriction:{0}\")]\n\n RowIndexOutOfRescription(usize),\n\n}\n\n\n\nimpl ValueError {\n\n pub fn is_not_found(&self) -> bool {\n\n if let ValueError::SpreadSheetNotFound(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 82, "score": 24509.201429132972 }, { "content": " fn test_cell_ref_from_str() {\n\n {\n\n let result = CellRef::from_str(\"A1\");\n\n assert!(result.is_ok());\n\n assert_eq!(CellRef::new(0, 0), result.unwrap());\n\n }\n\n\n\n {\n\n let result = CellRef::from_str(\"B3\");\n\n assert!(result.is_ok());\n\n assert_eq!(CellRef::new(1, 2), result.unwrap());\n\n }\n\n\n\n {\n\n let result = CellRef::from_str(\"BA2\");\n\n assert!(result.is_ok());\n\n assert_eq!(CellRef::new(52, 1), result.unwrap());\n\n }\n\n {\n\n let result = CellRef::from_str(\"B0\");\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 83, "score": 24509.14688656965 }, { "content": " let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata =\n\n SheetMeta::new(sheet_id.to_string(), None, Some(\"many headers\".to_string()));\n\n\n\n let condition =\n\n HeaderSearchCondition::create(&client, token_manager.clone(), sheet_mata, None)\n\n .await\n\n .unwrap();\n\n\n\n let result = RawHeaders::read_raw_headers(&client, token_manager, &condition).await;\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n let values: Vec<RecordHeader> = (1..=81)\n\n .into_iter()\n\n .map(|n| RecordHeader(format!(\"h{}\", n)))\n\n .collect();\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 84, "score": 24509.100691168667 }, { "content": " \"invalid start cell row :{}\",\n\n start_col_index\n\n )));\n\n }\n\n let end_col_index = (self.end.col_index as i32) + shift;\n\n\n\n if end_col_index < 0 {\n\n return Err(RangeError::InvalidRangeRefRow(format!(\n\n \"invalid end cell row :{}\",\n\n end_col_index\n\n )));\n\n }\n\n\n\n self.start.col_index = start_col_index as usize;\n\n self.end.col_index = end_col_index as usize;\n\n Ok(())\n\n }\n\n\n\n pub fn is_one_line_row(&self) -> bool {\n\n self.start.row_index == self.end.row_index\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 85, "score": 24509.039511914718 }, { "content": " }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet name 1!B2:B2\");\n\n\n\n assert!(result.is_err());\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"'sheet name 1'!B2:A2\");\n\n\n\n assert!(result.is_err());\n\n }\n\n\n\n {\n\n let result = RangeRef::from_str(\"A3:A2\");\n\n\n\n assert!(result.is_err());\n\n }\n\n }\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 86, "score": 24509.01529264174 }, { "content": "\n\n pub fn empty_values(len: usize) -> Vec<RecordHeader> {\n\n (0..len)\n\n .into_iter()\n\n .map(|_| RecordHeader(\"\".to_string()))\n\n .collect()\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct RawHeaders {\n\n pub range: RangeRef,\n\n pub values: Vec<RecordHeader>,\n\n}\n\n\n\nimpl RawHeaders {\n\n pub(crate) fn convert_from(\n\n value_ranges: Vec<ValueRange>,\n\n specified_range: bool,\n\n ) -> Result<RawHeaders> {\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 87, "score": 24509.005566875963 }, { "content": " pub fn is_not_found(&self) -> bool {\n\n if let 
HeaderError::SpreadSheetNotFound(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Debug)]\n\npub struct HeaderSearchCondition {\n\n pub spread_sheet_id: SpreadSheetId,\n\n pub sheet_name: Option<SheetName>,\n\n pub specified_cell_range: Option<(CellRef, CellRef)>,\n\n pub sheet_info: Sheet,\n\n}\n\n\n\nimpl HeaderSearchCondition {\n\n pub fn new(\n\n spread_sheet_id: SpreadSheetId,\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 88, "score": 24509.001915927 }, { "content": " if value_ranges.is_empty() {\n\n return Err(HeaderError::InvalidRangeRefInReturnedValue(\"\".to_string()));\n\n }\n\n\n\n let mut merged_range: Option<RangeRef> = None;\n\n let mut all_headers: Vec<RecordHeader> = Vec::new();\n\n\n\n for each_value_range in value_ranges {\n\n let mut each_response_header_range = RangeRef::from_str(&each_value_range.range)\n\n .map_err(|e| {\n\n log::error!(\"invalid range ref in returned value of header {}\", e);\n\n HeaderError::InvalidRangeRefInReturnedValue(each_value_range.range.clone())\n\n })?;\n\n\n\n if each_value_range.values.is_none() {\n\n break;\n\n }\n\n\n\n let mut each_header_values = each_value_range.values.unwrap();\n\n if each_header_values.is_empty() {\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 89, "score": 24508.980798958994 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\npub struct CellValue(JsonValue);\n\nimpl CellValue {\n\n pub fn as_inner(&self) -> &JsonValue {\n\n &self.0\n\n }\n\n}\n\n\n\npub struct ReadValueOption {\n\n spread_sheet_id: SpreadSheetId,\n\n sheet_name: Option<SheetName>,\n\n col_range: (usize, usize),\n\n start_row_idx: usize,\n\n end_row_idx: usize,\n\n}\n\n\n\nimpl ReadValueOption {\n\n pub fn new(\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 90, "score": 24508.931057972684 }, { "content": " if e.is_not_found() {\n\n match header_range.sheet_name.clone() {\n\n Some(sheet_name) => HeaderError::SpreadSheetNotFound(format!(\n\n \"sheet name :{} is not found in spread sheet {}\",\n\n sheet_name, &condition.spread_sheet_id,\n\n )),\n\n None => HeaderError::SpreadSheetNotFound(format!(\n\n \"spread sheet {} is not found\",\n\n &condition.spread_sheet_id,\n\n )),\n\n }\n\n } else {\n\n HeaderError::FetchHeaderApiError(format!(\"{}\", e))\n\n }\n\n });\n\n let sheet_values = match sheet_values {\n\n Err(e) => {\n\n log::error!(\"fetch header error :{:?}\", e);\n\n return Err(e);\n\n }\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 91, "score": 24508.81376096613 }, { "content": " &option.spread_sheet_id,\n\n &value_range.as_string(),\n\n None,\n\n None,\n\n None,\n\n )\n\n .await\n\n .map_err(|e| {\n\n if e.is_not_found() {\n\n ValueError::SpreadSheetNotFound(format!(\n\n \"sheet name: [{}] not found on spread sheet: {}\",\n\n sheet_name.unwrap_or_default(),\n\n &option.spread_sheet_id,\n\n ))\n\n } else {\n\n ValueError::FetchValueApiError(format!(\"{}\", e))\n\n }\n\n })?;\n\n\n\n let mut result = RowValues::default();\n", "file_path": "src/external_service/spread_sheet/value.rs", "rank": 92, "score": 24508.69304734348 }, { "content": " let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(\n\n sheet_id.to_string(),\n\n None,\n\n Some(\"not existing shet\".to_string()),\n\n );\n\n\n\n let condition =\n\n HeaderSearchCondition::create(&client, token_manager.clone(), 
sheet_mata, None)\n\n .await\n\n .unwrap();\n\n\n\n let result = RawHeaders::read_raw_headers(&client, token_manager, &condition).await;\n\n\n\n assert!(result.is_err());\n\n }\n\n}\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 93, "score": 24508.68350968031 }, { "content": " }\n\n\n\n pub fn end_col_index(&mut self) -> usize {\n\n self.end.col_index\n\n }\n\n\n\n pub fn expand(&mut self, other: &RangeRef) {\n\n if self.contains(other) {\n\n return;\n\n }\n\n\n\n if other.start.col_index < self.start.col_index {\n\n self.start.col_index = other.start.col_index\n\n }\n\n\n\n if other.start.row_index < self.start.row_index {\n\n self.start.row_index = other.start.row_index\n\n }\n\n\n\n if other.end.col_index > self.end.col_index {\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 94, "score": 24508.64604703684 }, { "content": " let token_manager = Arc::new(token_manager);\n\n\n\n let client = Client::new();\n\n let sheet_id = TEST_SHEET1_ID;\n\n\n\n let sheet_mata = SheetMeta::new(sheet_id.to_string(), None, None);\n\n let condition =\n\n HeaderSearchCondition::create(&client, token_manager.clone(), sheet_mata, None)\n\n .await\n\n .unwrap();\n\n\n\n let result = RawHeaders::read_raw_headers(&client, token_manager, &condition).await;\n\n\n\n assert!(result.is_ok());\n\n let result = result.unwrap();\n\n\n\n let values: Vec<RecordHeader> = vec![\n\n RecordHeader(\"name\".to_string()),\n\n RecordHeader(\"age\".to_string()),\n\n RecordHeader(\"sex\".to_string()),\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 95, "score": 24508.618870797683 }, { "content": " if vs.len() == vs.capacity() {\n\n vs.reserve(1);\n\n }\n\n unsafe {\n\n let head = vs.as_mut_ptr();\n\n\n\n ptr::copy(head, head.offset(1), vs.len());\n\n ptr::write(head, v);\n\n vs.set_len(vs.len() + 1);\n\n }\n\n}\n\n\n\nstatic VALID_RANGE_RE: OnceCell<Regex> = OnceCell::new();\n\nstatic VALID_CELL_REF_RE: OnceCell<Regex> = OnceCell::new();\n\nstatic VALID_SHEET_NAME_RE: OnceCell<Regex> = OnceCell::new();\n\n\n\npub(crate) fn valid_range_regex() -> &'static Regex {\n\n VALID_RANGE_RE.get_or_init(|| {\n\n let r = Regex::new(r\"(?P<SHEET_NAME>.*?)!?(?P<START_RANGE_COL>[A-Z]+)(?P<START_RANGE_ROW>[0-9]+):(?P<END_RANGE_COL>[A-Z]+)(?P<END_RANGE_ROW>[0-9]+)\").unwrap();\n\n r\n\n })\n\n}\n\n\n", "file_path": "src/external_service/spread_sheet/range.rs", "rank": 96, "score": 24508.271192625027 }, { "content": " start_row_idx,\n\n finish_row_idx,\n\n Some(Pagination::new(Some(offset), Some(limit))),\n\n )\n\n };\n\n\n\n let max_row_count_of_grid = {\n\n let sheet_name = header_search_condition\n\n .sheet_name\n\n .as_ref()\n\n .map(|s| s.as_str());\n\n match header_search_condition\n\n .sheet_info\n\n .find_property_by_name(sheet_name)\n\n {\n\n None => {\n\n return Err(HeaderError::UnknwonError(format!(\n\n \"sheet info not found:{:?}\",\n\n sheet_name\n\n )))?\n", "file_path": "src/external_service/spread_sheet/mod.rs", "rank": 97, "score": 24508.232327018977 }, { "content": "\n\n #[test]\n\n fn test_fmt_range() {\n\n {\n\n let input = \"'sheet name 1'!B2:BA2\".to_string();\n\n let result = RangeRef::from_str(&input);\n\n assert!(result.is_ok());\n\n assert_eq!(input, result.unwrap().to_string())\n\n }\n\n\n\n {\n\n let input = \"B2:BA3\".to_string();\n\n let result = RangeRef::from_str(&input);\n\n assert!(result.is_ok());\n\n assert_eq!(input, result.unwrap().to_string())\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_expand() {\n", "file_path": 
"src/external_service/spread_sheet/range.rs", "rank": 98, "score": 24508.08567642453 }, { "content": " sheet_id.to_string(),\n\n None,\n\n Some(\"name_specified_sheet\".to_string()),\n\n );\n\n let result = HeaderSearchCondition::create(&client, token_manager, sheet_mata, None).await;\n\n assert!(result.is_ok());\n\n\n\n let expected = HeaderSearchCondition::new(\n\n SpreadSheetId::new(TEST_SHEET1_ID.to_string()),\n\n Some(SheetName::new(\"name_specified_sheet\".to_string())),\n\n None,\n\n get_expected_sheet_info(),\n\n );\n\n\n\n assert_eq!(expected, result.unwrap());\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_header_test_valid_4() {\n\n let (_, rx) = broadcast::channel(1);\n", "file_path": "src/external_service/spread_sheet/header.rs", "rank": 99, "score": 24507.84001347461 } ]
Rust
2d-games/match-three/src/mgfw/ecs/entity.rs
Syn-Nine/rust-mini-games
b80fa60d524c8fb6749731cbc078a1a2526861ed
use super::*;
use crate::mgfw::log;

pub const ENTITY_SZ: usize = 256;

#[derive(Copy, Clone)]
pub struct EntityIdSpan {
    pub first: usize,
    pub last: usize,
}

struct Entity {
    components: u32,
}

pub struct EntityRegistry {
    data: *mut Entity,
    cursor: usize,
    span: EntityIdSpan,
}

#[allow(dead_code)]
impl EntityRegistry {
    pub fn new(mgr: &mut CacheManager) -> EntityRegistry {
        log(format!("Constructing EntityRegistry"));
        let sz_bytes = std::mem::size_of::<Entity>() * ENTITY_SZ;
        EntityRegistry {
            data: mgr.allocate(sz_bytes) as *mut Entity,
            cursor: 0,
            span: EntityIdSpan {
                first: ENTITY_SZ - 1,
                last: 0,
            },
        }
    }

    pub fn add(&mut self) -> usize {
        for _i in 0..ENTITY_SZ {
            if !self.has_component(self.cursor, COMPONENT_ACTIVE) {
                break;
            }
            self.cursor = (self.cursor + 1) % ENTITY_SZ;
        }
        if self.has_component(self.cursor, COMPONENT_ACTIVE) {
            log(format!(
                "WARNING: EntityRegistry: Ran out of available entity slots!"
            ));
            assert!(false);
        }
        self.add_component(self.cursor, COMPONENT_ACTIVE);
        self.cursor
    }

    pub fn add_component(&mut self, idx: usize, component: u32) {
        let entity = self.get_data_ref_mut(idx);
        entity.components |= component;
        if idx < self.span.first {
            self.span.first = idx;
        }
        if idx > self.span.last {
            self.span.last = idx;
        }
    }

    pub fn has_component(&self, idx: usize, component: u32) -> bool {
        let entity = self.get_data_ref(idx);
        (entity.components & component) == component
    }

    pub fn set_active(&mut self, idx: usize, val: bool) {
        self.overwrite_component(idx, COMPONENT_VISIBLE, val);
    }

    pub fn is_active(&self, idx: usize) -> bool {
        return self.has_component(idx, COMPONENT_ACTIVE);
    }

    pub fn set_visibility(&mut self, idx: usize, val: bool) {
        self.overwrite_component(idx, COMPONENT_VISIBLE, val);
    }

    pub fn is_visible(&self, idx: usize) -> bool {
        return self.has_component(idx, COMPONENT_VISIBLE);
    }

    pub fn clear_component(&mut self, idx: usize, component: u32) {
        let entity = self.get_data_ref_mut(idx);
        entity.components &= !component;
        if COMPONENT_ACTIVE == component && (idx == self.span.first || idx == self.span.last) {
            self.update_span();
        }
    }

    pub fn overwrite_component(&mut self, idx: usize, component: u32, val: bool) {
        self.clear_component(idx, component);
        if val {
            self.add_component(idx, component);
        }
    }

    pub fn get_id_span(&self) -> EntityIdSpan {
        self.span
    }

    fn update_span(&mut self) {
        self.span = EntityIdSpan {
            first: ENTITY_SZ - 1,
            last: 0,
        };
        for idx in 0..ENTITY_SZ {
            if self.has_component(idx, COMPONENT_ACTIVE) {
                if idx < self.span.first {
                    self.span.first = idx;
                }
                if idx > self.span.last {
                    self.span.last = idx;
                }
            }
        }
    }

    fn get_data_ref_mut(&self, idx: usize) -> &mut Entity {
        assert!(idx < ENTITY_SZ);
        unsafe { &mut *(self.data.offset(idx as isize)) }
    }

    fn get_data_ref(&self, idx: usize) -> &Entity {
        assert!(idx < ENTITY_SZ);
        unsafe { &*(self.data.offset(idx as isize)) }
    }
}
use super::*;
use crate::mgfw::log;

pub const ENTITY_SZ: usize = 256;

#[derive(Copy, Clone)]
pub struct EntityIdSpan {
    pub first: usize,
    pub last: usize,
}

struct Entity {
    components: u32,
}

pub struct EntityRegistry {
    data: *mut Entity,
    cursor: usize,
    span: EntityIdSpan,
}

#[allow(dead_code)]
impl EntityRegistry {
    pub fn new(mgr: &mut CacheManager) -> EntityRegistry {
        log(format!("Constructing EntityRegistry"));
        let sz_bytes = std::mem::size_of::<Entity>() * ENTITY_SZ;
        EntityRegistry {
            data: mgr.allocate(sz_bytes) as *mut Entity,
            cursor: 0,
            span: EntityIdSpan {
                first: ENTITY_SZ - 1,
                last: 0,
            },
        }
    }

    pub fn add(&mut self) -> usize {
        for _i in 0..ENTITY_SZ {
            if !self.has_component(self.cursor, COMPONENT_ACTIVE) {
                break;
            }
            self.cursor = (self.cursor + 1) % ENTITY_SZ;
        }
        if self.has_component(self.cursor, COMPONENT_ACTIVE) {
            log(format!(
                "WARNING: EntityRegistry: Ran out of available entity slots!"
            ));
            assert!(false);
        }
        self.add_component(self.cursor, COMPONENT_ACTIVE);
        self.cursor
    }

    pub fn add_component(&mut self, idx: usize, component: u32) {
        let entity = self.get_data_ref_mut(idx);
        entity.components |= component;
        if idx < self.span.first {
            self.span.first = idx;
        }
        if idx > self.span.last {
            self.span.last = idx;
        }
    }

    pub fn has_component(&self, idx: usize, component: u32) -> bool {
        let entity = self.get_data_ref(idx);
        (entity.components & component) == component
    }

    pub fn set_active(&mut self, idx: usize, val: bool) {
        self.overwrite_component(idx, COMPONENT_VISIBLE, val);
    }

    pub fn is_active(&self, idx: usize) -> bool {
        return self.has_component(idx, COMPONENT_ACTIVE);
    }

    pub fn set_visibility(&mut self, idx: usize, val: bool) {
        self.overwrite_component(idx, COMPONENT_VISIBLE, val);
    }

    pub fn is_visible(&self, idx: usize) -> bool {
        return self.has_component(idx, COMPONENT_VISIBLE);
    }

    pub fn clear_component(&mut self, idx: usize, component: u32) {
        let entity = self.get_data_ref_mut(idx);
        entity.components &= !component;
        if COMPONENT_ACTIVE == component && (idx == self.span.first || idx == self.span.last) {
            self.update_span();
        }
    }

    pub fn overwrite_component(&mut self, idx: usize, component: u32, val: bool) {
        self.clear_component(idx, component);
        if val {
            self.add_component(idx, component);
        }
    }

    pub fn get_id_span(&self) -> EntityIdSpan {
        self.span
    }

    fn update_span(&mut self) {
        self.span = EntityIdSpan {
            first: ENTITY_SZ - 1,
            last: 0,
        };
        for idx in 0..ENTITY_SZ {
        }
    }

    fn get_data_ref_mut(&self, idx: usize) -> &mut Entity {
        assert!(idx < ENTITY_SZ);
        unsafe { &mut *(self.data.offset(idx as isize)) }
    }

    fn get_data_ref(&self, idx: usize) -> &Entity {
        assert!(idx < ENTITY_SZ);
        unsafe { &*(self.data.offset(idx as isize)) }
    }
}
            if self.has_component(idx, COMPONENT_ACTIVE) {
                if idx < self.span.first {
                    self.span.first = idx;
                }
                if idx > self.span.last {
                    self.span.last = idx;
                }
            }
if_condition
[ { "content": "// update the entities for the cursor\n\npub fn update_cursor_entities(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n let mut j: usize = 0;\n\n let loc = get_cursor_location(cache);\n\n let cx: f32 = loc.0 as f32 * 16.0 + 8.0;\n\n let cy: f32 = loc.1 as f32 * 16.0 + 56.0;\n\n\n\n for i in 0..16 {\n\n if 0 != cache.piece[i] {\n\n let e = cache.cursor_entity_start + j + 5 * cache.curr_block;\n\n let ex: f32 = (i % 4) as f32 * 16.0 + cx;\n\n let ey: f32 = ((i - (i % 4)) / 4) as f32 * 16.0 + cy;\n\n world.entity_set_position_xy(e, ex, ey);\n\n let t = cache.cursor_telegraph_start + j;\n\n world.entity_set_position_xy(t, ex, ey);\n\n j = j + 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 0, "score": 288384.9779509875 }, { "content": "// check if the position in cache overlaps the board edges or another block\n\npub fn check_constrained(cache: &mut GameData) -> bool {\n\n if check_constrained_lr(cache) {\n\n return true;\n\n }\n\n\n\n let loc = get_cursor_location(cache);\n\n\n\n for y in 0..4 {\n\n for x in 0..4 {\n\n let pidx = y * 4 + x;\n\n if 0 != cache.piece[pidx] {\n\n let my: i8 = (loc.1 + y) as i8 - 1;\n\n if 18 < my {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 1, "score": 286034.6634263402 }, { "content": "// check if the position in cache overlaps the board or another block to the left and right\n\npub fn check_constrained_lr(cache: &mut GameData) -> bool {\n\n let loc = get_cursor_location(cache);\n\n\n\n for y in 0..4 {\n\n for x in 0..4 {\n\n let pidx = y * 4 + x;\n\n if 0 != cache.piece[pidx] {\n\n let mx: i8 = (loc.0 + x) as i8 - 1;\n\n let my: i8 = (loc.1 + y) as i8 - 1;\n\n if 0 > mx || 9 < mx {\n\n return true;\n\n }\n\n let idx = my as usize * BOARD_X + mx as usize;\n\n if 0 != cache.board[idx] {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 2, "score": 282680.88836608594 }, { "content": "// reset the cursor to the top of the board using the given block pattern\n\npub fn init_cursor(cache: &mut GameData, world: &mut mgfw::ecs::World, pattern: usize) {\n\n cache.curr_block = pattern;\n\n cache.block_rotation = 0;\n\n cache.cursor = 15;\n\n cache.move_timer = cache.game_timer + 1.0;\n\n let start: usize = 16 * pattern;\n\n let mut j: usize = 0;\n\n // there are multiple entities for cursors, one of each pattern, this hides all first\n\n hide_cursor(cache, world);\n\n // then this shows the one we care about\n\n for i in 0..16 {\n\n cache.piece[i] = cache.tetra_base[start + i];\n\n if 0 != cache.piece[i] {\n\n let e = cache.cursor_entity_start + j + 5 * cache.curr_block;\n\n world.entity_set_visibility(e, true);\n\n j = j + 1;\n\n }\n\n }\n\n update_cursor_entities(cache, world);\n\n update_telegraph_entities(cache, world);\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 3, "score": 280250.6112767959 }, { "content": "#[rustfmt::skip]\n\npub fn update(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) -> bool {\n\n let mut expect_blown = false;\n\n\n\n cache.frame = (cache.frame + 1) % 128;\n\n\n\n if !cache.ready {\n\n if cache.frame == 127 {\n\n cache.ready = true;\n\n }\n\n return false;\n\n }\n\n\n\n // Amortize workload\n\n if 0 == cache.frame && !cache.stuck {\n\n let op = cache.options;\n\n cache.stuck = check_stuck(cache);\n\n if !cache.stuck && op != cache.options {\n\n if 0 != cache.options 
{\n\n world.entity_set_text(1, format!(\"Options: {}\", cache.options));\n\n let ln = world.text_get_width(1) as f32 * 0.5;\n", "file_path": "2d-games/mahjong/src/game/game.rs", "rank": 4, "score": 276120.1717365517 }, { "content": "#[rustfmt::skip]\n\npub fn update(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) -> bool {\n\n let mut expect_blown = false;\n\n\n\n cache.frame = (cache.frame + 1) % 128;\n\n\n\n if cache.refresh_board {\n\n hide_pieces(world);\n\n for idx in 0..9 {\n\n let x = cache.board_xyv[idx * 2 + 0];\n\n let y = cache.board_xyv[idx * 2 + 1];\n\n match cache.board_state[idx] {\n\n ENUM_PLAYER => place_piece(idx, 0, x, y, world),\n\n ENUM_COMPUTER => place_piece(idx, 1, x, y, world),\n\n _ => (),\n\n }\n\n }\n\n cache.refresh_board = false;\n\n }\n\n \n\n if cache.game_over {\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 5, "score": 276120.1717365517 }, { "content": "#[rustfmt::skip]\n\npub fn update(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) -> bool {\n\n let expect_blown = false;\n\n\n\n cache.frame = (cache.frame + 1) % 128;\n\n\n\n if 0 < cache.click_timer {\n\n cache.click_timer -= 1;\n\n }\n\n\n\n if !cache.ready {\n\n return false;\n\n }\n\n\n\n if cache.show_popup {\n\n cache.popup_timer -= 833.0e-6;\n\n if 0.0 > cache.popup_timer {\n\n cache.popup_timer = 0.0;\n\n cache.show_popup = false;\n\n world.entity_set_visibility(90, false);\n\n world.entity_set_visibility(91, false);\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 6, "score": 273699.82275291707 }, { "content": "#[rustfmt::skip]\n\npub fn update(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) -> bool {\n\n let mut expect_blown = false;\n\n\n\n cache.frame = (cache.frame + 1) % 128;\n\n\n\n if !cache.ready {\n\n if cache.frame == 127 {\n\n cache.ready = true;\n\n }\n\n return false;\n\n }\n\n\n\n // Amortize workload\n\n if 0 == cache.frame {\n\n if !cache.level_up_lock && !cache.game_over {\n\n if !cache.gen_block && !check_stick(cache, world) {\n\n if cache.move_timer < cache.game_timer {\n\n cache.cursor += BOARD_X;\n\n cache.move_timer = cache.game_timer + 1.0 * f64::powf(0.95, cache.level as f64);\n\n update_cursor_entities(cache, world);\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 7, "score": 273699.82275291707 }, { "content": "pub fn check_stick(cache: &mut GameData, world: &mut mgfw::ecs::World) -> bool {\n\n let loc = get_cursor_location(cache);\n\n let mut ret = false;\n\n\n\n // is ther another block below us? 
if so, the piece is stuck\n\n if check_constrained(cache) || 0 == get_collide_depth(cache) {\n\n ret = true;\n\n\n\n // copy cursor block to board\n\n for y in 0..4 {\n\n for x in 0..4 {\n\n let pidx = y * 4 + x;\n\n if 0 != cache.piece[pidx] {\n\n let cidx = (loc.1 + y - 1) * BOARD_X + (loc.0 + x - 1);\n\n cache.board[cidx] = cache.curr_block + 1;\n\n }\n\n }\n\n }\n\n\n\n // check if any rows can be cleared\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 8, "score": 270062.02478436136 }, { "content": "pub fn print_header(data: &super::GameData) {\n\n println!(\"\\n{}:\", print_name(data));\n\n println!(\n\n \"{}\",\n\n style(\"-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\")\n\n .color256(8)\n\n );\n\n println!(\"{}\\n\", print_description(data));\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 9, "score": 268489.7886172191 }, { "content": "pub fn rotate_cursor(cache: &mut GameData) {\n\n let temp = cache.piece;\n\n cache.block_rotation = (cache.block_rotation + 1) % 4;\n\n\n\n // tetra_base holds the puzzle piece pattern at each orientation\n\n // rotating the piece just adds an offset in tetra_base\n\n for y in 0..4 {\n\n for x in 0..4 {\n\n let idx = y * 4 + x;\n\n let pdx = cache.curr_block * 16 + 7 * 16 * cache.block_rotation + idx;\n\n cache.piece[idx] = cache.tetra_base[pdx];\n\n }\n\n }\n\n\n\n // if rotation would break a constraint, undo the rotation\n\n if check_constrained(cache) {\n\n cache.piece = temp;\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 10, "score": 268454.87670510355 }, { "content": "pub fn move_cursor_down(cache: &mut GameData) {\n\n if 0 < get_collide_depth(cache) {\n\n cache.cursor = cache.cursor + BOARD_X;\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 11, "score": 268454.87670510355 }, { "content": "pub fn move_cursor_right(cache: &mut GameData) {\n\n let temp = cache.cursor;\n\n let cx = cache.cursor % BOARD_X;\n\n\n\n if 9 > cx {\n\n cache.cursor = cache.cursor + 1;\n\n if check_constrained_lr(cache) {\n\n cache.cursor = temp;\n\n }\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 12, "score": 264977.84541072184 }, { "content": "pub fn move_cursor_left(cache: &mut GameData) {\n\n let temp = cache.cursor;\n\n let cx = cache.cursor % BOARD_X;\n\n\n\n if 0 < cx {\n\n cache.cursor = cache.cursor - 1;\n\n if check_constrained_lr(cache) {\n\n cache.cursor = temp;\n\n }\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 13, "score": 264977.84541072184 }, { "content": "#[rustfmt::skip]\n\npub fn event(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World, event_id: u8) -> bool {\n\n\n\n if mgfw::EVENT_INPUT_MOUSE_BUTTON_UP != event_id { return false; }\n\n\n\n if cache.stuck || (world.mouse_x < 10 && world.mouse_y < 10) {\n\n // shuffle board\n\n for i in 0..BOARD_SZ {\n\n if 0 == cache.board[i] {\n\n continue;\n\n }\n\n\n\n let i0: usize = cache.board[i] - 1;\n\n if !cache.stones[i0].active {\n\n continue;\n\n }\n\n \n\n let mut i1: usize = i0;\n\n \n\n loop {\n\n let j: usize = (mgfw::rnd() * BOARD_SZ as f32).floor() as usize;\n", "file_path": "2d-games/mahjong/src/game/game.rs", "rank": 14, "score": 264175.3072160903 }, { "content": "#[rustfmt::skip]\n\npub fn event(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World, event_id: u8) -> bool {\n\n if ENUM_PLAYER != cache.turn { return false; } // consume event\n\n if cache.game_over { 
return false; } // no clicking during game over animation\n\n\n\n match event_id {\n\n mgfw::EVENT_INPUT_MOUSE_BUTTON_UP => {\n\n let mx = world.mouse_x as f32;\n\n let my = world.mouse_y as f32;\n\n for idx in 0..9 {\n\n if ENUM_NONE == cache.board_state[idx] &&\n\n mx > cache.board_xyv[idx * 2 + 0] - 40.0 &&\n\n mx < cache.board_xyv[idx * 2 + 0] + 40.0 &&\n\n my > cache.board_xyv[idx * 2 + 1] - 40.0 &&\n\n my < cache.board_xyv[idx * 2 + 1] + 40.0 {\n\n cache.board_state[idx] = ENUM_PLAYER;\n\n cache.refresh_board = true;\n\n check_win(cache);\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n false\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 15, "score": 264175.3072160903 }, { "content": "#[rustfmt::skip]\n\npub fn event(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World, event_id: u8) -> bool {\n\n // noop\n\n\n\n if mgfw::EVENT_INPUT_MOUSE_BUTTON_UP != event_id { return false; }\n\n if cache.show_popup { return false; }\n\n if cache.explosion { return false; }\n\n //println!(\"{},{}\", world.mouse_x, world.mouse_y);\n\n if 0 >= cache.click_timer {\n\n if SWAP_INVALID != cache.swap_idx {\n\n swap(cache);\n\n cache.click_timer = 400;\n\n }\n\n if BOMB_INVALID != cache.bomb_idx {\n\n // start fifo\n\n let mut fifo: std::boxed::Box<VecDeque<u8>> = Box::new(VecDeque::new());\n\n fifo.push_back(cache.bomb_idx);\n\n\n\n heap.boom_deque.push_back(cache.bomb_idx);\n\n\n\n // iterate fifo\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 16, "score": 261935.8665822862 }, { "content": "#[rustfmt::skip]\n\npub fn event(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World, event_id: u8) -> bool {\n\n\n\n // press spacebar to reset if game-over\n\n if cache.game_over {\n\n match event_id {\n\n mgfw::EVENT_INPUT_KEYBOARD_PRESSED_SPACE => game_reset(cache, world),\n\n _ => ()\n\n }\n\n return false;\n\n }\n\n\n\n // waiting for block to generate, skip input\n\n if cache.gen_block || cache.level_up_lock {\n\n return false;\n\n }\n\n\n\n // normal input to move/rotate block\n\n match event_id {\n\n mgfw::EVENT_INPUT_KEYBOARD_PRESSED_LEFT => move_cursor_left(cache),\n\n mgfw::EVENT_INPUT_KEYBOARD_PRESSED_RIGHT => move_cursor_right(cache),\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 17, "score": 261935.8665822862 }, { "content": "pub fn print_options(data: &super::GameData) -> String {\n\n match data.menu {\n\n MenuEnum::Main => {\n\n let mut ret = String::new();\n\n ret.push_str(\" [A] Men at Arms\\n\");\n\n ret.push_str(\" [D] Diplomacy\\n\");\n\n ret.push_str(\" [W] War Room\\n\");\n\n ret.push_str(\" [K] Kingdoms of the Realm\\n\");\n\n ret.push_str(\" [H] Help\\n\");\n\n ret.push_str(\" [Q] Quit Game\\n\");\n\n ret.push_str(\"\\n [E] End Turn\\n\");\n\n ret\n\n }\n\n MenuEnum::Arms => {\n\n let mut ret = String::new();\n\n ret.push_str(\" [T] Train\\n\");\n\n ret.push_str(\" [D] Disband\\n\");\n\n ret.push_str(\"\\n [R] Return\\n\");\n\n ret\n\n }\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 18, "score": 260111.68589685735 }, { "content": "// how much room is there between the cursor block and the bocks below it\n\npub fn get_collide_depth(cache: &mut GameData) -> usize {\n\n let mut ret: usize = BOARD_Y;\n\n let mut bot: [i8; 4] = [-(BOARD_Y as i8); 4];\n\n let loc = get_cursor_location(cache);\n\n\n\n for x in 0..4 {\n\n for y in 0..4 {\n\n let pidx = y * 4 + x;\n\n if 0 != cache.piece[pidx] {\n\n bot[x] = y as i8;\n\n }\n\n }\n\n }\n\n\n\n let mut top: [usize; 4] = [BOARD_Y; 
4];\n\n\n\n for x in 0..4 {\n\n let xx: i8 = (loc.0 + x) as i8 - 1;\n\n if 0 <= xx && 9 >= xx {\n\n for y in loc.1..BOARD_Y {\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 19, "score": 257481.47998695722 }, { "content": "// hide the cursor, used on game win/lose\n\npub fn hide_cursor(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n for i in 0..5 * 7 {\n\n let e = cache.cursor_entity_start + i;\n\n world.entity_set_visibility(e, false);\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 20, "score": 255819.2189107412 }, { "content": "// clear a row from the board\n\npub fn erase_row(cache: &mut GameData, row: usize) {\n\n for x in 0..BOARD_X {\n\n for y in 0..BOARD_Y {\n\n let yy: i8 = row as i8 - y as i8 - 1;\n\n if 0 <= yy {\n\n let yidx = yy as usize * BOARD_X + x;\n\n cache.board[yidx + BOARD_X] = cache.board[yidx];\n\n }\n\n }\n\n }\n\n}\n\n\n\n// this gets called by MGFW with input events\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 21, "score": 253785.1173215342 }, { "content": "// updated the entity billboard with the specified image\n\npub fn update_entity_block(world: &mut mgfw::ecs::World, entity: usize, block: usize) {\n\n match block {\n\n 1 => world.entity_set_billboard(entity, String::from(\"assets/block-1.png\")),\n\n 2 => world.entity_set_billboard(entity, String::from(\"assets/block-1.png\")),\n\n 3 => world.entity_set_billboard(entity, String::from(\"assets/block-2.png\")),\n\n 4 => world.entity_set_billboard(entity, String::from(\"assets/block-3.png\")),\n\n 5 => world.entity_set_billboard(entity, String::from(\"assets/block-3.png\")),\n\n 6 => world.entity_set_billboard(entity, String::from(\"assets/block-4.png\")),\n\n 7 => world.entity_set_billboard(entity, String::from(\"assets/block-5.png\")),\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 22, "score": 253642.58277128421 }, { "content": "fn print_arms(data: &super::GameData, short: bool) -> String {\n\n let mut ret = String::new();\n\n\n\n ret = format!(\"{}{:>12} | {} | {}\\n\", ret, \" Unit \", \"Qty \", \"Might\");\n\n ret = format!(\n\n \"{}-------------------------------------------------------------------\\n\",\n\n ret\n\n );\n\n\n\n if data.kingdoms[0].barbarians > 0 {\n\n ret = format!(\n\n \"{}{:>11} | {:<5} | {}\\n\",\n\n ret,\n\n \"Barbarians\",\n\n data.kingdoms[0].barbarians,\n\n (data.kingdoms[0].barbarians as f64 * 2.0 * 0.30) as i32\n\n );\n\n }\n\n if data.kingdoms[0].pikemen > 0 {\n\n ret = format!(\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 23, "score": 253371.42911054735 }, { "content": "fn print_kingdoms(data: &super::GameData, borders_only: bool) -> String {\n\n let mut ret = String::new();\n\n\n\n ret = format!(\n\n \"{}{:>19} | {:<5} | {:<8} | {:<8} | {:<7} | {}\\n\",\n\n ret, \"Kingdom \", \"Lands\", \"Pop x100\", \"Might\", \"Status\", \"P Win\"\n\n );\n\n ret = format!(\n\n \"{}-------------------------------------------------------------------\\n\",\n\n ret\n\n );\n\n\n\n for i in 0..25 {\n\n let kingdom = &data.kingdoms[i];\n\n\n\n if 0 == kingdom.land {\n\n continue;\n\n }\n\n let mut s = \" \".to_string();\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 24, "score": 253371.4291105473 }, { "content": "// update the telegraph block at the bottom showing where the piece would land if dropped\n\npub fn update_telegraph_entities(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n let depth = get_collide_depth(cache);\n\n\n\n for i 
in 0..5 {\n\n let t = cache.cursor_telegraph_start + i;\n\n world.entity_set_visibility(t, false);\n\n }\n\n\n\n if 0 == depth {\n\n return;\n\n }\n\n\n\n let mut j: usize = 0;\n\n for i in 0..16 {\n\n if 0 != cache.piece[i] {\n\n let cc = cache.cursor + depth * BOARD_X;\n\n let cx: f32 = (cc % BOARD_X) as f32 * 16.0 + 8.0;\n\n let cy: f32 = ((cc - (cc % BOARD_X)) / BOARD_X) as f32 * 16.0 + 56.0;\n\n let ex: f32 = (i % 4) as f32 * 16.0 + cx;\n\n let ey: f32 = ((i - (i % 4)) / 4) as f32 * 16.0 + cy;\n\n let t = cache.cursor_telegraph_start + j;\n\n world.entity_set_position_xy(t, ex, ey);\n\n world.entity_set_visibility(t, true);\n\n j = j + 1;\n\n }\n\n }\n\n}\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 25, "score": 252850.93734776808 }, { "content": "fn update_image(cache: &mut GameData, idx: usize, world: &mut mgfw::ecs::World) {\n\n let entity = cache.icons[idx].entity as usize;\n\n match cache.icons[idx].class {\n\n 0 => world.entity_set_billboard(entity, String::from(\"assets/gem-blue.png\")),\n\n 1 => world.entity_set_billboard(entity, String::from(\"assets/gem-green.png\")),\n\n 2 => world.entity_set_billboard(entity, String::from(\"assets/gem-red.png\")),\n\n 3 => world.entity_set_billboard(entity, String::from(\"assets/gem-purple.png\")),\n\n 4 => world.entity_set_billboard(entity, String::from(\"assets/stone-head.png\")),\n\n 5 => world.entity_set_billboard(entity, String::from(\"assets/bomb.png\")),\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 26, "score": 251648.26470477975 }, { "content": "// conver cursor index to x/y coordinates\n\npub fn get_cursor_location(cache: &GameData) -> (usize, usize) {\n\n let cx = cache.cursor % BOARD_X;\n\n let cy = (cache.cursor - cx) / BOARD_X;\n\n (cx, cy)\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 27, "score": 244770.7851537781 }, { "content": "fn perform_money_action(money: u32, room: &mut RoomEnum, stats: &mut Player) -> bool {\n\n match room {\n\n RoomEnum::RoomBankDeposit => {\n\n if money == 1 {\n\n stats.gold_bank = stats.gold_bank + stats.gold;\n\n println!(\n\n \"{}\",\n\n style(format!(\"You deposited {} gold\", stats.gold)).color256(10)\n\n );\n\n stats.gold = 0;\n\n } else if money <= stats.gold {\n\n stats.gold = stats.gold - money;\n\n stats.gold_bank = stats.gold_bank + money;\n\n println!(\n\n \"{}\",\n\n style(format!(\"You deposited {} gold\", money)).color256(10)\n\n );\n\n } else if money > stats.gold {\n\n println!(\n\n \"{}\",\n", "file_path": "text-games/lord/src/main.rs", "rank": 28, "score": 241750.87466124352 }, { "content": "#[rustfmt::skip]\n\npub fn shutdown(cache: &mut GameData, _heap: &mut GameDataHeap) {\n\n // re-box and consume to deallocate memory\n\n let _temp = unsafe { Box::from_raw(cache.heap) };\n\n}\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 29, "score": 241501.62573948666 }, { "content": "pub fn shutdown(_cache: &mut GameData, heap: &mut GameDataHeap) {\n\n // deallocate and overwrite existing memory\n\n *heap = GameDataHeap::default();\n\n \n\n // re-box and consume\n\n //let _temp = unsafe { Box::from_raw(cache.heap) };\n\n}\n", "file_path": "2d-games/mahjong/src/game/game.rs", "rank": 30, "score": 241501.62573948666 }, { "content": "pub fn shutdown(_cache: &mut GameData, heap: &mut GameDataHeap) {\n\n // deallocate and overwrite existing memory\n\n *heap = GameDataHeap::default();\n\n\n\n // re-box and consume\n\n //let _temp = unsafe { Box::from_raw(cache.heap) };\n\n}\n\n\n\n// 
this gets called by MGFW at 1200hz\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 31, "score": 239180.9555322901 }, { "content": "pub fn shutdown(_cache: &mut GameData, heap: &mut GameDataHeap) {\n\n // deallocate and overwrite existing memory\n\n *heap = GameDataHeap::default();\n\n \n\n // re-box and consume\n\n //let _temp = unsafe { Box::from_raw(cache.heap) };\n\n}\n\n// 814", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 32, "score": 239180.9555322901 }, { "content": "fn check_stuck(cache: &mut GameData) -> bool {\n\n\n\n let mut options: Vec<usize> = Vec::new();\n\n // check constraints\n\n for z in 0..5 {\n\n for x in 0..30 {\n\n for y in 0..17 {\n\n let bidx: usize = z * 510 + y * 30 + x;\n\n if 0 != cache.board[bidx] {\n\n let mut found = cache.board[bidx];\n\n\n\n // check lhs/rhs constraints\n\n let mut lhs: bool = false;\n\n let mut rhs: bool = false;\n\n for yy in 0..3 {\n\n let yloc: i32 = y as i32 + yy as i32 - 1;\n\n let xloc: i32 = x as i32 - 2;\n\n if yloc >= 0 && yloc <= 17 && xloc >= 0 && xloc <= 30 {\n\n let nidx: usize = z * 510 + yloc as usize * 30 + xloc as usize;\n\n if 0 != cache.board[nidx] {\n", "file_path": "2d-games/mahjong/src/game/game.rs", "rank": 33, "score": 232982.6924275087 }, { "content": "#[rustfmt::skip]\n\nfn match_draw(cache: &mut GameData) -> bool {\n\n for i in 0..9 {\n\n if ENUM_NONE == cache.board_state[i] { return false; }\n\n }\n\n true\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 34, "score": 232982.6924275087 }, { "content": "#[rustfmt::skip]\n\npub fn initialize(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) {\n\n\n\n world.parse_world(\"assets/world.dat\");\n\n \n\n cache.selected = NONE_SELECTED;\n\n cache.frame = 0;\n\n cache.ready = false;\n\n cache.game_timer = 0.0;\n\n cache.options = 0;\n\n cache.stuck = false;\n\n cache.win = false;\n\n cache.game_timer_last = -1;\n\n\n\n for i in 0..NUM_STONES {\n\n // stone highlight\n\n let id = world.new_entity();\n\n world.entity_set_billboard(id, String::from(\"assets/highlight.png\"));\n\n world.entity_set_scale_xy(id, 30.0, 42.0);\n\n world.entity_set_visibility(id, false);\n\n \n", "file_path": "2d-games/mahjong/src/game/game.rs", "rank": 35, "score": 228848.12207435863 }, { "content": "#[rustfmt::skip]\n\npub fn initialize(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) {\n\n world.parse_world(\"assets/world.dat\");\n\n\n\n // initialize board positions\n\n for row in 0..3 {\n\n for col in 0..3 {\n\n let pos = mgfw::ecs::Position {x: 40.0 + 88.0 * col as f32, y: 104.0 + 88.0 * row as f32 };\n\n let idx = row * 3 + col;\n\n cache.board_xyv[idx * 2 + 0] = pos.x;\n\n cache.board_xyv[idx * 2 + 1] = pos.y;\n\n }\n\n }\n\n\n\n // clear the board\n\n reset_board(cache);\n\n \n\n cache.turn = ENUM_COMPUTER;\n\n if mgfw::rnd() < 0.5 { cache.turn = ENUM_PLAYER; }\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 36, "score": 228848.12207435863 }, { "content": "#[rustfmt::skip]\n\npub fn initialize(cache: &mut GameData, heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) {\n\n\n\n world.parse_world(\"assets/world.dat\");\n\n cache.level = 1;\n\n\n\n for i in 0..81 {\n\n let id = world.new_entity();\n\n cache.icons[i].entity = id as u8;\n\n cache.icons[i].class = gen_class(cache.level);\n\n cache.icons[i].slot = ICON_OPEN;\n\n world.entity_set_scale_xy(id, 64.0, 64.0);\n\n world.entity_set_visibility(id, false);\n\n update_image(cache, i, world);\n\n 
//\n\n cache.board[i] = ICON_OPEN;\n\n }\n\n\n\n for i in 0..80 {\n\n let id = world.new_entity();\n\n world.entity_set_scale_xy(id, 16.0, 16.0);\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 37, "score": 226897.84082278764 }, { "content": "#[rustfmt::skip]\n\npub fn initialize(cache: &mut GameData, _heap: &mut GameDataHeap, world: &mut mgfw::ecs::World) {\n\n\n\n world.parse_world(\"assets/world.dat\");\n\n \n\n cache.level = 1;\n\n cache.frame = 0;\n\n cache.ready = false;\n\n cache.game_timer = 0.0;\n\n cache.move_timer = cache.game_timer + 0.1;\n\n cache.block_rotation = 0;\n\n cache.gen_block = false;\n\n cache.gen_block_timer = 0.0;\n\n cache.row_counter = 0;\n\n cache.level_up_timer = 0.0;\n\n cache.game_over_timer = 0.0;\n\n cache.game_over = false;\n\n cache.level_up_lock = false;\n\n cache.level_rows = 10;\n\n\n\n // create level progress bar entity\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 38, "score": 226897.84082278764 }, { "content": "// update the progress bar entity using the current row cleared counter\n\npub fn update_progressbar(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n if cache.row_counter > cache.level_rows {\n\n cache.row_counter = cache.level_rows;\n\n }\n\n let xs: f32 = 54.0 * cache.row_counter as f32 / (cache.level_rows as f32);\n\n world.entity_set_position_xy(cache.row_counter_entity, 25.0 + xs * 0.5, 42.0);\n\n world.entity_set_scale_xy(cache.row_counter_entity, xs, 3.0); \n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 39, "score": 219703.31144327408 }, { "content": "// update the board cell's entities\n\npub fn update_board(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n for i in 0..BOARD_SZ {\n\n let e: usize = cache.board_entity_start + i;\n\n let b: usize = cache.board[i];\n\n if 0 == b {\n\n world.entity_set_visibility(e, false);\n\n } else {\n\n update_entity_block(world, e, b);\n\n world.entity_set_visibility(e, true);\n\n }\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 40, "score": 219699.0213485924 }, { "content": "// empty the board and update the board's entities\n\npub fn clear_board(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n for i in 0..BOARD_SZ {\n\n cache.board[i] = 0;\n\n }\n\n update_board(cache, world);\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 41, "score": 219698.96079743683 }, { "content": "pub fn check_clear(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n let mut e = cache.particle_entity_start;\n\n\n\n // for each board line, check if line has been filled\n\n for y in 0..BOARD_Y {\n\n let mut skip = false;\n\n for x in 0..BOARD_X {\n\n let idx = y * BOARD_X + x;\n\n if 0 == cache.board[idx] {\n\n skip = true;\n\n break;\n\n }\n\n }\n\n\n\n // line found, create particles and slide down prior rows\n\n if !skip {\n\n for x in 0..BOARD_X {\n\n let idx = y * BOARD_X + x;\n\n update_entity_block(world, e, cache.board[idx]);\n\n world.entity_set_visibility(e, true);\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 42, "score": 219694.27100354992 }, { "content": "// reset game state on game-over\n\npub fn game_reset(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n // wait a little bit of time for input events to clear to keep the game-over popup open\n\n if cache.game_over_timer < cache.game_timer {\n\n cache.level = 1;\n\n world.entity_set_text(1, format!(\"Level: {}\", cache.level));\n\n world.entity_set_color_rgba(0, 1.0, 1.0, 1.0, 1.0);\n\n 
clear_board(cache, world);\n\n init_cursor(cache, world, (mgfw::rnd() * 7.0).floor() as usize);\n\n select_next_block(cache, world);\n\n world.entity_set_visibility(cache.game_over_entity_start + 0, false);\n\n world.entity_set_visibility(cache.game_over_entity_start + 2, false);\n\n world.entity_set_visibility(cache.game_over_entity_start + 3, false);\n\n cache.game_over = false;\n\n cache.row_counter = 0;\n\n cache.level_rows = 10;\n\n update_progressbar(cache, world);\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 43, "score": 219694.27100354992 }, { "content": "#[rustfmt::skip]\n\nfn match_three(cache: &mut GameData, value: u8) -> bool {\n\n for i in 0..3 {\n\n if (cache.board_state[i * 3 + 0] == value // row check\n\n && cache.board_state[i * 3 + 1] == value\n\n && cache.board_state[i * 3 + 2] == value)\n\n || (cache.board_state[i + 0] == value // column check\n\n && cache.board_state[i + 3] == value\n\n && cache.board_state[i + 6] == value)\n\n { return true; }\n\n }\n\n // cross check\n\n if cache.board_state[4] == value {\n\n if (cache.board_state[0] == value && cache.board_state[8] == value)\n\n || (cache.board_state[2] == value && cache.board_state[6] == value)\n\n { return true; }\n\n }\n\n false\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 44, "score": 219409.44881433505 }, { "content": "// determine what the next block will be and update the ui\n\npub fn select_next_block(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n loop {\n\n cache.next_block = (mgfw::rnd() * 7.0).floor() as usize;\n\n if cache.curr_block == 0 || cache.curr_block == 1 {\n\n if cache.next_block != 0 && cache.next_block != 1 {\n\n break;\n\n }\n\n continue;\n\n }\n\n if cache.next_block != cache.curr_block {\n\n break;\n\n }\n\n }\n\n for i in 0..7 {\n\n world.entity_set_visibility(3 + i, false);\n\n\n\n if cache.row_counter < cache.level_rows {\n\n if i == cache.next_block {\n\n world.entity_set_visibility(3 + i, true);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/game/game.rs", "rank": 45, "score": 217430.0031699163 }, { "content": "pub fn get_swap_idx(cache: &mut GameData, hidx: i16, mx: i32, my: i32) -> (i16, f32, f32) {\n\n if cache.animating {\n\n return (SWAP_INVALID, 0.0, 0.0);\n\n }\n\n if ICON_OPEN == cache.board[hidx as usize] {\n\n return (SWAP_INVALID, 0.0, 0.0);\n\n }\n\n if cache.icons[cache.board[hidx as usize] as usize].animating {\n\n return (SWAP_INVALID, 0.0, 0.0);\n\n }\n\n if cache.icons[cache.board[hidx as usize] as usize].class > 3 {\n\n return (SWAP_INVALID, 0.0, 0.0);\n\n }\n\n let hx = hidx % 9;\n\n let hy = (hidx - hx) / 9;\n\n let cx = (hx + 1) as i32 * 64;\n\n let cy = (hy + 1) as i32 * 64;\n\n\n\n let r = 15 * 15;\n\n\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 46, "score": 215443.1995898979 }, { "content": "fn perform_action(option: &u8, room: &mut RoomEnum, flags: &mut [bool; 10]) -> bool {\n\n match room {\n\n RoomEnum::RoomEntrance => {\n\n if b'g' == *option {\n\n println!(\"\\nYou climb your way through a small opening in the broken glass door.\");\n\n *room = RoomEnum::RoomFoyer;\n\n return true;\n\n }\n\n }\n\n RoomEnum::RoomFoyer => {\n\n if b'c' == *option {\n\n println!(\"\\nYou slowly make your way down the dark hallway.\");\n\n *room = RoomEnum::RoomHallway;\n\n return true;\n\n }\n\n }\n\n RoomEnum::RoomHallway => {\n\n if b'b' == *option {\n\n println!(\"\\nYou jimmy open the old wooden door.\");\n\n *room = RoomEnum::RoomOffice;\n", "file_path": 
"text-games/asylum/src/main.rs", "rank": 47, "score": 204753.8584336078 }, { "content": "fn get_valid_input(mut room: &mut RoomEnum, mut flags: &mut [bool; 10]) {\n\n println!(\"\\nWhat do you do?\");\n\n println!(\"\\nOptions:\\n{}\", room_options(&room, &mut flags));\n\n\n\n loop {\n\n let option = get_player_input();\n\n if perform_action(&option, &mut room, &mut flags) {\n\n return;\n\n }\n\n println!(\"Try Again:\");\n\n }\n\n}\n\n\n", "file_path": "text-games/asylum/src/main.rs", "rank": 48, "score": 203859.90808544884 }, { "content": "fn print_quit(data: &super::GameData) -> String {\n\n let mut ret = \"End of Game Stats:\\n\".to_string();\n\n\n\n ret = format!(\n\n \"{}{:>16}: {}\\n\",\n\n ret,\n\n \"Population\",\n\n (data.kingdoms[0].pop * 100.0).floor()\n\n );\n\n ret = format!(\"{}{:>16}: {}\\n\", ret, \"Gold\", data.kingdoms[0].gold);\n\n ret = format!(\n\n \"{}{:>16}: {}\\n\",\n\n ret, \"From Plundering\", data.kingdoms[0].gold_plundered\n\n );\n\n ret = format!(\n\n \"{}{:>16}: {}\\n\",\n\n ret, \"From Tribute\", data.kingdoms[0].gold_tribute\n\n );\n\n ret = format!(\"{}{:>16}: {}\\n\", ret, \"Lands\", data.kingdoms[0].land);\n\n ret = format!(\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 49, "score": 203442.90142589586 }, { "content": "fn print_description(data: &super::GameData) -> String {\n\n match data.menu {\n\n MenuEnum::Main => print_stats(data),\n\n MenuEnum::Arms => print_arms(data, false),\n\n MenuEnum::Train => print_arms(data, false),\n\n MenuEnum::Disband => print_arms(data, false),\n\n MenuEnum::Diplomacy => print_relations(data),\n\n MenuEnum::Demand => print_relations(data),\n\n MenuEnum::TributeDemanded => print_tribute_demanded(data),\n\n MenuEnum::Warroom => print_mights(data),\n\n MenuEnum::Invade => print_mights(data),\n\n MenuEnum::Realm => print_kingdoms(data, false),\n\n MenuEnum::Quit => print_quit(data),\n\n MenuEnum::Notification => format!(\n\n \"Your Highness,\\n{}\",\n\n data.notifications[0].message.to_string()\n\n ),\n\n }\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 50, "score": 203442.90142589586 }, { "content": "fn print_mights(data: &super::GameData) -> String {\n\n let mut ret = print_kingdoms(data, true);\n\n\n\n ret = format!(\"{}\\n{:>10}: {}\\n\", ret, \"Gold\", data.kingdoms[0].gold);\n\n ret = format!(\"{}{:>10}: {}\\n\", ret, \"Lands\", data.kingdoms[0].land);\n\n ret = format!(\n\n \"{}{:>10}: {:}\\n\",\n\n ret, \"Might\", data.kingdoms[0].might as i32\n\n );\n\n\n\n ret\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 51, "score": 203442.90142589586 }, { "content": "fn print_stats(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n ret = format!(\n\n \"{}{:>12}: {}\\n\",\n\n ret,\n\n \"Population\",\n\n (data.kingdoms[0].pop * 100.0).floor()\n\n );\n\n ret = format!(\"{}{:>12}: {}\\n\", ret, \"Gold\", data.kingdoms[0].gold);\n\n ret = format!(\"{}{:>12}: {}\\n\", ret, \"Lands\", data.kingdoms[0].land);\n\n ret = format!(\n\n \"{}{:>12}: {}\\n\",\n\n ret, \"Might\", data.kingdoms[0].might as i32\n\n );\n\n\n\n ret\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 52, "score": 203442.90142589586 }, { "content": "fn print_relations(data: &super::GameData) -> String {\n\n print_kingdoms(data, true)\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 53, "score": 203442.90142589586 }, { "content": "fn print_name(data: &super::GameData) -> String {\n\n match data.menu {\n\n MenuEnum::Main => 
format!(\"Kingdom of Player, Year {}\", data.year),\n\n MenuEnum::Arms => \"Men at Arms\".to_string(),\n\n MenuEnum::Train => \"Men at Arms\".to_string(),\n\n MenuEnum::Disband => \"Men at Arms\".to_string(),\n\n MenuEnum::Diplomacy => \"Diplomacy\".to_string(),\n\n MenuEnum::Demand => \"Diplomacy\".to_string(),\n\n MenuEnum::TributeDemanded => \"Diplomacy\".to_string(),\n\n MenuEnum::Warroom => \"War Room\".to_string(),\n\n MenuEnum::Invade => \"War Room\".to_string(),\n\n MenuEnum::Realm => \"Kingdoms of the Realm\".to_string(),\n\n MenuEnum::Quit => format!(\"Kingdom of Player, Year {}\", data.year),\n\n MenuEnum::Notification => data.notifications[0].from.to_string(),\n\n }\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 54, "score": 203442.90142589586 }, { "content": "fn print_tribute_demanded(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n if data.kingdoms[0].demands.is_empty() {\n\n return ret;\n\n }\n\n\n\n ret = format!(\n\n \"Your Highness,\\nYou have received a demand of {} gold tribute from {}\\n\",\n\n data.kingdoms[0].demands[0].tribute, data.kingdoms[data.kingdoms[0].demands[0].who].ruler\n\n );\n\n\n\n ret = format!(\n\n \"{}{:>12}: {}\\n\",\n\n ret, \"Their Might\", data.kingdoms[data.kingdoms[0].demands[0].who].might as i32\n\n );\n\n ret = format!(\n\n \"{}{:>12}: {}\\n\",\n\n ret, \"Your Might\", data.kingdoms[0].might as i32\n\n );\n\n ret = format!(\n\n \"{}{:>12}: {}\\n\",\n\n ret, \"Your Gold\", data.kingdoms[0].gold as i32\n\n );\n\n ret\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 55, "score": 201480.7626803509 }, { "content": "fn print_demand_options(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n let options = [\n\n '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',\n\n 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q',\n\n ];\n\n\n\n let mut c = 0;\n\n\n\n for i in 0..25 {\n\n let kingdom = &data.kingdoms[i];\n\n\n\n if 0 == kingdom.land {\n\n continue;\n\n }\n\n\n\n if !data.kingdoms[0].borders[i] {\n\n continue;\n\n }\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 56, "score": 201480.7626803509 }, { "content": "fn print_kings_killed(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n for i in 0..data.kingdoms[0].civkill.len() {\n\n ret = format!(\n\n \"{} {}\\n\",\n\n ret, data.kingdoms[data.kingdoms[0].civkill[i]].ruler\n\n );\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 57, "score": 201480.7626803509 }, { "content": "fn print_arms_killed(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n ret = format!(\"{}{:>12} | {} | {}\\n\", ret, \" Killed \", \"Qty \", \"Might\");\n\n ret = format!(\n\n \"{}-------------------------------------------------------------------\\n\",\n\n ret\n\n );\n\n\n\n if data.kingdoms[0].bkill > 0 {\n\n ret = format!(\n\n \"{}{:>11} | {:<5} | {}\\n\",\n\n ret,\n\n \"Barbarians\",\n\n data.kingdoms[0].bkill,\n\n (data.kingdoms[0].bkill as f64 * 2.0 * 0.30) as i32\n\n );\n\n }\n\n if data.kingdoms[0].pkill > 0 {\n\n ret = format!(\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 58, "score": 201480.7626803509 }, { "content": "fn print_invade_options(data: &super::GameData) -> String {\n\n let mut ret = String::new();\n\n\n\n let options = [\n\n '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',\n\n 'J', 'K', 'L', 'M', 'N', 'O', 'P', 
'Q',\n\n ];\n\n\n\n let mut c = 0;\n\n\n\n for i in 0..25 {\n\n let kingdom = &data.kingdoms[i];\n\n\n\n if 0 == kingdom.land {\n\n continue;\n\n }\n\n\n\n if !data.kingdoms[0].borders[i] {\n\n continue;\n\n }\n", "file_path": "text-games/knights/src/game/menu.rs", "rank": 59, "score": 201480.7626803509 }, { "content": "#[allow(dead_code)]\n\npub fn deg2rad(val: f32) -> f32 {\n\n val * PI as f32 / 180.0\n\n}\n\n\n", "file_path": "2d-games/mahjong/src/mgfw/mod.rs", "rank": 60, "score": 198663.5266112951 }, { "content": "#[allow(dead_code)]\n\npub fn deg2rad(val: f32) -> f32 {\n\n val * PI as f32 / 180.0\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/mgfw/mod.rs", "rank": 61, "score": 198663.5266112951 }, { "content": "#[allow(dead_code)]\n\npub fn deg2rad(val: f32) -> f32 {\n\n val * PI as f32 / 180.0\n\n}\n\n\n", "file_path": "2d-games/tet-rust/src/mgfw/mod.rs", "rank": 62, "score": 196331.48098255775 }, { "content": "#[allow(dead_code)]\n\npub fn deg2rad(val: f32) -> f32 {\n\n val * PI as f32 / 180.0\n\n}\n\n\n", "file_path": "2d-games/match-three/src/mgfw/mod.rs", "rank": 63, "score": 196331.48098255775 }, { "content": "fn destroy_block(cache: &mut GameData, world: &mut mgfw::ecs::World, hidx: usize) {\n\n if ICON_OPEN == cache.board[hidx] {\n\n return;\n\n }\n\n let idx = cache.board[hidx] as usize;\n\n let entity = cache.icons[idx].entity as usize;\n\n cache.icons[idx].class = gen_class(cache.level);\n\n cache.icons[idx].slot = ICON_OPEN;\n\n world.entity_set_visibility(entity, false);\n\n update_image(cache, idx, world);\n\n cache.board[hidx] = ICON_OPEN;\n\n}\n\n\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 64, "score": 196028.7741452126 }, { "content": "fn charge_for_room(stats: &mut Player) -> bool {\n\n let price = tavern_price(&stats.level);\n\n if stats.gold >= price {\n\n stats.gold = stats.gold - price;\n\n stats.health = stats.max_health;\n\n return true;\n\n }\n\n println!(\"{}\", style(\"You don't have enough gold!\").red());\n\n false\n\n}\n\n\n", "file_path": "text-games/lord/src/main.rs", "rank": 65, "score": 195623.88022676005 }, { "content": "fn place_piece(idx: usize, player: u8, x: f32, y: f32, world: &mut mgfw::ecs::World) {\n\n let wid = 5 + (idx + player as usize * 9);\n\n world.entity_set_position_xy(wid, x, y);\n\n world.entity_set_visibility(wid, true);\n\n world.entity_set_position_xy(wid + 18, x, y);\n\n world.entity_set_visibility(wid + 18, true);\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 66, "score": 192058.84811314166 }, { "content": "fn add_to_bag(cache: &mut GameData, world: &mut mgfw::ecs::World, gem: u8, count: usize) {\n\n assert!(4 > gem);\n\n for _i in 0..count {\n\n if 80 > cache.bag_sz {\n\n let idx = cache.bag_sz as usize;\n\n let entity = MINI_GEM_IDX + idx;\n\n\n\n cache.bag[idx] = gem;\n\n let image = match gem {\n\n 0 => String::from(\"assets/gem-blue.png\"),\n\n 1 => String::from(\"assets/gem-green.png\"),\n\n 2 => String::from(\"assets/gem-red.png\"),\n\n 3 => String::from(\"assets/gem-purple.png\"),\n\n _ => String::from(\"\"),\n\n };\n\n\n\n world.entity_set_billboard(entity, image.clone());\n\n\n\n world.entity_set_visibility(entity, true);\n\n world.entity_set_alpha_ease(entity, 0.0, 1.0, 1.0);\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 67, "score": 188715.97063147437 }, { "content": "fn perform_action(option: &u8, room: &mut RoomEnum, stats: &mut Player) -> bool {\n\n match room {\n\n RoomEnum::RoomTown => {\n\n match *option {\n\n b't' => *room = 
RoomEnum::RoomTavern,\n\n b'b' => *room = RoomEnum::RoomBank,\n\n b'n' => *room = RoomEnum::RoomArena,\n\n b'a' => *room = RoomEnum::RoomArmour,\n\n b'w' => *room = RoomEnum::RoomWeapons,\n\n b'f' => *room = RoomEnum::RoomForest,\n\n b's' => *room = RoomEnum::RoomStats,\n\n b'q' => *room = RoomEnum::RoomQuit,\n\n _ => return false,\n\n };\n\n reset_view();\n\n }\n\n RoomEnum::RoomTavern => match *option {\n\n b'g' if charge_for_room(stats) => *room = RoomEnum::RoomTavernRest,\n\n b'l' => *room = RoomEnum::RoomTavernLore,\n\n b'r' => {\n", "file_path": "text-games/lord/src/main.rs", "rank": 68, "score": 186608.7833076083 }, { "content": "pub fn new() -> GameData {\n\n GameData::new()\n\n}\n\n\n\nimpl GameData {\n\n fn new() -> GameData {\n\n GameData {\n\n kingdoms: Vec::new(),\n\n menu: MenuEnum::Main,\n\n year: 0,\n\n map: gen::gen_map(),\n\n pop: gen::gen_pop(),\n\n notifications: VecDeque::new(),\n\n log: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn run(&mut self) -> Result<(), Error> {\n\n self.initialize();\n\n\n", "file_path": "text-games/knights/src/game/mod.rs", "rank": 69, "score": 177333.8960513371 }, { "content": "pub fn gen_map() -> Vec<usize> {\n\n let mut ret: Vec<usize> = Vec::new();\n\n\n\n for i in 0..25 {\n\n ret.push(i);\n\n }\n\n\n\n for i in 0..25 {\n\n for _j in 0..25 {\n\n let b = rand::thread_rng().gen_range(0, 25);\n\n let temp = ret[b];\n\n ret[b] = ret[i];\n\n ret[i] = temp;\n\n }\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "text-games/knights/src/game/gen.rs", "rank": 70, "score": 172096.60199321064 }, { "content": "fn swap(cache: &mut GameData) {\n\n let sidx = cache.swap_idx;\n\n let oidx = sidx % 4;\n\n let hidx = (sidx - oidx) / 4;\n\n\n\n let src = hidx as usize;\n\n let dst = match oidx {\n\n 0 => hidx - 9,\n\n 1 => hidx + 9,\n\n 2 => hidx - 1,\n\n 3 => hidx + 1,\n\n _ => hidx,\n\n } as usize;\n\n\n\n let a = cache.board[src];\n\n let b = cache.board[dst];\n\n cache.board[src] = b;\n\n cache.board[dst] = a;\n\n\n\n slide(cache, a, dst as i16);\n\n slide(cache, b, src as i16);\n\n}\n\n\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 71, "score": 171416.115770152 }, { "content": "fn reset_board(cache: &mut GameData) {\n\n for idx in 0..9 {\n\n cache.board_state[idx] = ENUM_NONE;\n\n }\n\n cache.refresh_board = true;\n\n cache.refresh_score = true;\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 72, "score": 171416.115770152 }, { "content": "fn check_win(cache: &mut GameData) {\n\n cache.turn = 1 + (1 - (cache.turn - 1));\n\n if match_three(cache, ENUM_PLAYER) {\n\n cache.pscore += 1;\n\n cache.game_over = true;\n\n cache.winner = ENUM_PLAYER;\n\n } else if match_three(cache, ENUM_COMPUTER) {\n\n cache.cscore += 1;\n\n cache.game_over = true;\n\n cache.winner = ENUM_COMPUTER;\n\n } else if match_draw(cache) {\n\n cache.game_over = true;\n\n cache.winner = ENUM_NONE;\n\n }\n\n}\n\n\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 73, "score": 171416.115770152 }, { "content": "#[rustfmt::skip]\n\nfn computer_turn(cache: &mut GameData) {\n\n if cache.game_over { return; } // no clicking during game over animation\n\n // count the available options\n\n let mut num_options = 0;\n\n let mut block_option = 10; // option that will block player win\n\n let mut win_option = 10; // option that will result in immediate win\n\n let mut aggressive = false; // flag to limit aggressiveness\n\n if mgfw::rnd() < 0.5 { aggressive = true; }\n\n\n\n // look for available move options\n\n for idx in 0..9 {\n\n if ENUM_NONE == 
cache.board_state[idx] {\n\n num_options += 1;\n\n // check for computer win from this position\n\n cache.board_state[idx] = ENUM_COMPUTER;\n\n if match_three(cache, ENUM_COMPUTER) { win_option = idx; }\n\n // check for player win from this position\n\n cache.board_state[idx] = ENUM_PLAYER;\n\n if match_three(cache, ENUM_PLAYER) { block_option = idx; }\n\n // reset\n", "file_path": "2d-games/tictactoe/src/game/game.rs", "rank": 74, "score": 171416.115770152 }, { "content": "fn time_up(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n for i in 0..81 {\n\n let id = 3 + i;\n\n cache.icons[i].entity = id as u8;\n\n cache.icons[i].class = gen_class(cache.level);\n\n cache.icons[i].slot = ICON_OPEN;\n\n world.entity_set_visibility(id, false);\n\n update_image(cache, i, world);\n\n //\n\n cache.board[i] = ICON_OPEN;\n\n }\n\n\n\n cache.score = cache.score_prev;\n\n cache.level_timer = level_clock(cache);\n\n cache.game_timer = 0.0;\n\n cache.show_popup = true;\n\n cache.popup_timer = 3.0;\n\n cache.hover_idx = HOVER_INVALID;\n\n cache.swap_idx = SWAP_INVALID;\n\n cache.bomb_idx = BOMB_INVALID;\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 75, "score": 169161.0299743747 }, { "content": "fn level_up(cache: &mut GameData, world: &mut mgfw::ecs::World) {\n\n cache.level += 1;\n\n for i in 0..81 {\n\n let id = 3 + i;\n\n cache.icons[i].entity = id as u8;\n\n cache.icons[i].class = gen_class(cache.level);\n\n cache.icons[i].slot = ICON_OPEN;\n\n world.entity_set_visibility(id, false);\n\n update_image(cache, i, world);\n\n //\n\n cache.board[i] = ICON_OPEN;\n\n }\n\n\n\n cache.level_timer = level_clock(cache);\n\n cache.game_timer = 0.0;\n\n cache.show_popup = true;\n\n cache.popup_timer = 3.0;\n\n cache.score_prev = cache.score;\n\n cache.hover_idx = HOVER_INVALID;\n\n cache.swap_idx = SWAP_INVALID;\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 76, "score": 169161.0299743747 }, { "content": "struct RenderComponentManagerData {\n\n render_type: u8,\n\n}\n\n\n\npub struct RenderComponentManager {\n\n data: *mut RenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl RenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> RenderComponentManager {\n\n log(format!(\"Constructing RenderComponentManager\"));\n\n // allocate system memory in cache\n\n let sz_bytes = std::mem::size_of::<RenderComponentManagerData>() * ENTITY_SZ;\n\n let data = mgr.allocate(sz_bytes) as *mut RenderComponentManagerData;\n\n\n\n RenderComponentManager { data }\n\n }\n\n\n", "file_path": "2d-games/tictactoe/src/mgfw/ecs/component_render.rs", "rank": 77, "score": 164683.409738468 }, { "content": "struct RenderComponentManagerData {\n\n render_type: u8,\n\n}\n\n\n\npub struct RenderComponentManager {\n\n data: *mut RenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl RenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> RenderComponentManager {\n\n log(format!(\"Constructing RenderComponentManager\"));\n\n // allocate system memory in cache\n\n let sz_bytes = std::mem::size_of::<RenderComponentManagerData>() * ENTITY_SZ;\n\n let data = mgr.allocate(sz_bytes) as *mut RenderComponentManagerData;\n\n\n\n RenderComponentManager { data }\n\n }\n\n\n", "file_path": "2d-games/mahjong/src/mgfw/ecs/component_render.rs", "rank": 78, "score": 164683.409738468 }, { "content": "struct RenderComponentManagerData {\n\n render_type: 
u8,\n\n}\n\n\n\npub struct RenderComponentManager {\n\n data: *mut RenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl RenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> RenderComponentManager {\n\n log(format!(\"Constructing RenderComponentManager\"));\n\n // allocate system memory in cache\n\n let sz_bytes = std::mem::size_of::<RenderComponentManagerData>() * ENTITY_SZ;\n\n let data = mgr.allocate(sz_bytes) as *mut RenderComponentManagerData;\n\n\n\n RenderComponentManager { data }\n\n }\n\n\n", "file_path": "2d-games/tet-rust/src/mgfw/ecs/component_render.rs", "rank": 79, "score": 162597.27963468293 }, { "content": "struct RenderComponentManagerData {\n\n render_type: u8,\n\n}\n\n\n\npub struct RenderComponentManager {\n\n data: *mut RenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl RenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> RenderComponentManager {\n\n log(format!(\"Constructing RenderComponentManager\"));\n\n // allocate system memory in cache\n\n let sz_bytes = std::mem::size_of::<RenderComponentManagerData>() * ENTITY_SZ;\n\n let data = mgr.allocate(sz_bytes) as *mut RenderComponentManagerData;\n\n\n\n RenderComponentManager { data }\n\n }\n\n\n", "file_path": "2d-games/match-three/src/mgfw/ecs/component_render.rs", "rank": 80, "score": 162597.27963468293 }, { "content": "struct BillboardRenderComponentManagerData {\n\n texture: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n load_image_needed: bool,\n\n}\n\n\n\npub struct BillboardRenderComponentManager {\n\n cache_data: *mut BillboardRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n texture_files: std::boxed::Box<HashMap<usize, String>>,\n\n texture_handles: std::boxed::Box<HashMap<String, u32>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BillboardRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> BillboardRenderComponentManager {\n\n log(format!(\"Constructing BillboardRenderComponentManager\"));\n\n\n\n let fdata: HashMap<usize, String> = HashMap::new();\n", "file_path": "2d-games/tictactoe/src/mgfw/ecs/component_render_billboard.rs", "rank": 81, "score": 160579.18345084885 }, { "content": "struct BillboardRenderComponentManagerData {\n\n texture: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n load_image_needed: bool,\n\n}\n\n\n\npub struct BillboardRenderComponentManager {\n\n cache_data: *mut BillboardRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n texture_files: std::boxed::Box<HashMap<usize, String>>,\n\n texture_handles: std::boxed::Box<HashMap<String, u32>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BillboardRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> BillboardRenderComponentManager {\n\n log(format!(\"Constructing BillboardRenderComponentManager\"));\n\n\n\n let fdata: HashMap<usize, String> = HashMap::new();\n", "file_path": "2d-games/mahjong/src/mgfw/ecs/component_render_billboard.rs", "rank": 82, "score": 160579.18345084885 }, { "content": "struct TextRenderComponentManagerData {\n\n width: u16,\n\n num_chars: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TextRenderComponentManager {\n\n cache_data: *mut TextRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<Text>>,\n\n font: 
std::boxed::Box<fonts::retro_gaming::Font>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TextRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TextRenderComponentManager {\n\n log(format!(\"Constructing TextRenderComponentManager\"));\n\n\n\n let mut data: Vec<Text> = Vec::new();\n", "file_path": "2d-games/mahjong/src/mgfw/ecs/component_render_text.rs", "rank": 83, "score": 160579.18345084885 }, { "content": "struct TextRenderComponentManagerData {\n\n width: u16,\n\n num_chars: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TextRenderComponentManager {\n\n cache_data: *mut TextRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<Text>>,\n\n font: std::boxed::Box<fonts::retro_gaming::Font>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TextRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TextRenderComponentManager {\n\n log(format!(\"Constructing TextRenderComponentManager\"));\n\n\n\n let mut data: Vec<Text> = Vec::new();\n", "file_path": "2d-games/tictactoe/src/mgfw/ecs/component_render_text.rs", "rank": 84, "score": 160579.18345084885 }, { "content": "struct LineRenderComponentManagerData {\n\n num_lines: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct LineRenderComponentManager {\n\n cache_data: *mut LineRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<LineBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl LineRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> LineRenderComponentManager {\n\n log(format!(\"Constructing LineRenderComponentManager\"));\n\n\n\n let mut data: Vec<LineBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(LineBuffer {\n", "file_path": "2d-games/mahjong/src/mgfw/ecs/component_render_line_buffer.rs", "rank": 85, "score": 158625.7434395693 }, { "content": "struct BillboardRenderComponentManagerData {\n\n texture: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n load_image_needed: bool,\n\n}\n\n\n\npub struct BillboardRenderComponentManager {\n\n cache_data: *mut BillboardRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n texture_files: std::boxed::Box<HashMap<usize, String>>,\n\n texture_handles: std::boxed::Box<HashMap<String, u32>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BillboardRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> BillboardRenderComponentManager {\n\n log(format!(\"Constructing BillboardRenderComponentManager\"));\n\n\n\n let fdata: HashMap<usize, String> = HashMap::new();\n", "file_path": "2d-games/match-three/src/mgfw/ecs/component_render_billboard.rs", "rank": 86, "score": 158625.7434395693 }, { "content": "struct BillboardRenderComponentManagerData {\n\n texture: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n load_image_needed: bool,\n\n}\n\n\n\npub struct BillboardRenderComponentManager {\n\n cache_data: *mut BillboardRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n texture_files: std::boxed::Box<HashMap<usize, String>>,\n\n texture_handles: std::boxed::Box<HashMap<String, u32>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl BillboardRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> BillboardRenderComponentManager {\n\n log(format!(\"Constructing BillboardRenderComponentManager\"));\n\n\n\n let fdata: HashMap<usize, String> = HashMap::new();\n", "file_path": 
"2d-games/tet-rust/src/mgfw/ecs/component_render_billboard.rs", "rank": 87, "score": 158625.7434395693 }, { "content": "struct TextRenderComponentManagerData {\n\n width: u16,\n\n num_chars: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TextRenderComponentManager {\n\n cache_data: *mut TextRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<Text>>,\n\n font: std::boxed::Box<fonts::retro_gaming::Font>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TextRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TextRenderComponentManager {\n\n log(format!(\"Constructing TextRenderComponentManager\"));\n\n\n\n let mut data: Vec<Text> = Vec::new();\n", "file_path": "2d-games/tet-rust/src/mgfw/ecs/component_render_text.rs", "rank": 88, "score": 158625.7434395693 }, { "content": "struct TextRenderComponentManagerData {\n\n width: u16,\n\n num_chars: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TextRenderComponentManager {\n\n cache_data: *mut TextRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<Text>>,\n\n font: std::boxed::Box<fonts::retro_gaming::Font>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TextRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TextRenderComponentManager {\n\n log(format!(\"Constructing TextRenderComponentManager\"));\n\n\n\n let mut data: Vec<Text> = Vec::new();\n", "file_path": "2d-games/match-three/src/mgfw/ecs/component_render_text.rs", "rank": 89, "score": 158625.7434395693 }, { "content": "struct LineRenderComponentManagerData {\n\n num_lines: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct LineRenderComponentManager {\n\n cache_data: *mut LineRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<LineBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl LineRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> LineRenderComponentManager {\n\n log(format!(\"Constructing LineRenderComponentManager\"));\n\n\n\n let mut data: Vec<LineBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(LineBuffer {\n", "file_path": "2d-games/tictactoe/src/mgfw/ecs/component_render_line_buffer.rs", "rank": 90, "score": 158625.7434395693 }, { "content": "struct TriangleRenderComponentManagerData {\n\n num_triangles: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TriangleRenderComponentManager {\n\n cache_data: *mut TriangleRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<TriangleBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TriangleRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TriangleRenderComponentManager {\n\n log(format!(\"Constructing TriangleRenderComponentManager\"));\n\n\n\n let mut data: Vec<TriangleBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(TriangleBuffer {\n", "file_path": "2d-games/tictactoe/src/mgfw/ecs/component_render_triangle_buffer.rs", "rank": 91, "score": 158625.7434395693 }, { "content": "struct TriangleRenderComponentManagerData {\n\n num_triangles: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TriangleRenderComponentManager {\n\n cache_data: *mut TriangleRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: 
std::boxed::Box<Vec<TriangleBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TriangleRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TriangleRenderComponentManager {\n\n log(format!(\"Constructing TriangleRenderComponentManager\"));\n\n\n\n let mut data: Vec<TriangleBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(TriangleBuffer {\n", "file_path": "2d-games/mahjong/src/mgfw/ecs/component_render_triangle_buffer.rs", "rank": 92, "score": 158625.7434395693 }, { "content": "pub fn get_hover_idx(mx: i32, my: i32) -> i16 {\n\n let sx = mx - 32;\n\n let sy = my - 32;\n\n if 0 > sx || 0 > sy || 575 < sx || 575 < sy {\n\n return HOVER_INVALID;\n\n }\n\n let ix = (sx - sx % 64) / 64;\n\n let iy = (sy - sy % 64) / 64;\n\n (iy * 9 + ix) as i16\n\n}\n\n\n", "file_path": "2d-games/match-three/src/game/game.rs", "rank": 93, "score": 157539.8630526726 }, { "content": "struct LineRenderComponentManagerData {\n\n num_lines: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct LineRenderComponentManager {\n\n cache_data: *mut LineRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<LineBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl LineRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> LineRenderComponentManager {\n\n log(format!(\"Constructing LineRenderComponentManager\"));\n\n\n\n let mut data: Vec<LineBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(LineBuffer {\n", "file_path": "2d-games/match-three/src/mgfw/ecs/component_render_line_buffer.rs", "rank": 94, "score": 156733.8068433271 }, { "content": "struct LineRenderComponentManagerData {\n\n num_lines: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct LineRenderComponentManager {\n\n cache_data: *mut LineRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<LineBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl LineRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> LineRenderComponentManager {\n\n log(format!(\"Constructing LineRenderComponentManager\"));\n\n\n\n let mut data: Vec<LineBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(LineBuffer {\n", "file_path": "2d-games/tet-rust/src/mgfw/ecs/component_render_line_buffer.rs", "rank": 95, "score": 156733.8068433271 }, { "content": "struct TriangleRenderComponentManagerData {\n\n num_triangles: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TriangleRenderComponentManager {\n\n cache_data: *mut TriangleRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n data: std::boxed::Box<Vec<TriangleBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TriangleRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TriangleRenderComponentManager {\n\n log(format!(\"Constructing TriangleRenderComponentManager\"));\n\n\n\n let mut data: Vec<TriangleBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(TriangleBuffer {\n", "file_path": "2d-games/tet-rust/src/mgfw/ecs/component_render_triangle_buffer.rs", "rank": 96, "score": 156733.8068433271 }, { "content": "struct TriangleRenderComponentManagerData {\n\n num_triangles: u16,\n\n constructed: bool,\n\n reconstruct_needed: bool,\n\n}\n\n\n\npub struct TriangleRenderComponentManager {\n\n cache_data: *mut TriangleRenderComponentManagerData,\n\n // WARNING: Anything below this line is not in cache!\n\n 
data: std::boxed::Box<Vec<TriangleBuffer>>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl TriangleRenderComponentManager {\n\n pub fn new(mgr: &mut CacheManager) -> TriangleRenderComponentManager {\n\n log(format!(\"Constructing TriangleRenderComponentManager\"));\n\n\n\n let mut data: Vec<TriangleBuffer> = Vec::new();\n\n for _i in 0..ENTITY_SZ {\n\n data.push(TriangleBuffer {\n", "file_path": "2d-games/match-three/src/mgfw/ecs/component_render_triangle_buffer.rs", "rank": 97, "score": 156733.8068433271 }, { "content": "fn enemy_exp(level: &u32, eidx: &u32) -> u32 {\n\n let ret = (enemy_max_health(level, eidx) as u32) * level;\n\n ((ret as f32) * 1.5 * srand()).ceil() as u32\n\n}\n\n\n", "file_path": "text-games/lord/src/main.rs", "rank": 98, "score": 155467.53609510203 }, { "content": "fn enemy_gold(level: &u32, eidx: &u32) -> u32 {\n\n let ret = (enemy_max_health(level, eidx) as u32) * level;\n\n ((ret as f32) * srand()).ceil() as u32\n\n}\n\n\n", "file_path": "text-games/lord/src/main.rs", "rank": 99, "score": 155467.53609510203 } ]
Rust
src/window/glutin.rs
jutuon/space-boss-battles
407d3fa1f057ea0ce9b05b6f695349be1f3a141f
use std::os::raw::c_void; use glutin::{EventsLoop, GlContext, WindowBuilder, ContextBuilder, GlWindow, GlRequest, Api, VirtualKeyCode}; use input::{InputManager, Key, Input}; use renderer::{Renderer, DEFAULT_SCREEN_HEIGHT, DEFAULT_SCREEN_WIDTH}; use settings::Settings; use gui::GUI; use logic::Logic; use utils::{TimeManager, TimeMilliseconds}; use audio::{Audio, Volume, AudioPlayer}; use super::{Window, RenderingContext, WINDOW_TITLE}; pub struct GlutinWindow { rendering_context: RenderingContext, events_loop: EventsLoop, window: GlWindow, mouse_x: i32, mouse_y: i32, } impl Window for GlutinWindow { type AudioPlayer = AudioPlayerRodio; fn new(rendering_context: RenderingContext) -> Result<Self, ()> { let events_loop = EventsLoop::new(); let window_builder = WindowBuilder::new() .with_title(WINDOW_TITLE) .with_dimensions(DEFAULT_SCREEN_WIDTH as u32, DEFAULT_SCREEN_HEIGHT as u32) .with_min_dimensions(DEFAULT_SCREEN_WIDTH as u32, DEFAULT_SCREEN_HEIGHT as u32); let gl_request = match rendering_context { RenderingContext::OpenGL => GlRequest::Specific(Api::OpenGl, (3,3)), RenderingContext::OpenGLES => GlRequest::Specific(Api::OpenGlEs, (2,0)), }; let context_builder = ContextBuilder::new() .with_gl(gl_request) .with_vsync(true); let gl_window = match GlWindow::new(window_builder, context_builder, &events_loop) { Ok(window) => window, Err(error) => { println!("couldn't create window: {}", error); return Err(()); } }; unsafe { if let Err(error) = gl_window.make_current() { println!("couldn't make OpenGL context current: {}", error); return Err(()); } } let window = Self { rendering_context, window: gl_window, events_loop, mouse_x: 0, mouse_y: 0, }; Ok(window) } fn handle_events<R: Renderer>( &mut self, input_manager: &mut InputManager, renderer: &mut R, settings: &mut Settings, gui: &mut GUI, logic: &mut Logic, quit_flag: &mut bool, time_manager: &TimeManager, ) { use glutin::{Event, WindowEvent, KeyboardInput, ElementState}; let mouse_x = &mut self.mouse_x; let mouse_y = &mut self.mouse_y; self.events_loop.poll_events(|event| { match event { Event::WindowEvent { event: window_event, ..} => { match window_event { WindowEvent::Resized(width, height) => { renderer.update_screen_size(width as i32, height as i32); gui.update_position_from_half_screen_width(renderer.half_screen_width_world_coordinates()); logic.update_half_screen_width(renderer.half_screen_width_world_coordinates()); }, WindowEvent::Closed => *quit_flag = true, WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(keycode), .. }, .. } => { if let Some(key) = virtual_keycode_to_key(keycode) { input_manager.update_key_down(key, time_manager.current_time()); } } WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Released, virtual_keycode: Some(keycode), .. }, .. 
} => { if let Some(key) = virtual_keycode_to_key(keycode) { input_manager.update_key_up(key, time_manager.current_time()); } } WindowEvent::MouseInput { state: ElementState::Released, ..} => { input_manager.update_mouse_button_up(renderer.screen_coordinates_to_world_coordinates(*mouse_x, *mouse_y)); }, WindowEvent::CursorMoved { position: (x, y), ..} => { *mouse_x = x as i32; *mouse_y = y as i32; input_manager.update_mouse_motion(renderer.screen_coordinates_to_world_coordinates(*mouse_x, *mouse_y)); }, _ => (), } }, _ => (), } }) } fn swap_buffers(&mut self) -> Result<(), ()> { self.window.swap_buffers().map_err(|error| { println!("couldn't swap buffers: {}", error); }) } fn set_fullscreen(&mut self, value: bool) { if value { let current_monitor = self.window.get_current_monitor(); self.window.set_fullscreen(Some(current_monitor)); } else { self.window.set_fullscreen(None); } } fn set_v_sync(&mut self, value: bool) { } fn rendering_context(&self) -> RenderingContext { self.rendering_context } fn gl_get_proc_address(&self, function_name: &str) -> *const c_void { self.window.get_proc_address(function_name) as *const c_void } fn add_game_controller_mappings(&mut self, game_controller_mappings: &Vec<String>) { } fn audio_player(&mut self) -> Option<Self::AudioPlayer> { None } } pub struct AudioPlayerRodio { } impl AudioPlayer for AudioPlayerRodio { type Music = AudioRodio; type Effect = AudioRodio; } pub struct AudioRodio { } impl Audio for AudioRodio { type Volume = VolumeRodio; fn load(file_path: &str) -> Result<Self, String> { unimplemented!() } fn play(&mut self) { unimplemented!() } fn change_volume(&mut self, volume: Self::Volume) { unimplemented!() } } #[derive(Debug, Clone, Copy)] pub struct VolumeRodio { } impl Volume for VolumeRodio { type Value = i32; const MAX_VOLUME: Self::Value = 0; const DEFAULT_VOLUME_PERCENTAGE: i32 = 0; fn new(volume: Self::Value) -> Self { unimplemented!() } fn value(&self) -> Self::Value { unimplemented!() } fn from_percentage(percentage: i32) -> Self { let percentage = if percentage < 0 { 0 } else if 100 < percentage { 100 } else { percentage }; VolumeRodio {} } } fn virtual_keycode_to_key(keycode: VirtualKeyCode) -> Option<Key> { let key = match keycode { VirtualKeyCode::Up | VirtualKeyCode::W => Key::Up, VirtualKeyCode::Down | VirtualKeyCode::S => Key::Down, VirtualKeyCode::Left | VirtualKeyCode::A => Key::Left, VirtualKeyCode::Right | VirtualKeyCode::D => Key::Right, VirtualKeyCode::Space | VirtualKeyCode::LControl | VirtualKeyCode::RControl => Key::Shoot, VirtualKeyCode::Return => Key::Select, VirtualKeyCode::Escape => Key::Back, _ => return None, }; Some(key) }
use std::os::raw::c_void; use glutin::{EventsLoop, GlContext, WindowBuilder, ContextBuilder, GlWindow, GlRequest, Api, VirtualKeyCode}; use input::{InputManager, Key, Input}; use renderer::{Renderer, DEFAULT_SCREEN_HEIGHT, DEFAULT_SCREEN_WIDTH}; use settings::Settings; use gui::GUI; use logic::Logic; use utils::{TimeManager, TimeMilliseconds}; use audio::{Audio, Volume, AudioPlayer}; use super::{Window, RenderingContext, WINDOW_TITLE}; pub struct GlutinWindow { rendering_context: RenderingContext, events_loop: EventsLoop, window: GlWindow, mouse_x: i32, mouse_y: i32, } impl Window for GlutinWindow { type AudioPlayer = AudioPlayerRodio; fn new(rendering_context: RenderingContext) -> Result<Self, ()> { let events_loop = EventsLoop::new(); let window_builder = WindowBuilder::new() .with_title(WINDOW_TITLE) .with_dimensions(DEFAULT_SCREEN_WIDTH as u32, DEFAULT_SCREEN_HEIGHT as u32) .with_min_dimensions(DEFAULT_SCREEN_WIDTH as u32, DEFAULT_SCREEN_HEIGHT as u32); let gl_request = match rendering_context { RenderingContext::OpenGL => GlRequest::Specific(Api::OpenGl, (3,3)), RenderingContext::OpenGLES => GlRequest::Specific(Api::OpenGlEs, (2,0)), }; let context_builder = ContextBuilder::new() .with_gl(gl_request) .with_vsync(true); let gl_window = match GlWindow::new(window_builder, context_builder, &events_loop) { Ok(window) => window, Err(error) => { println!("couldn't create window: {}", error); return Err(()); } }; unsafe { if let Err(error) = gl_window.make_current() { println!("couldn't make OpenGL context current: {}", error); return Err(()); } } let window = Self { rendering_context, window: gl_window, events_loop, mouse_x: 0, mouse_y: 0, }; Ok(window) } fn handle_events<R: Renderer>( &mut self, input_manager: &mut InputManager, renderer: &mut R, settings: &mut Settings, gui: &mut GUI, logic: &mut Logic, quit_flag: &mut bool, time_manager: &TimeManager, ) { use glutin::{Event, WindowEvent, KeyboardInput, ElementState}; let mouse_x = &mut self.mouse_x; let mouse_y = &mut self.mouse_y; self.events_loop.poll_events(|event| { match event { Event::WindowEvent { event: window_event, ..} => { match window_event { WindowEvent::Resized(width, height) => { renderer.update_screen_size(width as i32, height as i32); gui.update_position_from_half_screen_width(renderer.half_screen_width_world_coordinates()); logic.update_half_screen_width(renderer.half_screen_width_world_coordinates()); }, WindowEvent::Closed => *quit_flag = true, WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Pressed, virtual_keycode: Some(keycode), .. }, .. } => {
} WindowEvent::KeyboardInput { input: KeyboardInput { state: ElementState::Released, virtual_keycode: Some(keycode), .. }, .. } => { if let Some(key) = virtual_keycode_to_key(keycode) { input_manager.update_key_up(key, time_manager.current_time()); } } WindowEvent::MouseInput { state: ElementState::Released, ..} => { input_manager.update_mouse_button_up(renderer.screen_coordinates_to_world_coordinates(*mouse_x, *mouse_y)); }, WindowEvent::CursorMoved { position: (x, y), ..} => { *mouse_x = x as i32; *mouse_y = y as i32; input_manager.update_mouse_motion(renderer.screen_coordinates_to_world_coordinates(*mouse_x, *mouse_y)); }, _ => (), } }, _ => (), } }) } fn swap_buffers(&mut self) -> Result<(), ()> { self.window.swap_buffers().map_err(|error| { println!("couldn't swap buffers: {}", error); }) } fn set_fullscreen(&mut self, value: bool) { if value { let current_monitor = self.window.get_current_monitor(); self.window.set_fullscreen(Some(current_monitor)); } else { self.window.set_fullscreen(None); } } fn set_v_sync(&mut self, value: bool) { } fn rendering_context(&self) -> RenderingContext { self.rendering_context } fn gl_get_proc_address(&self, function_name: &str) -> *const c_void { self.window.get_proc_address(function_name) as *const c_void } fn add_game_controller_mappings(&mut self, game_controller_mappings: &Vec<String>) { } fn audio_player(&mut self) -> Option<Self::AudioPlayer> { None } } pub struct AudioPlayerRodio { } impl AudioPlayer for AudioPlayerRodio { type Music = AudioRodio; type Effect = AudioRodio; } pub struct AudioRodio { } impl Audio for AudioRodio { type Volume = VolumeRodio; fn load(file_path: &str) -> Result<Self, String> { unimplemented!() } fn play(&mut self) { unimplemented!() } fn change_volume(&mut self, volume: Self::Volume) { unimplemented!() } } #[derive(Debug, Clone, Copy)] pub struct VolumeRodio { } impl Volume for VolumeRodio { type Value = i32; const MAX_VOLUME: Self::Value = 0; const DEFAULT_VOLUME_PERCENTAGE: i32 = 0; fn new(volume: Self::Value) -> Self { unimplemented!() } fn value(&self) -> Self::Value { unimplemented!() } fn from_percentage(percentage: i32) -> Self { let percentage = if percentage < 0 { 0 } else if 100 < percentage { 100 } else { percentage }; VolumeRodio {} } } fn virtual_keycode_to_key(keycode: VirtualKeyCode) -> Option<Key> { let key = match keycode { VirtualKeyCode::Up | VirtualKeyCode::W => Key::Up, VirtualKeyCode::Down | VirtualKeyCode::S => Key::Down, VirtualKeyCode::Left | VirtualKeyCode::A => Key::Left, VirtualKeyCode::Right | VirtualKeyCode::D => Key::Right, VirtualKeyCode::Space | VirtualKeyCode::LControl | VirtualKeyCode::RControl => Key::Shoot, VirtualKeyCode::Return => Key::Select, VirtualKeyCode::Escape => Key::Back, _ => return None, }; Some(key) }
if let Some(key) = virtual_keycode_to_key(keycode) { input_manager.update_key_down(key, time_manager.current_time()); }
if_condition
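The `strategy` value above marks this example as an if-condition completion: the `middle` field is the `if let` block that belongs between the `prefix` and `suffix` fields, inside the `ElementState::Pressed` keyboard arm of `handle_events` in src/window/glutin.rs. Joining the three fields gives the excerpt below; it is shown purely as an illustration of how the fields fit together and only repeats code already present in this row's file_code.

WindowEvent::KeyboardInput {
    input: KeyboardInput {
        state: ElementState::Pressed,
        virtual_keycode: Some(keycode),
        ..
    },
    ..
} => {
    // `middle`: map the glutin keycode to the game's `Key` and record the press time
    if let Some(key) = virtual_keycode_to_key(keycode) {
        input_manager.update_key_down(key, time_manager.current_time());
    }
}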
[ { "content": "/// Returns the value of boolean reference and sets\n\n/// references value to false.\n\nfn return_and_reset(value: &mut bool) -> bool {\n\n let original_value: bool = *value;\n\n *value = false;\n\n original_value\n\n}\n\n\n\nimpl Input for InputManager {\n\n fn up(&self) -> bool { self.keyboard.up }\n\n fn down(&self) -> bool { self.keyboard.down }\n\n fn left(&self) -> bool { self.keyboard.left }\n\n fn right(&self) -> bool { self.keyboard.right }\n\n fn shoot(&self) -> bool { self.keyboard.shoot }\n\n\n\n fn key_hit_up(&mut self) -> bool { self.keyboard.key_hit_up.key_hit() }\n\n fn key_hit_down(&mut self) -> bool { self.keyboard.key_hit_down.key_hit() }\n\n fn key_hit_left(&mut self) -> bool { self.keyboard.key_hit_left.key_hit() }\n\n fn key_hit_right(&mut self) -> bool { self.keyboard.key_hit_right.key_hit() }\n\n fn key_hit_enter(&mut self) -> bool { return_and_reset(&mut self.keyboard.key_hit_enter) }\n\n fn key_hit_back(&mut self) -> bool { return_and_reset(&mut self.keyboard.key_hit_back) }\n\n\n\n fn mouse_button_hit(&mut self) -> bool { return_and_reset(&mut self.mouse.mouse_button_hit) }\n\n fn mouse_motion(&mut self) -> bool { return_and_reset(&mut self.mouse.mouse_motion) }\n\n fn mouse_location(&self) -> &Point2<f32> { &self.mouse.mouse_location }\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 0, "score": 200497.20988544158 }, { "content": "/// Writes bindings for OpenGL 3.3 or OpenGL ES 2.0\n\n/// to a specific file.\n\n///\n\n/// If file already exists, this function will not overwrite it.\n\n/// This function will check if file already exists with function\n\n/// `file_exists_and_contains_message`\n\n///\n\nfn create_bindings(opengl: OpenGLApi, file_path: &mut PathBuf) {\n\n\n\n let message: &'static [u8] = b\"//\n\n//\n\n// This file is auto generated.\n\n//\n\n//\n\n\";\n\n\n\n if file_exists_and_contains_message(&file_path, message) {\n\n return;\n\n }\n\n\n\n let mut file = File::create(&file_path).unwrap();\n\n\n\n file.write(message).unwrap();\n\n\n\n let registry;\n\n\n\n match opengl {\n", "file_path": "gl/build.rs", "rank": 1, "score": 158227.5429720546 }, { "content": "/// Settings depending on current game difficulty.\n\nstruct LogicSettings {\n\n screen_width_half: f32,\n\n player_laser_damage: i32,\n\n enemy_laser_damage: i32,\n\n enemy_hit_damage_16_milliseconds: i32,\n\n enemy_shooting_speed_milliseconds: u32,\n\n difficulty: Difficulty,\n\n}\n\n\n\nimpl LogicSettings {\n\n /// Create new `LogicSettings`.\n\n fn new() -> LogicSettings {\n\n LogicSettings {\n\n screen_width_half: 0.0,\n\n player_laser_damage: 0,\n\n enemy_laser_damage: 0,\n\n enemy_hit_damage_16_milliseconds: 0,\n\n enemy_shooting_speed_milliseconds: 0,\n\n difficulty: Difficulty::Normal,\n\n }\n", "file_path": "src/logic/mod.rs", "rank": 2, "score": 124574.5206953964 }, { "content": "/// Convert char to position index at game's font tile map.\n\n///\n\n/// First item in tuple will be x index, and second item will be y index.\n\nfn tile_map_index_from_char(c: char) -> (u32, u32) {\n\n match c {\n\n '0' => (0,0),\n\n '1' => (1,0),\n\n '2' => (2,0),\n\n '3' => (3,0),\n\n '4' => (4,0),\n\n '5' => (5,0),\n\n '6' => (6,0),\n\n '7' => (7,0),\n\n '8' => (8,0),\n\n '9' => (9,0),\n\n 'A' => (10,0),\n\n 'B' => (11,0),\n\n 'C' => (12,0),\n\n 'D' => (13,0),\n\n 'E' => (14,0),\n\n 'F' => (15,0),\n\n\n\n 'G' => (0,1),\n", "file_path": "src/gui/components.rs", "rank": 3, "score": 122762.95637460462 }, { "content": "fn keycode_to_key(keycode: Keycode) -> Option<Key> {\n\n let key = 
match keycode {\n\n Keycode::Up | Keycode::W => Key::Up,\n\n Keycode::Down | Keycode::S => Key::Down,\n\n Keycode::Left | Keycode::A => Key::Left,\n\n Keycode::Right | Keycode::D => Key::Right,\n\n Keycode::Space | Keycode::LCtrl | Keycode::RCtrl => Key::Shoot,\n\n Keycode::Return => Key::Select,\n\n PAUSE_KEY => Key::Back,\n\n _ => return None,\n\n };\n\n\n\n Some(key)\n\n}\n\n\n\n\n", "file_path": "src/window/sdl2.rs", "rank": 4, "score": 119330.06704111779 }, { "content": "/// Interface for game components requiring user input information.\n\n///\n\n/// Key hits and button hits will reset to false when method is called.\n\npub trait Input {\n\n /// Is up key down currently\n\n fn up(&self) -> bool;\n\n /// Is down key down currently\n\n fn down(&self) -> bool;\n\n /// Is left key down currently\n\n fn left(&self) -> bool;\n\n /// Is right key down currently\n\n fn right(&self) -> bool;\n\n /// Is shoot key down currently\n\n fn shoot(&self) -> bool;\n\n\n\n /// Key hit for up key.\n\n fn key_hit_up(&mut self) -> bool;\n\n /// Key hit for down key.\n\n fn key_hit_down(&mut self) -> bool;\n\n /// Key hit for left key.\n\n fn key_hit_left(&mut self) -> bool;\n\n /// Key hit for right key.\n\n fn key_hit_right(&mut self) -> bool;\n", "file_path": "src/input.rs", "rank": 6, "score": 112212.07613916582 }, { "content": "/// Input handling for GUILayer.\n\n///\n\n/// Includes default implementation for handling input for vertical button groups.\n\npub trait GUILayerInputHandler : GUILayer {\n\n /// Implementation for this is required for default input handling.\n\n fn get_buttons_mut(&mut self) -> &mut GUIGroup<GUIButton>;\n\n\n\n /// Override this method to do something before sending the `GUIEvent`\n\n /// to the `GUI`.\n\n fn layer_specific_operations(&mut self, _event: &mut GUIEvent) {}\n\n\n\n /// Override this method to do additional input handling in addition\n\n /// to the default input handling. 
This method will be called\n\n /// in the else block of default `handle_input` function.\n\n fn layer_specific_input_handling<T: Input>(&mut self, _input: &mut T) -> Option<GUIEvent> { None }\n\n\n\n /// Default implementation for handling input for vertical button groups.\n\n /// Keyboard and mouse input are supported.\n\n fn handle_input<T: Input>(&mut self, input: &mut T) -> Option<GUIEvent> {\n\n if input.key_hit_up() {\n\n self.get_buttons_mut().selection_up();\n\n None\n\n } else if input.key_hit_down() {\n", "file_path": "src/gui/mod.rs", "rank": 7, "score": 106914.44857742687 }, { "content": "/// Create `VertexArray` with vertex and texture\n\n/// coordinate data of square.\n\n///\n\n/// Vertex data will be set to attribute index 0 and\n\n/// texture data will be set to attribute index 1 when rendering\n\n/// with this vertex array.\n\nfn create_square() -> VertexArray {\n\n let mut square = VertexArray::new(6);\n\n\n\n let size : f32 = 0.5;\n\n\n\n let vertex_data: [f32; 18] = [\n\n size, -size, 0.0,\n\n size, size, 0.0,\n\n -size, size, 0.0,\n\n\n\n size, -size, 0.0,\n\n -size, size, 0.0,\n\n -size, -size, 0.0,\n\n ];\n\n let texture_coordinates_data: [f32; 12] = [\n\n 1.0, 0.0,\n\n 1.0, 1.0,\n\n 0.0, 1.0,\n\n\n\n 1.0, 0.0,\n\n 0.0, 1.0,\n\n 0.0, 0.0,\n\n ];\n\n\n\n square.add_static_buffer(&vertex_data, 3, 0);\n\n square.add_static_buffer(&texture_coordinates_data, 2, 1);\n\n\n\n square\n\n}", "file_path": "src/renderer/mod.rs", "rank": 8, "score": 104734.61467764448 }, { "content": "pub trait Window: Sized {\n\n type AudioPlayer: AudioPlayer;\n\n\n\n fn new(RenderingContext) -> Result<Self, ()>;\n\n\n\n fn handle_events<R: Renderer>(\n\n &mut self,\n\n &mut InputManager,\n\n &mut R,\n\n &mut Settings,\n\n &mut GUI,\n\n &mut Logic,\n\n quit_flag: &mut bool,\n\n &TimeManager,\n\n );\n\n\n\n fn swap_buffers(&mut self) -> Result<(), ()>;\n\n\n\n fn set_fullscreen(&mut self, bool);\n\n\n\n fn set_v_sync(&mut self, bool);\n\n\n\n fn rendering_context(&self) -> RenderingContext;\n\n\n\n fn gl_get_proc_address(&self, &str) -> *const c_void;\n\n\n\n fn add_game_controller_mappings(&mut self, &Vec<String>);\n\n\n\n fn audio_player(&mut self) -> Option<Self::AudioPlayer>;\n\n}", "file_path": "src/window/mod.rs", "rank": 9, "score": 103127.85544113029 }, { "content": "/// Return OpenGL renderer string.\n\npub fn get_renderer_string<'a>() -> &'a CStr {\n\n unsafe {\n\n let ptr_to_str = gl_raw::GetString(gl_raw::RENDERER) as *const c_char;\n\n CStr::from_ptr(ptr_to_str)\n\n }\n\n}", "file_path": "gl/src/gl_wrapper/mod.rs", "rank": 10, "score": 101111.5276524671 }, { "content": "/// Create shader of type `ShaderType` from shader source code.\n\n///\n\n/// # Panics\n\n/// * There is error compiling the shader.\n\n/// * Shader code contains 0 byte.\n\nfn load_shader(shader_type: ShaderType, source_code: &str) -> Shader {\n\n let shader_text = CString::new(source_code).unwrap();\n\n\n\n match Shader::new(shader_type, shader_text) {\n\n Ok(shader) => shader,\n\n Err(message) => {\n\n println!(\"shader compile error\\n{}\", message);\n\n panic!();\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/renderer/shader.rs", "rank": 11, "score": 92718.18716712292 }, { "content": "/// Interface for renderers.\n\n///\n\n/// This enables you to write different renderers without\n\n/// changing other codes.\n\npub trait Renderer {\n\n /// Start rendering new frame. 
Call this first.\n\n fn start(&mut self);\n\n /// Render game logic.\n\n fn render(&mut self, &Logic, only_background: bool);\n\n /// Render GUI.\n\n fn render_gui(&mut self, &GUI);\n\n /// End rendering of new frame. Call this last.\n\n fn end<W: Window>(&mut self, &mut W);\n\n /// Converts screen coordinates to world coordinates.\n\n ///\n\n /// # Coordinates\n\n /// * Start form top left corner of the window.\n\n /// * Are in pixels.\n\n /// * Are at range [0, i32::MAX].\n\n // FIXME: use unsigned values for coordinates? SDL2 event makes i32 values.\n\n fn screen_coordinates_to_world_coordinates(&self, x: i32, y: i32) -> Point2<f32>;\n\n\n\n /// Screen width in world coordinates divided by 2.\n\n fn half_screen_width_world_coordinates(&self) -> f32;\n", "file_path": "src/renderer/mod.rs", "rank": 12, "score": 88977.06048355292 }, { "content": "/// Store keyboard state.\n\n///\n\n/// Supports key hits and key down info.\n\n/// For up, down, left and right keys, key hits are implemented with `KeyHitGenerator` which\n\n/// generates key hits if key is kept down.\n\nstruct KeyboardManager {\n\n up: bool,\n\n down: bool,\n\n left: bool,\n\n right: bool,\n\n shoot: bool,\n\n\n\n key_hit_left: KeyHitGenerator,\n\n key_hit_right: KeyHitGenerator,\n\n key_hit_up: KeyHitGenerator,\n\n key_hit_down: KeyHitGenerator,\n\n\n\n key_hit_enter: bool,\n\n key_hit_back: bool,\n\n}\n\n\n\nimpl KeyboardManager {\n\n\n\n /// Creates new `KeyboardManager`\n\n pub fn new() -> KeyboardManager {\n", "file_path": "src/input.rs", "rank": 13, "score": 87784.98741738623 }, { "content": "/// Store mouse location and button hit\n\nstruct MouseManager {\n\n mouse_motion: bool,\n\n mouse_button_hit: bool,\n\n mouse_location: Point2<f32>,\n\n}\n\n\n\nimpl MouseManager {\n\n /// Create new `MouseManager`.\n\n pub fn new() -> MouseManager {\n\n MouseManager {\n\n mouse_motion: false,\n\n mouse_button_hit: false,\n\n mouse_location: Point2::new(0.0, 0.0),\n\n }\n\n }\n\n\n\n /// Reset mouse button hit.\n\n pub fn reset_button_hits(&mut self) {\n\n self.mouse_button_hit = false;\n\n }\n", "file_path": "src/input.rs", "rank": 14, "score": 87776.38740041175 }, { "content": "/// Component information for rendering is only required for GUILayer.\n\npub trait GUILayer {\n\n fn components<'a>(&'a self) -> GUIComponentReferences<'a>;\n\n}\n\n\n", "file_path": "src/gui/mod.rs", "rank": 15, "score": 85761.87268985756 }, { "content": "/// Position updates and calculations for components\n\n/// with alignment.\n\npub trait GUIPosition {\n\n /// Updates position from argument `width_half` which is\n\n /// screen_width/2.0.\n\n fn update_position_from_half_screen_width(&mut self, width_half: f32);\n\n /// Component width.\n\n fn width(&self) -> f32;\n\n /// Set component x position.\n\n fn set_x(&mut self, x: f32);\n\n /// Get current alignment setting.\n\n fn alignment(&self) -> GUIComponentAlignment;\n\n\n\n /// Calculate and return new x position for component.\n\n ///\n\n /// Component width and alignment is used to perform the position calculation.\n\n ///\n\n /// # Arguments\n\n /// * `new_x` is x coordinate where user wants to position the component.\n\n fn calculate_component_position(&self, new_x: f32) -> f32 {\n\n let mut x = new_x;\n\n\n", "file_path": "src/gui/components.rs", "rank": 16, "score": 85753.90226657773 }, { "content": "/// Collision detection, state setting and event saving for components\n\n/// providing user interaction.\n\npub trait GUIUserInteraction {\n\n /// If point is inside the component area, return 
true.\n\n fn collision(&self, point: &Point2<f32>) -> bool;\n\n /// Set new state to component.\n\n fn set_state(&mut self, state: GUIComponentState);\n\n /// Get event data.\n\n fn event_data(&self) -> GUIEvent;\n\n /// Set event data.\n\n fn set_event_data(&mut self, data: GUIEvent);\n\n}\n\n\n", "file_path": "src/gui/components.rs", "rank": 17, "score": 83504.41587045942 }, { "content": "/// Add and remove game controllers, route game controller events to `KeyboardManager`\n\nstruct GameControllerManager {\n\n joystick_subsystem: JoystickSubsystem,\n\n game_controller_subsystem: GameControllerSubsystem,\n\n game_controllers: Vec<GameController>,\n\n}\n\n\n\nimpl GameControllerManager {\n\n /// Create new `GameControllerManager`\n\n fn new(joystick_subsystem: JoystickSubsystem, game_controller_subsystem: GameControllerSubsystem) -> GameControllerManager {\n\n GameControllerManager {\n\n joystick_subsystem,\n\n game_controller_subsystem,\n\n game_controllers: Vec::new(),\n\n }\n\n }\n\n\n\n /// Adds new game controller from SDL2 joystick id to `GameControllerManager`.\n\n ///\n\n /// If the joystick doesn't have a game controller mapping, method will create default\n\n /// mapping for the joystick and return the created mapping.\n", "file_path": "src/window/sdl2.rs", "rank": 18, "score": 81904.53398453155 }, { "content": "/// Build shader program from source code string slices.\n\n///\n\n/// # Panics\n\n/// * There is error compiling or linking the shaders.\n\n/// * Shader code contains 0 byte.\n\n///\n\n/// # Vertex attribute variable indexes\n\n/// * variable \"vertex\", index 0\n\n/// * variable \"texture_coordinates_attribute\", index 1\n\n///\n\nfn create_program(vertex_shader_code: &str, fragment_shader_code: &str) -> Program {\n\n let vertex_shader = load_shader(ShaderType::Vertex, vertex_shader_code);\n\n let fragment_shader = load_shader(ShaderType::Fragment, fragment_shader_code);\n\n\n\n let mut vertex_attributes = VertexAttributeIndexBinder::new();\n\n vertex_attributes.add_attribute(0, \"vertex\");\n\n vertex_attributes.add_attribute(1, \"texture_coordinates_attribute\");\n\n\n\n match Program::new(vertex_shader, fragment_shader, vertex_attributes) {\n\n Ok(program) => program,\n\n Err(message) => {\n\n println!(\"program creation error:\\n{}\", message);\n\n panic!();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/renderer/shader.rs", "rank": 19, "score": 80308.0836934818 }, { "content": "/// Color for rendering.\n\npub trait Color {\n\n /// Get color.\n\n fn color(&self) -> &Vector3<f32>;\n\n}\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 20, "score": 79492.28388599325 }, { "content": "type GameControllerMapping = String;\n\n\n", "file_path": "src/window/sdl2.rs", "rank": 21, "score": 77878.32429305784 }, { "content": "/// Model matrix for rendering.\n\npub trait ModelMatrix {\n\n /// Get model matrix.\n\n fn model_matrix(&self) -> &Matrix4<f32>;\n\n}\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 22, "score": 76864.66301605711 }, { "content": "/// Return OpenGL vendor string.\n\npub fn get_vendor_string<'a>() -> &'a CStr {\n\n unsafe {\n\n let ptr_to_str = gl_raw::GetString(gl_raw::VENDOR) as *const c_char;\n\n CStr::from_ptr(ptr_to_str)\n\n }\n\n}\n\n\n", "file_path": "gl/src/gl_wrapper/mod.rs", "rank": 23, "score": 76471.45159694026 }, { "content": "/// Return OpenGL version string.\n\npub fn get_version_string<'a>() -> &'a CStr {\n\n unsafe {\n\n let ptr_to_str = gl_raw::GetString(gl_raw::VERSION) as *const c_char;\n\n CStr::from_ptr(ptr_to_str)\n\n 
}\n\n}\n\n\n", "file_path": "gl/src/gl_wrapper/mod.rs", "rank": 24, "score": 76471.45159694026 }, { "content": "/// Should game object be destroyed?\n\npub trait CanDestroy {\n\n /// If this is true then game object should be destroyed.\n\n fn destroy(&self) -> bool;\n\n}\n\n\n", "file_path": "src/logic/common.rs", "rank": 25, "score": 76212.54127883565 }, { "content": "/// Create uniform specific to one shader program.\n\n///\n\n/// `program_name` argument is for displaying program name in the possible error message.\n\n///\n\n/// # Panics\n\n/// * If `name` argument contains 0 byte.\n\n/// * If there is not uniform with name that equals argument `name` in the shader program.\n\nfn create_uniform<T: Uniform>(name: &str, program: &Program, program_name: &str) -> T {\n\n let uniform_result = T::new(CString::new(name).unwrap(), &program);\n\n\n\n match uniform_result {\n\n Ok(uniform) => uniform,\n\n Err(error) => {\n\n println!(\"error: {:?}\\n uniform name: {}\\n program name: {}\\n\", error, name, program_name);\n\n panic!();\n\n },\n\n }\n\n}", "file_path": "src/renderer/shader.rs", "rank": 26, "score": 75339.65030809291 }, { "content": "/// Render tile from tile map.\n\npub trait TileLocationInfo {\n\n /// Tile's location information.\n\n ///\n\n /// # Vector3 components\n\n /// vector[0]: movement in x direction.\n\n /// vector[1]: movement in y direction.\n\n /// vector[2]: scaling factor for texture coordinates.\n\n ///\n\n /// Shader will multiply texture coordinates with scaling factor\n\n /// and then add x and y movement to multiplied texture coordinates.\n\n ///\n\n /// For tile rendering, scaling factor should be less than 1.0.\n\n /// In practice, the scaling factor will make square represented by texture coordinates\n\n /// smaller, positioned at lower left corner of the texture. Then with x and y movement\n\n /// values you can move that square to a specific location on a tile map.\n\n fn tile_info(&self) -> &Vector3<f32>;\n\n}\n\n\n\n/// OpenGL 3.0 and OpenGL ES 2.0 renderer.\n\n///\n", "file_path": "src/renderer/mod.rs", "rank": 27, "score": 74449.09145086881 }, { "content": "pub trait Volume: Copy + Clone {\n\n type Value;\n\n const MAX_VOLUME: Self::Value;\n\n const DEFAULT_VOLUME_PERCENTAGE: i32;\n\n\n\n /// Create new volume value limited to [0; MAX_VOLUME].\n\n fn new(Self::Value) -> Self;\n\n fn value(&self) -> Self::Value;\n\n\n\n /// Create Volume value from integer representing\n\n /// volume percentage. 
Clamps integer to range [0; 100].\n\n fn from_percentage(i32) -> Self;\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 28, "score": 74446.20407779123 }, { "content": "/// Returns true if file exists and starts with specific bytes.\n\n///\n\n/// This function will panic if bytes in the beginning of the file\n\n/// does not match.\n\nfn file_exists_and_contains_message(file_path: &PathBuf, message: &[u8]) -> bool {\n\n let _file_result = File::open(&file_path);\n\n\n\n if let Ok(mut file) = _file_result {\n\n let mut vector = Vec::with_capacity(message.len());\n\n\n\n for _ in message {\n\n vector.push(0);\n\n }\n\n\n\n let mut file_content: Box<[u8]> = vector.into_boxed_slice();\n\n\n\n file.read(&mut file_content).unwrap();\n\n\n\n if file_content.iter().ne(message) {\n\n panic!(\"Unknown or modified generated bindings file found, remove {:?} and build again\", file_path);\n\n } else {\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n}", "file_path": "gl/build.rs", "rank": 29, "score": 73691.91121255193 }, { "content": "/// Trait for nicer game object container updates.\n\npub trait UpdateContent<T> {\n\n /// Run check_object closure for every game object\n\n /// currently stored to the container implementing this trait.\n\n ///\n\n /// If closure returns true, the game object will be removed. This will be done with the index_buffer,\n\n /// where location indexes of game objects will be stored.\n\n fn update(&mut self, index_buffer: &mut Vec<usize>, check_object: &mut FnMut(&mut T) -> bool);\n\n}\n\n\n\nimpl <T> UpdateContent<T> for Vec<T> {\n\n fn update(&mut self, index_buffer: &mut Vec<usize>, check_object: &mut FnMut(&mut T) -> bool) {\n\n for (i, object) in self.iter_mut().enumerate() {\n\n if check_object(object) {\n\n index_buffer.push(i);\n\n }\n\n }\n\n\n\n for i in index_buffer.iter().rev() {\n\n self.swap_remove(*i);\n\n }\n\n\n\n index_buffer.clear();\n\n }\n\n}", "file_path": "src/logic/common.rs", "rank": 30, "score": 72213.99026220052 }, { "content": "/// Basic game object functionality.\n\n///\n\n/// See also the `Data` struct documentation.\n\npub trait GameObject: GameObjectData<f32> {\n\n\n\n /// Moves game object to current direction.\n\n fn forward(&mut self, amount: f32) {\n\n self.data_mut().position += self.data().direction * amount;\n\n\n\n self.data_mut().update_model_matrix_position();\n\n }\n\n\n\n /// Turn game object's current direction. 
Angle is in radians.\n\n fn turn(&mut self, angle: f32) {\n\n self.data_mut().rotation += angle;\n\n\n\n self.data_mut().update_rotation(true);\n\n }\n\n\n\n /// Turns game object, but does not update model matrix, so\n\n /// game object won't look like it was turned.\n\n fn turn_without_updating_model_matrix(&mut self, angle: f32) {\n\n self.data_mut().rotation += angle;\n", "file_path": "src/logic/common.rs", "rank": 31, "score": 64751.06976202876 }, { "content": "/// Functions for accessing game object's position data.\n\npub trait GameObjectData<T: BaseFloat> {\n\n fn data(&self) -> &Data<T>;\n\n fn data_mut(&mut self) -> &mut Data<T>;\n\n}\n\n\n", "file_path": "src/logic/common.rs", "rank": 32, "score": 64747.065808210435 }, { "content": "/// Creates specific size CString.\n\n///\n\n/// # Panics\n\n/// If function's internally created Vec<u8> buffer's\n\n/// length and argument len not match this function panics.\n\n/// This should never happen.\n\nfn create_string_buffer(len: usize) -> CString {\n\n let mut buffer: Vec<u8> = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n buffer.push(b' ');\n\n }\n\n\n\n if buffer.len() != len {\n\n panic!(\"buffer and log length differs\");\n\n }\n\n\n\n CString::new(buffer).unwrap()\n\n}\n\n\n", "file_path": "gl/src/gl_wrapper/shader.rs", "rank": 33, "score": 63249.36388839057 }, { "content": "/// State for parsing the next argument.\n\nenum ArgumentParserState {\n\n MusicFilePath,\n\n}", "file_path": "src/settings.rs", "rank": 34, "score": 63013.8515420613 }, { "content": "/// Check command line arguments, initialize game and start game loop.\n\nfn main() {\n\n let arguments = match Arguments::parse(env::args()) {\n\n Ok(arguments) => arguments,\n\n Err(unknown_argument) => {\n\n println!(\"unknown argument: \\\"{}\\\"\", unknown_argument);\n\n println!(\"{}\", COMMAND_LINE_HELP_TEXT);\n\n return;\n\n }\n\n };\n\n\n\n if arguments.show_help() {\n\n println!(\"{}\", COMMAND_LINE_HELP_TEXT);\n\n return;\n\n }\n\n\n\n\n\n #[cfg(not(feature = \"gles\"))]\n\n let rendering_context = RenderingContext::OpenGL;\n\n\n\n #[cfg(feature = \"gles\")]\n", "file_path": "src/main.rs", "rank": 35, "score": 54732.92601144452 }, { "content": "fn main() {\n\n let current_dir_path = env::current_dir().unwrap().into_os_string();\n\n\n\n create_bindings(OpenGLApi::Gl, &mut Path::new(&current_dir_path).join(\"src/gl_generated.rs\"));\n\n create_bindings(OpenGLApi::Gles, &mut Path::new(&current_dir_path).join(\"src/gl_es_generated.rs\"));\n\n}\n\n\n\n/// OpenGL API type\n", "file_path": "gl/build.rs", "rank": 36, "score": 54732.92601144452 }, { "content": "/// Send static data to GPU with Vertex Buffer Object\n\nstruct VertexBufferStatic {\n\n id: GLuint,\n\n attribute_component_count: GLint,\n\n}\n\n\n\nimpl VertexBufferStatic {\n\n /// Sends static data to GPU.\n\n ///\n\n /// # Arguments\n\n /// * `data` - Float data which is sent to GPU.\n\n /// * `attribute_component_count` - Number of floats in one vertex attribute.\n\n ///\n\n /// # Safety\n\n /// This function does not check if data length and `attribute_component_count` match.\n\n unsafe fn new(data: &[f32], attribute_component_count: GLint) -> VertexBufferStatic {\n\n let mut id: GLuint = 0;\n\n\n\n gl_raw::GenBuffers(1, &mut id);\n\n gl_raw::BindBuffer(gl_raw::ARRAY_BUFFER, id);\n\n\n", "file_path": "gl/src/gl_wrapper/buffer.rs", "rank": 37, "score": 49827.08151887071 }, { "content": "/// All sound effect's that the game requires.\n\nstruct AllSoundEffects<A: Audio> {\n\n laser: A,\n\n explosion: A,\n\n 
laser_bomb_launch: A,\n\n laser_bomb_explosion: A,\n\n player_laser_hits_laser_cannon: A,\n\n}\n\n\n\nimpl <A: Audio> AllSoundEffects<A> {\n\n\n\n /// Loads all sound effects that the game requires.\n\n fn new(default_volume: A::Volume) -> Result<Self, String> {\n\n\n\n let mut sounds = AllSoundEffects {\n\n laser: A::load(\"game_files/audio/laser.wav\")?,\n\n explosion: A::load(\"game_files/audio/explosion.wav\")?,\n\n laser_bomb_launch: A::load(\"game_files/audio/laser_bomb_launch.wav\")?,\n\n laser_bomb_explosion: A::load(\"game_files/audio/laser_bomb_explosion.wav\")?,\n\n player_laser_hits_laser_cannon: A::load(\"game_files/audio/player_laser_hits_laser_cannon.wav\")?,\n\n };\n", "file_path": "src/audio.rs", "rank": 38, "score": 49356.28558811029 }, { "content": "pub trait AudioPlayer {\n\n type Music: Audio;\n\n type Effect: Audio;\n\n}\n\n\n\n/// Store music, sound effects, volume values.\n\npub struct AudioManager<P: AudioPlayer> {\n\n _player: Option<P>,\n\n sound_effects: SoundEffectManager<P::Effect>,\n\n music: Option<P::Music>,\n\n music_volume: <P::Music as Audio>::Volume,\n\n effect_volume: <P::Effect as Audio>::Volume,\n\n}\n\n\n\nimpl <P: AudioPlayer> AudioManager<P> {\n\n /// Create new `AudioManager`.\n\n ///\n\n /// Sound effects will be loaded from default locations. If there is\n\n /// sound effect loading error, all sound effects will be disabled.\n\n ///\n", "file_path": "src/audio.rs", "rank": 39, "score": 48363.95564486635 }, { "content": "/// Play sound effects.\n\npub trait SoundEffectPlayer {\n\n /// Play laser sound at next update.\n\n fn laser(&mut self);\n\n /// Play laser bomb launch sound at next update.\n\n fn laser_bomb_launch(&mut self);\n\n /// Play laser bomb explosion sound at next update.\n\n fn laser_bomb_explosion(&mut self);\n\n /// Play explosion sound at next update.\n\n fn explosion(&mut self);\n\n /// Play player laser hits laser cannon sound at next update.\n\n fn player_laser_hits_laser_cannon(&mut self);\n\n /// Play sound effects that are set to be played if\n\n /// sound effects are available.\n\n fn update(&mut self);\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 40, "score": 47045.08032928683 }, { "content": "pub trait Audio: Sized {\n\n type Volume: Volume;\n\n\n\n fn load(&str) -> Result<Self, String>;\n\n fn play(&mut self);\n\n fn change_volume(&mut self, volume: Self::Volume);\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 41, "score": 46754.77154985638 }, { "content": "/// Common functionality between different types of uniforms.\n\npub trait Uniform\n\n where Self: Sized {\n\n\n\n type Data;\n\n\n\n /// Create new uniform.\n\n ///\n\n /// # Arguments\n\n /// * `name` - Name of the uniform.\n\n /// * `program` - Uniform's shader program.\n\n fn new(name: CString, program: &Program) -> Result<Self, UniformError> {\n\n let location;\n\n\n\n unsafe {\n\n location = gl_raw::GetUniformLocation(program.id(), name.as_ptr());\n\n }\n\n\n\n if location == -1 {\n\n Err(UniformError::UniformNotFoundOrGLPrefix)\n\n } else {\n", "file_path": "gl/src/gl_wrapper/uniform.rs", "rank": 42, "score": 45836.79121469803 }, { "content": "### Retry with OpenGL (and OpenGL ES)\n\n\n\nAfter two computer graphics courses this spring, I now did have the knowledge to utilize the power of the GPU with OpenGL API, so I started\n\nplaning to retrieve my game programming project. 
I could make my own renderer for the game and I could get some credits from coding the game\n\nthanks to the programming project course at my university (that is why I'm writing this self-evaluation document by the way).\n\nFirst I read the new Rust book, to recall how Rust code was written and to learn Rust programming concepts better than the last time.\n\nAfter that I did some experiments with using OpenGL from Rust code and then started writing the game. More about the game's current structure is in\n\nthe following chapter.\n\n\n\n## 2. Implementation\n\nThe following is an overview of the game's current architecture.\n\n### Component diagram of main components\n\n![Alt text](/documentation/component_diagram.png?raw=true \"Component diagram\")\n\nColor | Meaning\n------|------------------\nWhite | My component\nGray | External component\nBlue | File\n\nNote that at game initialization, more components depend on SDL2. Also SDL2 Rust binding's types like `Button` and `Keycode` are\n\nused in the components, so those create an additional dependency, and some components are borrowed to each other during updates, like `AudioManager` is borrowed to\n\n`Logic` when updating the game logic.\n\n### Component interfaces\n\nTraits `Renderer`, `SoundEffectPlayer` and `Input` make `OpenGLRenderer`, `AudioManager` and `InputManager` easily replaceable.\n\n\n\n### Crates currently used\n\n* [cgmath](https://github.com/brendanzab/cgmath)\n* [rand](https://github.com/rust-lang-nursery/rand)\n* [image](https://github.com/PistonDevelopers/image)\n* [sdl2](https://github.com/Rust-SDL2/rust-sdl2)\n* [gl_generator](https://github.com/brendanzab/gl-rs/tree/master/gl_generator)\n\n\n", "file_path": "documentation/project-self-evaluation.md", "rank": 43, "score": 41384.46271621286 }, { "content": "## 3. Self-evaluation\n\nThe project was quite successful. I learned the basics of Rust and the game works well on Raspberry Pi.\n\nI didn't have any major code design issues with Rust, which was the opposite of what I thought before starting the project.\n\nBut I guess there are still improvements to be made to the code. It takes time to learn how to fully utilize Rust's type system.\n\nI learned that there should be two separate projection matrices for GUI and game logic and\n\nmethods to get display size in world coordinates for both GUI and game logic. That would allow easy GUI and game scaling.\n\nCurrently there is only one projection matrix and method for getting display width in world coordinates.\n\nFor learning purposes, I decided to make my own GUI toolkit and OpenGL wrapper library. This also allowed me to\n\nbuild any features that the game needed.\n\nMy original plans had a deadline for the project at the end of August. I managed to finish the game before that.\n\nAnd yes, it's hard to design software that has a lot of features in a way which keeps maintainability at an acceptable rate, just like\n\nhow it's told at lectures.\n\n## 4. List of references used during the project\n\n* SDL2 and SDL2_mixer documentation\n* Crate documentation\n* Rust standard library documentation\n* https://learnopengl.com/\n\n## 5. Information for project maintainers\n\nSee the project's readme file for building instructions.\n\n## 6. Future of the project\n\nThere are still things to do listed in the project's readme file. 
Also changing my GUI toolkit and OpenGL wrapper to proper libraries\n\nlike `conrod` and `glium` should be considered.\n\n## 7. Conclusions\n\nThe project went quite well, but like in every software project, there are still improvements to be made.\n", "file_path": "documentation/project-self-evaluation.md", "rank": 44, "score": 41378.068552336255 }, { "content": "### The Unity3D version of Space Boss Battles\n\nMy old game needed a proper rewrite: frame rate was locked to 40 fps, no full screen support, only software rendering and Windows was the only supported\n\nplatform. Back when I rewrote the game with Unity3D, I was also interested in mobile game development.\n\nUnity3D seemed like an interesting choice for doing the rewrite: easy\n\nexporting to multiple platforms including mobile, GPU rendering and C# support.\n\nIt also had tools to make 3D games more easily.\n\nAfter a couple of weeks I had a working build with roughly the same\n\nfeature set as the original game. It ran well on my desktop computer, but on my Android smartphone which I had back then, the\n\nHuawei U8800 Ideos X5, it ran poorly. The fact that the game and/or Unity3D ran poorly\n\non my smartphone was not nice, and the game wasn't designed to be played with a touch screen, so I lost motivation for the project.\n\n### Raspberry Pi games with native code\n\nSome months later I got an idea that I should make games for Raspberry Pi. At that time I only had the Model B (512 MB RAM) version of\n\nRaspberry Pi. I thought that Raspberry Pi's ARMv6 CPU would be too slow for running a game on top of any kind of virtual machine with\n\ngarbage collection, so I started to look for the best languages and libraries for making games that compile to native code.\n\nSDL2 and its built-in renderer had Raspberry Pi support, so I started experimenting with C++ and SDL2. I made a simple benchmarking program\n\nthat rendered moving game objects that would bounce off the sides of the screen. It worked OK on Raspberry Pi: about 150 objects was the limit when\n\nthe frame rate went under 60 fps, and that was at 1080p resolution. At the time I didn't know how to use OpenGL ES, so I couldn't test if I could get more\n", "file_path": "documentation/project-self-evaluation.md", "rank": 45, "score": 41377.37749810086 }, { "content": "# Project self-evaluation\n\nDate: 2017-08-28\n\n## Abstract\n\nI rewrote my old 2D arcade space shooter game Space Boss Battles in the Rust programming language. Programming went quite well and I didn't\n\nhave any major difficulties even though I didn't have much programming experience with Rust.\n\n\n\n## Table of contents\n\n1. [Project description, background and goals](#1-project-description-background-and-goals)\n2. [Implementation](#2-implementation)\n3. [Self-evaluation](#3-self-evaluation)\n4. [List of sources used during the project](#4-list-of-sources-used-during-the-project)\n5. [Information for project maintainers](#5-information-for-project-maintainers)\n6. [Future of the project](#6-future-of-the-project)\n7. [Conclusions](#7-conclusions)\n\n\n## 1. Project description, background and goals\n\nThe project's aim was to do something challenging enough to learn the Rust programming language and create a game that would\n\nrun on low-end hardware like the Raspberry Pi Model B (512 MB RAM). I decided to recreate my old game, Space Boss Battles, which\n\nwas written in the CoolBasic programming language. 
Making an rewrite of an existing game also had that upside I wouldn't\n\nhave to create a new textures and sound effects.\n\n\n\nThe roots of this project kinda start about an year ago when I recreated this same game with Unity3D.\n\n\n", "file_path": "documentation/project-self-evaluation.md", "rank": 46, "score": 41375.77140813314 }, { "content": "performance using directly OpenGL ES, rather than an SDL2 renderer API which was rendering with OpenGL ES.\n\n\n\nAfter that I started to think should I change the C++ to something more nicer and modern and something that will be more useful in the future.\n\nI could learn the modern C++ or learn Rust. Rust seemed more promising and easier to learn, and I wouldn't have to mess with header files.\n\n\n\nI created that same benchmarking program with Rust and SDL2 bindings for it. It went quite well. After that I did try to start recreating Space Boss Battles\n\nwith SDL2's built in 2D renderer. I ran to some design issues and there was no time to continue that project.\n\n\n", "file_path": "documentation/project-self-evaluation.md", "rank": 47, "score": 41373.98111087716 }, { "content": "/// Settings parser states.\n\nenum SettingsParserMode {\n\n Settings,\n\n GameControllerMappings,\n\n}\n\n\n\n\n\n/// Setting and it's name as text.\n\npub struct SettingContainer {\n\n name: &'static str,\n\n setting_type: SettingType,\n\n}\n\n\n\nimpl SettingContainer {\n\n /// Create new `SettingContainer`.\n\n pub fn new(name: &'static str, setting_type: SettingType) -> SettingContainer {\n\n SettingContainer { name, setting_type }\n\n }\n\n\n\n /// Try setting a new boolean value to the `SettingContainer`.\n\n ///\n", "file_path": "src/settings.rs", "rank": 48, "score": 40894.267178165894 }, { "content": " key_hit: bool,\n\n }\n\n\n\n impl KeyHitGenerator {\n\n /// Create new `KeyHitGenerator` which `milliseconds_between_key_hits` field is set to `300`.\n\n pub fn new() -> KeyHitGenerator {\n\n KeyHitGenerator {\n\n milliseconds_between_key_hits: 300,\n\n timer: Timer::new(),\n\n state: None,\n\n key_hit: false,\n\n }\n\n }\n\n\n\n /// Updates generators state from `KeyEvent`.\n\n pub fn update_from_key_event(&mut self, key_event: KeyEvent, current_time: &TimeMilliseconds) {\n\n match key_event {\n\n KeyEvent::KeyUp => self.up(),\n\n KeyEvent::KeyDown => self.down(current_time),\n\n }\n", "file_path": "src/input.rs", "rank": 49, "score": 36172.18804705256 }, { "content": "\n\n /// Handle key up event.\n\n ///\n\n /// Creates a key hit if method is called when\n\n /// generators state is `Some(KeyHitState::NormalMode)`.\n\n ///\n\n /// Generators state will be set to `None`.\n\n fn up(&mut self) {\n\n if let Some(KeyHitState::NormalMode) = self.state {\n\n self.key_hit = true;\n\n } else {\n\n self.key_hit = false;\n\n };\n\n\n\n self.state = None;\n\n }\n\n\n\n /// Returns true if key hit has been happened.\n\n ///\n\n /// This method will also clear the current key hit.\n", "file_path": "src/input.rs", "rank": 50, "score": 36168.534182219584 }, { "content": "pub struct InputManager {\n\n keyboard: KeyboardManager,\n\n mouse: MouseManager,\n\n}\n\n\n\nimpl InputManager {\n\n /// Create new InputManger.\n\n pub fn new() -> InputManager {\n\n InputManager {\n\n keyboard: KeyboardManager::new(),\n\n mouse: MouseManager::new(),\n\n }\n\n }\n\n\n\n /// Handle key up event.\n\n pub fn update_key_up(&mut self, key: Key, current_time: &TimeMilliseconds) {\n\n self.keyboard.update_keys(key, KeyEvent::KeyUp, current_time);\n\n }\n\n\n\n /// Handle keyboard key down 
event.\n", "file_path": "src/input.rs", "rank": 51, "score": 36167.20200999764 }, { "content": " self.key_hit = true;\n\n }\n\n },\n\n _ => (),\n\n }\n\n }\n\n\n\n /// Handle key down event.\n\n ///\n\n /// Sets generators state to `Some(KeyHitState::NormalMode)` and resets\n\n /// generators internal timer if generators current state is `None`.\n\n fn down(&mut self, current_time: &TimeMilliseconds) {\n\n match self.state {\n\n None => {\n\n self.state = Some(KeyHitState::NormalMode);\n\n self.timer.reset(current_time);\n\n },\n\n _ => (),\n\n }\n\n }\n", "file_path": "src/input.rs", "rank": 52, "score": 36166.66432594196 }, { "content": " }\n\n\n\n /// Update method will generate key hits if\n\n ///\n\n /// * There is enough time passed from the last key hit.\n\n /// * `key_down` argument is true.\n\n pub fn update(&mut self, current_time: &TimeMilliseconds, key_down: bool) {\n\n if !key_down {\n\n return;\n\n }\n\n\n\n match self.state {\n\n Some(KeyHitState::NormalMode) => {\n\n if self.timer.check(current_time, self.milliseconds_between_key_hits) {\n\n self.state = Some(KeyHitState::ScrollMode);\n\n self.key_hit = true;\n\n }\n\n },\n\n Some(KeyHitState::ScrollMode) => {\n\n if self.timer.check(current_time, self.milliseconds_between_key_hits) {\n", "file_path": "src/input.rs", "rank": 53, "score": 36165.575986905664 }, { "content": " /// Key hit for enter key.\n\n fn key_hit_enter(&mut self) -> bool;\n\n /// Key hit for back key.\n\n fn key_hit_back(&mut self) -> bool;\n\n\n\n /// Button hit for any mouse button.\n\n fn mouse_button_hit(&mut self) -> bool;\n\n /// Is mouse location update occurred.\n\n /// Resets to false.\n\n fn mouse_motion(&mut self) -> bool;\n\n /// Current location of mouse in world coordinates.\n\n fn mouse_location(&self) -> &Point2<f32>;\n\n}\n\n\n\n/// Handles user input events and stores current input state.\n\n///\n\n/// Currently supported input methods are\n\n/// * Keyboard\n\n/// * Mouse\n\n/// * Game controller\n", "file_path": "src/input.rs", "rank": 54, "score": 36164.81809993794 }, { "content": " pub fn key_hit(&mut self) -> bool {\n\n if self.key_hit {\n\n self.clear();\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// Clears current key hit.\n\n pub fn clear(&mut self) {\n\n self.key_hit = false;\n\n }\n\n }\n\n}", "file_path": "src/input.rs", "rank": 55, "score": 36162.96811152858 }, { "content": " self.reset_key_hits();\n\n\n\n self.key_hit_up.update(current_time, self.up);\n\n self.key_hit_down.update(current_time, self.down);\n\n self.key_hit_left.update(current_time, self.left);\n\n self.key_hit_right.update(current_time, self.right);\n\n }\n\n}\n\n\n\n// TODO: Touch screen support.\n\n\n\nmod utils {\n\n\n\n //! 
Utilities for `input` module's objects.\n\n\n\n use utils::{Timer, TimeMilliseconds};\n\n\n\n /// Key press states.\n\n #[derive(Clone)]\n\n pub enum KeyEvent {\n", "file_path": "src/input.rs", "rank": 56, "score": 36161.77924445034 }, { "content": " KeyEvent::KeyUp => (false, true),\n\n KeyEvent::KeyDown => (true, false),\n\n };\n\n\n\n match key {\n\n Key::Up => {\n\n self.up = key_down_field;\n\n self.key_hit_up.update_from_key_event(key_event, current_time);\n\n },\n\n Key::Down => {\n\n self.down = key_down_field;\n\n self.key_hit_down.update_from_key_event(key_event, current_time);\n\n }\n\n Key::Left => {\n\n self.left = key_down_field;\n\n self.key_hit_left.update_from_key_event(key_event, current_time);\n\n }\n\n Key::Right => {\n\n self.right = key_down_field;\n\n self.key_hit_right.update_from_key_event(key_event, current_time);\n", "file_path": "src/input.rs", "rank": 57, "score": 36159.81499354905 }, { "content": " pub fn update_key_down(&mut self, key: Key, current_time: &TimeMilliseconds) {\n\n self.keyboard.update_keys(key, KeyEvent::KeyDown, current_time);\n\n }\n\n\n\n /// Handle mouse motion event.\n\n pub fn update_mouse_motion(&mut self, point: Point2<f32>) {\n\n self.mouse.update_mouse_motion(point);\n\n }\n\n\n\n /// Handle mouse button up event.\n\n pub fn update_mouse_button_up(&mut self, point: Point2<f32>) {\n\n self.mouse.update_mouse_button_up(point);\n\n }\n\n\n\n /// Resets `MouseManager` button hits and updates `KeyboardManager`\n\n pub fn update(&mut self, current_time: &TimeMilliseconds) {\n\n self.mouse.reset_button_hits();\n\n self.keyboard.update(current_time);\n\n }\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 58, "score": 36159.43551444922 }, { "content": " KeyboardManager {\n\n up: false,\n\n down: false,\n\n left: false,\n\n right: false,\n\n shoot: false,\n\n\n\n key_hit_left: KeyHitGenerator::new(),\n\n key_hit_right: KeyHitGenerator::new(),\n\n key_hit_up: KeyHitGenerator::new(),\n\n key_hit_down: KeyHitGenerator::new(),\n\n\n\n key_hit_enter: false,\n\n key_hit_back: false,\n\n }\n\n }\n\n\n\n /// Updates `KeyboardManager`'s fields from keyboard event\n\n pub fn update_keys(&mut self, key: Key, key_event: KeyEvent, current_time: &TimeMilliseconds) {\n\n let (key_down_field, key_hit_field) = match key_event {\n", "file_path": "src/input.rs", "rank": 59, "score": 36157.377728785024 }, { "content": " KeyUp,\n\n KeyDown,\n\n }\n\n\n\n /// KeyHitGenerator's states.\n\n enum KeyHitState {\n\n /// Normal key hits.\n\n NormalMode,\n\n /// Generator generated key hits.\n\n ScrollMode,\n\n }\n\n\n\n /// Generate key hits.\n\n ///\n\n /// Generates key hits from key up event and if the key is pressed down\n\n /// long enough, the generator will generate multiple key hits.\n\n pub struct KeyHitGenerator {\n\n milliseconds_between_key_hits: u32,\n\n timer: Timer,\n\n state: Option<KeyHitState>,\n", "file_path": "src/input.rs", "rank": 60, "score": 36156.843824477546 }, { "content": "use self::utils::{KeyEvent, KeyHitGenerator};\n\n\n\n\n\npub enum Key {\n\n Up,\n\n Down,\n\n Left,\n\n Right,\n\n Shoot,\n\n Select,\n\n Back,\n\n}\n\n\n\n/// Interface for game components requiring user input information.\n\n///\n\n/// Key hits and button hits will reset to false when method is called.\n", "file_path": "src/input.rs", "rank": 61, "score": 36156.80447761385 }, { "content": "\n\n /// Handle mouse motion event.\n\n pub fn update_mouse_motion(&mut self, point: Point2<f32>) {\n\n self.mouse_motion = true;\n\n self.mouse_location = point;\n\n }\n\n\n\n /// 
Handle mouse button up event.\n\n pub fn update_mouse_button_up(&mut self, point: Point2<f32>) {\n\n self.mouse_button_hit = true;\n\n self.mouse_location = point;\n\n }\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 62, "score": 36154.610364641645 }, { "content": " }\n\n Key::Shoot => self.shoot = key_down_field,\n\n Key::Select => self.key_hit_enter = key_hit_field,\n\n Key::Back => self.key_hit_back = key_hit_field,\n\n }\n\n }\n\n\n\n /// Reset key hit fields and `KeyHitGenerator`s\n\n fn reset_key_hits(&mut self) {\n\n self.key_hit_enter = false;\n\n self.key_hit_back = false;\n\n\n\n self.key_hit_up.clear();\n\n self.key_hit_down.clear();\n\n self.key_hit_left.clear();\n\n self.key_hit_right.clear();\n\n }\n\n\n\n /// Reset key hit fields and `KeyHitGenerator`s and updates `KeyHitGenerator`s\n\n pub fn update(&mut self, current_time: &TimeMilliseconds) {\n", "file_path": "src/input.rs", "rank": 63, "score": 36152.31052813938 }, { "content": "/*\n\nsrc/input.rs, 2017-09-01\n\n\n\nCopyright (c) 2017 Juuso Tuononen\n\n\n\nThis file is licensed under\n\n\n\nApache License, Version 2.0\n\n\n\nor\n\n\n\nMIT License\n\n*/\n\n\n\n//! Input handling.\n\n\n\nuse cgmath::Point2;\n\n\n\nuse utils::TimeMilliseconds;\n\n\n", "file_path": "src/input.rs", "rank": 64, "score": 36148.59258606197 }, { "content": " pub fn print_joystick_events(&self) -> bool {\n\n self.command_line_arguments.print_joystick_events\n\n }\n\n\n\n /// Is fps count printing enabled.\n\n pub fn print_fps_count(&self) -> bool {\n\n self.command_line_arguments.print_fps_count\n\n }\n\n\n\n /// Applies current settings from field `settings`.\n\n pub fn apply_current_settings<T: Renderer, W: Window, P: AudioPlayer>(&self, renderer: &mut T, gui: &mut GUI, audio_manager: &mut AudioManager<P>, window: &mut W) {\n\n for setting in &self.settings {\n\n Settings::apply_setting(setting.get_value(), renderer, gui, audio_manager, window);\n\n }\n\n }\n\n\n\n /// Apply setting provided as argument.\n\n pub fn apply_setting<T: Renderer, W: Window, P: AudioPlayer>(setting: SettingType, _renderer: &mut T, gui: &mut GUI, audio_manager: &mut AudioManager<P>, window: &mut W) {\n\n match setting {\n\n SettingType::Boolean(BooleanSetting::FullScreen, value) => window.set_fullscreen(value),\n\n SettingType::Boolean(BooleanSetting::ShowFpsCounter, value) => gui.set_show_fps_counter(value),\n\n SettingType::Boolean(BooleanSetting::VSync , value) => window.set_v_sync(value),\n\n SettingType::Integer(IntegerSetting::SoundEffectVolume, value) => audio_manager.set_sound_effect_volume(value),\n\n SettingType::Integer(IntegerSetting::MusicVolume, value) => audio_manager.set_music_volume(value),\n\n }\n\n }\n\n}\n\n\n\n/// Settings parser states.\n", "file_path": "src/settings.rs", "rank": 65, "score": 36125.31040000221 }, { "content": " /// Returns true if new value was set.\n\n ///\n\n /// Works only if `SettingsContainer`'s current setting is same `BooleanSetting` as argument `setting`.\n\n fn set_if_boolean_setting_matches(&mut self, setting: BooleanSetting, value: bool) -> bool {\n\n if let &mut SettingType::Boolean(container_setting, ref mut old_value) = &mut self.setting_type {\n\n if container_setting == setting {\n\n *old_value = value;\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n /// Try setting a new integer value to the `SettingContainer`.\n\n ///\n\n /// Returns true if new value was set.\n\n ///\n\n /// Works only if `SettingsContainer`'s current setting is same `IntegerSetting` as argument `setting`.\n\n fn 
set_if_integer_setting_matches(&mut self, setting: IntegerSetting, value: i32) -> bool {\n", "file_path": "src/settings.rs", "rank": 66, "score": 36118.90241621343 }, { "content": " match setting.get_value() {\n\n SettingType::Boolean(event, _) => {\n\n if value == \"true\" {\n\n setting.set_if_boolean_setting_matches(event, true);\n\n } else if value == \"false\" {\n\n setting.set_if_boolean_setting_matches(event, false);\n\n } else {\n\n println!(\"error when parsing value \\\"{}\\\" for setting \\\"{}\\\": not a boolean value\", value, setting.get_name());\n\n }\n\n },\n\n SettingType::Integer(event, _) => {\n\n match value.parse::<i32>() {\n\n Ok(number) => {\n\n setting.set_if_integer_setting_matches(event, number);\n\n },\n\n Err(error) => println!(\"error when parsing value \\\"{}\\\" for setting \\\"{}\\\": {}\", value, setting.get_name(), error),\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/settings.rs", "rank": 67, "score": 36117.52179156437 }, { "content": "use renderer::Renderer;\n\n\n\nuse gui::GUI;\n\n\n\nuse audio::{AudioManager, AudioPlayer};\n\n\n\nuse window::Window;\n\n\n\nconst SETTINGS_FILE_NAME: &'static str = \"space_boss_battles_settings.txt\";\n\n\n\n/// Settings with integer value.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum IntegerSetting {\n\n SoundEffectVolume,\n\n MusicVolume,\n\n}\n\n\n\n/// Settings with boolean value.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum BooleanSetting {\n", "file_path": "src/settings.rs", "rank": 68, "score": 36114.40494052895 }, { "content": " arguments.print_joystick_events = true;\n\n } else if arg == \"--help\" || arg == \"-h\" {\n\n arguments.show_help = true;\n\n } else if arg == \"--music\" {\n\n argument_parser_state = Some(ArgumentParserState::MusicFilePath);\n\n } else {\n\n return Err(arg);\n\n }\n\n },\n\n }\n\n }\n\n\n\n // TODO: Return error if argument_parser_state is not None\n\n // at the end of argument parsing.\n\n\n\n Ok(arguments)\n\n }\n\n\n\n /// Is there argument `--help` or `-h` found.\n\n pub fn show_help(&self) -> bool {\n\n self.show_help\n\n }\n\n\n\n /// Possible user defined music file path.\n\n pub fn music_file_path(&self) -> &Option<String> {\n\n &self.music_file_path\n\n }\n\n}\n\n\n", "file_path": "src/settings.rs", "rank": 69, "score": 36110.67037397474 }, { "content": " /// Create new `Settings`.\n\n ///\n\n /// Read settings from file and load found game controller mappings to\n\n /// `GameControllerSubsystem`.\n\n pub fn new(command_line_arguments: Arguments, effect_default_volume_percentage: i32, music_default_volume_percentage: i32) -> Settings {\n\n let settings = vec![\n\n SettingContainer::new(\"Full screen\", SettingType::Boolean(BooleanSetting::FullScreen, false)),\n\n SettingContainer::new(\"FPS counter\", SettingType::Boolean(BooleanSetting::ShowFpsCounter, false)),\n\n SettingContainer::new(\"VSync\", SettingType::Boolean(BooleanSetting::VSync, true)),\n\n SettingContainer::new(\"Music volume\", SettingType::Integer(IntegerSetting::MusicVolume, music_default_volume_percentage)),\n\n SettingContainer::new(\"Effect volume\", SettingType::Integer(IntegerSetting::SoundEffectVolume, effect_default_volume_percentage)),\n\n ];\n\n\n\n let mut settings = Settings {\n\n settings: settings,\n\n controller_mappings: Vec::new(),\n\n command_line_arguments,\n\n };\n\n\n\n settings.load();\n", "file_path": "src/settings.rs", "rank": 70, "score": 36110.42867417091 }, { "content": " if setting.set_if_boolean_setting_matches(event, value) {\n\n return;\n\n }\n\n }\n\n },\n\n 
SettingType::Integer(event, value) => {\n\n for setting in &mut self.settings {\n\n if setting.set_if_integer_setting_matches(event, value) {\n\n return;\n\n }\n\n }\n\n },\n\n }\n\n\n\n println!(\"unimplemented setting found: {:?}\", new_value);\n\n }\n\n\n\n /// Save settings to a file specified by const `SETTINGS_FILE_NAME`.\n\n ///\n\n /// Saves current settings from `Vec<SettingsContainer>` field and game controller\n", "file_path": "src/settings.rs", "rank": 71, "score": 36109.31261050882 }, { "content": "\n\n settings\n\n }\n\n\n\n /// Get settings.\n\n pub fn get_settings(&self) -> &Vec<SettingContainer> {\n\n &self.settings\n\n }\n\n\n\n /// Updates new value to `SettingContainer` existing in field `Vec<SettingContainer>`.\n\n ///\n\n /// Update will only happen to first found `IntegerSetting` or `BooleanSetting` that\n\n /// matches with the argument `new_value`.\n\n pub fn update_setting(&mut self, new_value: SettingType) {\n\n // FIXME: Change Vec<SettingContainer> to better system, so there won't\n\n // be need to find correct setting with loop.\n\n\n\n match new_value {\n\n SettingType::Boolean(event, value) => {\n\n for setting in &mut self.settings {\n", "file_path": "src/settings.rs", "rank": 72, "score": 36108.37451579471 }, { "content": " if let &mut SettingType::Integer(container_setting, ref mut old_value) = &mut self.setting_type {\n\n if container_setting == setting {\n\n *old_value = value;\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n /// Get setting's name text.\n\n pub fn get_name(&self) -> &str {\n\n self.name\n\n }\n\n\n\n /// Get setting data.\n\n pub fn get_value(&self) -> SettingType {\n\n self.setting_type\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 73, "score": 36108.32402794537 }, { "content": " FullScreen,\n\n ShowFpsCounter,\n\n VSync,\n\n}\n\n\n\n/// Setting and it's value.\n\n#[derive(Copy, Clone, Debug)]\n\npub enum SettingType {\n\n Boolean(BooleanSetting, bool),\n\n Integer(IntegerSetting, i32),\n\n}\n\n\n\n/// Save and load settings. 
Handle command line argument settings.\n\npub struct Settings {\n\n settings: Vec<SettingContainer>,\n\n controller_mappings: Vec<String>,\n\n command_line_arguments: Arguments,\n\n}\n\n\n\nimpl Settings {\n", "file_path": "src/settings.rs", "rank": 74, "score": 36107.24799288257 }, { "content": "\n\n/// Parsed command line arguments.\n\n///\n\n/// # Supported arguments\n\n/// * `--fps`\n\n/// * `--joystick-events`\n\n/// * `--help` or `-h`\n\n/// * `--music path_to_music_file`\n\npub struct Arguments {\n\n show_help: bool,\n\n print_fps_count: bool,\n\n print_joystick_events: bool,\n\n music_file_path: Option<String>,\n\n}\n\n\n\nimpl Arguments {\n\n /// Parse command line arguments\n\n ///\n\n /// Returns with Err(unknown_argument) if there is\n\n /// unknown argument.\n", "file_path": "src/settings.rs", "rank": 75, "score": 36107.03285990601 }, { "content": " /// # In the generated documentation, the following game controller mapping\n\n /// # may be wrapped to multiple lines but its really a one line of text.\n\n /// 03000000100800000300000010010000,USB Gamepad , a:b2, b:b1, y:b0, x:b3, start:b9, guide:b12, back:b8, dpup:h0.1, dpleft:h0.8, dpdown:h0.4, dpright:h0.2, leftshoulder:b6, rightshoulder:b7, leftstick:b10, rightstick:b11, leftx:a0, lefty:a1, rightx:a3, righty:a2, lefttrigger:b4, righttrigger:b5\n\n ///\n\n /// ```\n\n pub fn load(&mut self) {\n\n let mut file = match File::open(SETTINGS_FILE_NAME) {\n\n Ok(file) => file,\n\n Err(error) => {\n\n println!(\"couldn't load settings: {}\", error);\n\n return;\n\n },\n\n };\n\n\n\n let mut settings_text = String::new();\n\n\n\n if let Err(error) = file.read_to_string(&mut settings_text) {\n\n println!(\"couldn't load settings: {}\", error);\n\n return;\n\n }\n", "file_path": "src/settings.rs", "rank": 76, "score": 36106.24650764078 }, { "content": " pub fn parse(args: Args) -> Result<Arguments, String> {\n\n let mut arguments = Arguments {\n\n show_help: false,\n\n print_fps_count: false,\n\n print_joystick_events: false,\n\n music_file_path: None,\n\n };\n\n\n\n let mut argument_parser_state = None;\n\n\n\n for arg in args.skip(1) {\n\n match argument_parser_state {\n\n Some(ArgumentParserState::MusicFilePath) => {\n\n arguments.music_file_path = Some(arg);\n\n argument_parser_state = None;\n\n },\n\n None => {\n\n if arg == \"--fps\" {\n\n arguments.print_fps_count = true;\n\n } else if arg == \"--joystick-events\" {\n", "file_path": "src/settings.rs", "rank": 77, "score": 36105.78120943017 }, { "content": " settings_text.push_str(setting.get_name());\n\n settings_text.push('=');\n\n settings_text.push_str(&value.to_string());\n\n settings_text.push('\\n');\n\n }\n\n }\n\n }\n\n\n\n settings_text.push_str(\"\\n[GameControllerMappings]\\n# https://wiki.libsdl.org/SDL_GameControllerAddMapping\\n\\n\");\n\n\n\n for mapping in &self.controller_mappings {\n\n settings_text.push_str(mapping);\n\n settings_text.push('\\n');\n\n }\n\n\n\n let mut file = match File::create(SETTINGS_FILE_NAME) {\n\n Ok(file) => file,\n\n Err(error) => {\n\n println!(\"couldn't save settings: {}\", error);\n\n return;\n", "file_path": "src/settings.rs", "rank": 78, "score": 36105.63721747332 }, { "content": " /// mappings from `Vec<String>`.\n\n ///\n\n /// For file format example, see load function's documentation.\n\n ///\n\n /// If saving the file fails, error message will be printed to\n\n /// standard output.\n\n pub fn save(&self) {\n\n let mut settings_text = String::new();\n\n\n\n settings_text.push_str(\"# Settings file for Space Boss 
Battles\\n\\n[Settings]\\n\");\n\n\n\n for setting in &self.settings {\n\n match setting.get_value() {\n\n SettingType::Boolean(_, value) => {\n\n settings_text.push_str(setting.get_name());\n\n settings_text.push('=');\n\n settings_text.push_str(&value.to_string());\n\n settings_text.push('\\n');\n\n },\n\n SettingType::Integer(_, value) => {\n", "file_path": "src/settings.rs", "rank": 79, "score": 36105.220036397965 }, { "content": " /// match that key-value pair to available settings in `Vec<SettingsContainer>` field.\n\n ///\n\n /// If parser finds `[GameControllerMappings]` section, it adds all following non empty lines to\n\n /// `Vec<String>` field named `controller_mappings`.\n\n ///\n\n /// ## Example file\n\n ///\n\n /// ```text\n\n /// # Settings file for Space Boss Battles\n\n ///\n\n /// [Settings]\n\n /// Full screen=false\n\n /// FPS counter=false\n\n /// VSync=true\n\n /// Music volume=128\n\n /// Effect volume=128\n\n ///\n\n /// [GameControllerMappings]\n\n /// # https://wiki.libsdl.org/SDL_GameControllerAddMapping\n\n ///\n", "file_path": "src/settings.rs", "rank": 80, "score": 36103.078784163474 }, { "content": " }\n\n };\n\n\n\n if let Err(error) = file.write_all(settings_text.as_bytes()) {\n\n println!(\"couldn't save settings: {}\", error);\n\n }\n\n }\n\n\n\n /// Load settings from a file specified by const `SETTINGS_FILE_NAME`.\n\n ///\n\n /// If opening or reading the settings file fails or there is parsing error, an error message\n\n /// will be printed out to standard output.\n\n ///\n\n /// # File format\n\n ///\n\n /// Note that parser will trim every line it reads from the file.\n\n ///\n\n /// Empty lines will be skipped and lines starting with `#` will be treated as comments.\n\n ///\n\n /// If parser finds `[Settings]` section, it tries to parse key-value pairs `setting name=value` and\n", "file_path": "src/settings.rs", "rank": 81, "score": 36102.136832948214 }, { "content": "\n\n let mut settings_parser = None;\n\n\n\n for line in settings_text.lines() {\n\n let line = line.trim();\n\n\n\n if line == \"\" || line.starts_with(\"#\") {\n\n continue;\n\n } else if line == \"[Settings]\" {\n\n settings_parser = Some(SettingsParserMode::Settings);\n\n continue;\n\n } else if line == \"[GameControllerMappings]\" {\n\n settings_parser = Some(SettingsParserMode::GameControllerMappings);\n\n continue;\n\n }\n\n\n\n match settings_parser {\n\n Some(SettingsParserMode::Settings) => {\n\n let mut iterator = line.split(\"=\");\n\n let name = match iterator.next() {\n", "file_path": "src/settings.rs", "rank": 82, "score": 36100.367946533785 }, { "content": "\n\n },\n\n Some(SettingsParserMode::GameControllerMappings) => {\n\n self.controller_mappings.push(line.to_string());\n\n },\n\n None => (),\n\n }\n\n }\n\n }\n\n\n\n pub fn game_controller_mappings(&self) -> &Vec<String> {\n\n &self.controller_mappings\n\n }\n\n\n\n /// Adds game controller mapping to `Vec<String>` located at `controller_mappings` field.\n\n pub fn add_game_controller_mapping(&mut self, mapping: String) {\n\n self.controller_mappings.push(mapping);\n\n }\n\n\n\n /// Is joystick event printing enabled.\n", "file_path": "src/settings.rs", "rank": 83, "score": 36099.60157704286 }, { "content": " Some(name) => name,\n\n None => {\n\n println!(\"couldn't load settings, invalid setting: {}\", line);\n\n continue;\n\n }\n\n };\n\n\n\n let value = match iterator.next() {\n\n Some(name) => name,\n\n None => {\n\n println!(\"couldn't load settings, invalid setting: {}\", line);\n\n continue;\n\n }\n\n 
};\n\n\n\n for setting in &mut self.settings {\n\n if setting.get_name() != name {\n\n continue;\n\n }\n\n\n", "file_path": "src/settings.rs", "rank": 84, "score": 36099.258830670995 }, { "content": "/*\n\nsrc/settings.rs, 2017-08-24\n\n\n\nCopyright (c) 2017 Juuso Tuononen\n\n\n\nThis file is licensed under\n\n\n\nApache License, Version 2.0\n\n\n\nor\n\n\n\nMIT License\n\n*/\n\n\n\n//! Settings loading and saving, command line arguments.\n\n\n\nuse std::env::Args;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\n\n", "file_path": "src/settings.rs", "rank": 85, "score": 36097.057401996135 }, { "content": "\n\n /// Update renderer to match new screen size.\n\n fn update_screen_size(&mut self, new_width_in_pixels: i32, new_height_in_pixels: i32);\n\n\n\n /// Get current screen width in pixels\n\n fn screen_width_pixels(&self) -> i32;\n\n}\n\n\n\nimpl Renderer for OpenGLRenderer {\n\n\n\n /// Clears OpenGL color buffer.\n\n fn start(&mut self) {\n\n unsafe {\n\n gl_raw::Clear(gl_raw::COLOR_BUFFER_BIT);\n\n }\n\n }\n\n\n\n fn render(&mut self, logic: &Logic, only_background: bool) {\n\n self.texture_shader.use_program();\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 86, "score": 34150.62727107683 }, { "content": " for text in gui.get_gui_fps_counter().texts().into_iter() {\n\n self.render_text(text);\n\n }\n\n }\n\n }\n\n\n\n /// Swap color buffers and check OpenGL errors.\n\n fn end<W: Window>(&mut self, window: &mut W) {\n\n window.swap_buffers().expect(\"couldn't swap rendering buffers\");\n\n\n\n while let Err(error) = gl::GLError::get_error() {\n\n println!(\"OpenGL error: {:?}\", error);\n\n }\n\n }\n\n\n\n /// Converts x and y to OpenGL normalized device coordinates [-1.0,1.0] and\n\n /// multiplies converted coordinates with `inverse_projection_matrix`.\n\n fn screen_coordinates_to_world_coordinates(&self, x: i32, y: i32) -> Point2<f32> {\n\n let width = self.screen_width/2;\n\n let height = self.screen_height/2;\n", "file_path": "src/renderer/mod.rs", "rank": 87, "score": 34144.99183799414 }, { "content": "\n\nuse cgmath::{Vector3, Matrix4, Point2, Vector4};\n\nuse cgmath;\n\nuse cgmath::prelude::*;\n\n\n\nuse gl::buffer::*;\n\nuse gl::texture::*;\n\nuse gl::gl_raw;\n\nuse gl;\n\n\n\nuse renderer::texture::Textures;\n\nuse renderer::shader::*;\n\n\n\nuse logic::{Logic, LaserColor};\n\n\n\nuse gui::GUI;\n\nuse gui::components::GUIText;\n\n\n\npub const DEFAULT_SCREEN_WIDTH: i32 = 640;\n\npub const DEFAULT_SCREEN_HEIGHT: i32 = 480;\n", "file_path": "src/renderer/mod.rs", "rank": 88, "score": 34144.56656033966 }, { "content": "\n\n fn half_screen_width_world_coordinates(&self) -> f32 {\n\n self.half_screen_width_world_coordinates\n\n }\n\n\n\n fn screen_width_pixels(&self) -> i32 {\n\n self.screen_width\n\n }\n\n}\n\n\n\nimpl OpenGLRenderer {\n\n /// Creates new OpenGLRenderer.\n\n pub fn new<W: Window>(window: &W) -> OpenGLRenderer {\n\n gl_raw::load_with(|name| window.gl_get_proc_address(name));\n\n\n\n unsafe {\n\n gl_raw::ClearColor(0.0,0.0,0.0,1.0);\n\n }\n\n\n\n println!(\"OpenGL context information:\");\n", "file_path": "src/renderer/mod.rs", "rank": 89, "score": 34141.43427300366 }, { "content": "/*\n\nsrc/renderer/mod.rs, 2017-09-01\n\n\n\nCopyright (c) 2017 Juuso Tuononen\n\n\n\nThis file is licensed under\n\n\n\nApache License, Version 2.0\n\n\n\nor\n\n\n\nMIT License\n\n*/\n\n\n\n//! 
Render GUI and Logic.\n\n\n\nmod texture;\n\nmod shader;\n\n\n\nuse window::{Window, RenderingContext};\n", "file_path": "src/renderer/mod.rs", "rank": 90, "score": 34140.53149780922 }, { "content": " /// Tell OpenGL to use this shader program.\n\n pub fn use_program(&mut self) {\n\n self.program.use_program();\n\n }\n\n}\n\n\n\n/// Render with specific color. Supports OpenGL 3.3 and OpenGL ES 2.0.\n\npub struct ColorShader {\n\n program: Program,\n\n projection: UniformMatrix4,\n\n model: UniformMatrix4,\n\n color: UniformVector3,\n\n}\n\n\n\nimpl ColorShader {\n\n /// Creates new ColorShader\n\n ///\n\n /// # Panics\n\n /// If there is some error in creating the shader or uniforms.\n\n pub fn new() -> ColorShader {\n", "file_path": "src/renderer/shader.rs", "rank": 91, "score": 34139.932434970135 }, { "content": " pub fn use_program(&mut self) {\n\n self.program.use_program();\n\n }\n\n}\n\n\n\n/// Render tile map tiles. Supports OpenGL 3.3 and OpenGL ES 2.0.\n\npub struct TileMapShader {\n\n program: Program,\n\n projection: UniformMatrix4,\n\n model: UniformMatrix4,\n\n tile_position_change_x_y_and_scaling_factor: UniformVector3,\n\n}\n\n\n\nimpl TileMapShader {\n\n /// Creates new TileMapShader\n\n ///\n\n /// # Panics\n\n /// If there is some error in creating the shader or uniforms.\n\n pub fn new() -> TileMapShader {\n\n\n", "file_path": "src/renderer/shader.rs", "rank": 92, "score": 34138.99173308625 }, { "content": "use std::ffi::CString;\n\n\n\nuse gl::shader::*;\n\nuse gl::uniform::*;\n\n\n\nuse cgmath::{Matrix4, Vector3};\n\n\n\n/// Render with texture. Supports OpenGL 3.3 and OpenGL ES 2.0.\n\npub struct TextureShader {\n\n program: Program,\n\n projection: UniformMatrix4,\n\n model: UniformMatrix4,\n\n}\n\n\n\nimpl TextureShader {\n\n /// Creates new TextureShader\n\n ///\n\n /// # Panics\n\n /// If there is some error in creating the shader or uniforms.\n\n pub fn new() -> TextureShader {\n", "file_path": "src/renderer/shader.rs", "rank": 93, "score": 34137.764946908115 }, { "content": " self.render_color_rectangle_with_color(laser, &BLUE_COLOR);\n\n }\n\n }\n\n\n\n if logic.get_explosion().visible() {\n\n for particle in logic.get_explosion().particles() {\n\n self.render_color_rectangle_with_color(particle, &PARTICLE_COLOR);\n\n }\n\n }\n\n }\n\n\n\n fn render_gui(&mut self, gui: &GUI) {\n\n let components = gui.components();\n\n\n\n self.color_shader.use_program();\n\n\n\n for button in components.buttons() {\n\n self.render_color_rectangle(button);\n\n }\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 94, "score": 34136.890148428836 }, { "content": " let x: f32 = (x - width) as f32 / width as f32;\n\n let y: f32 = (y - height) as f32 / -height as f32;\n\n\n\n let vector = self.inverse_projection_matrix * Vector4::new(x, y, 0.0, 1.0);\n\n\n\n Point2::new(vector.x,vector.y)\n\n }\n\n\n\n /// Updates fields `screen_width` and `screen_height`,\n\n /// OpenGL viewport, and projection matrix to match current screen size.\n\n fn update_screen_size(&mut self, new_width_in_pixels: i32, new_height_in_pixels: i32) {\n\n unsafe {\n\n gl_raw::Viewport(0,0,new_width_in_pixels, new_height_in_pixels);\n\n }\n\n\n\n self.screen_width = new_width_in_pixels;\n\n self.screen_height = new_height_in_pixels;\n\n\n\n self.update_projection_matrix();\n\n }\n", "file_path": "src/renderer/mod.rs", "rank": 95, "score": 34135.4495335935 }, { "content": "\n\n /// Tell OpenGL to use this shader program.\n\n pub fn use_program(&mut self) {\n\n self.program.use_program();\n\n }\n\n}\n\n\n\n/// 
Build shader program from source code string slices.\n\n///\n\n/// # Panics\n\n/// * There is error compiling or linking the shaders.\n\n/// * Shader code contains 0 byte.\n\n///\n\n/// # Vertex attribute variable indexes\n\n/// * variable \"vertex\", index 0\n\n/// * variable \"texture_coordinates_attribute\", index 1\n\n///\n", "file_path": "src/renderer/shader.rs", "rank": 96, "score": 34131.19044352184 }, { "content": "\n\n renderer\n\n }\n\n\n\n /// Updates `OpenGLRenderer` fields `half_screen_width_world_coordinates`,\n\n /// `projection_matrix` and `inverse_projection_matrix` from fields `screen_width` and `screen_height`\n\n ///\n\n /// # Errors\n\n /// If inverse matrix calculation fails `inverse_projection_matrix` field will be set to identity matrix.\n\n fn update_projection_matrix(&mut self) {\n\n self.half_screen_width_world_coordinates = (self.screen_width as f32 /self.screen_height as f32) * SCREEN_TOP_Y_VALUE_IN_WORLD_COORDINATES;\n\n self.projection_matrix = cgmath::ortho::<f32>(-self.half_screen_width_world_coordinates, self.half_screen_width_world_coordinates, -SCREEN_TOP_Y_VALUE_IN_WORLD_COORDINATES, SCREEN_TOP_Y_VALUE_IN_WORLD_COORDINATES, 1.0, -1.0);\n\n\n\n match self.projection_matrix.inverse_transform() {\n\n Some(matrix) => self.inverse_projection_matrix = matrix,\n\n None => {\n\n println!(\"Calculating inverse projection matrix failed\");\n\n self.inverse_projection_matrix = Matrix4::identity();\n\n }\n\n };\n", "file_path": "src/renderer/mod.rs", "rank": 97, "score": 34131.15911552715 }, { "content": "use image::png::PNGDecoder;\n\nuse image::{ImageDecoder, DecodingResult, ColorType};\n\n\n\n/// Available textures.\n\npub enum Textures {\n\n Player,\n\n Enemy,\n\n EnemyWithShield,\n\n Background,\n\n Font,\n\n Shield,\n\n LaserCannonGreen,\n\n LaserCannonRed,\n\n LaserBomb,\n\n TextureCount,\n\n}\n\n\n\nimpl Textures {\n\n /// Loads textures to an array.\n\n ///\n", "file_path": "src/renderer/texture.rs", "rank": 98, "score": 34131.0889736528 }, { "content": " /// * Can't read image dimensions, color type or data.\n\n /// * If image data is not unsigned bytes.\n\n /// * Image color type is not RGBA or RGB.\n\n fn load(file_path: &str) -> Texture {\n\n let img_file = File::open(file_path).expect(\"img opening fail\");\n\n let mut img = PNGDecoder::new(img_file);\n\n\n\n let (width, height) = img.dimensions().expect(\"img dimensions fail\");\n\n\n\n let rgba;\n\n match img.colortype().expect(\"img color type fail\") {\n\n ColorType::RGBA(_) => rgba = true,\n\n ColorType::RGB(_) => rgba = false,\n\n _ => panic!(\"image's color type is not RGB or RGBA\"),\n\n }\n\n\n\n let img_data_result = img.read_image().expect(\"img decoding fail\");\n\n\n\n let img_data = match img_data_result {\n\n DecodingResult::U8(data) => data,\n\n _ => panic!(\"unknown image data\"),\n\n };\n\n\n\n Texture::new(width, height, img_data, rgba)\n\n }\n\n}", "file_path": "src/renderer/texture.rs", "rank": 99, "score": 34130.76110940569 } ]
Rust
crates/fluvio-test/src/tests/longevity/mod.rs
bohlmannc/fluvio
b5a3105600b6886c55d76707d369fa59f5d9673b
pub mod producer; pub mod consumer; use core::panic; use std::any::Any; use std::num::ParseIntError; use std::time::Duration; use structopt::StructOpt; use fluvio_test_derive::fluvio_test; use fluvio_test_util::test_meta::environment::EnvironmentSetup; use fluvio_test_util::test_meta::{TestOption, TestCase}; use fluvio_test_util::async_process; #[derive(Debug, Clone)] pub struct LongevityTestCase { pub environment: EnvironmentSetup, pub option: LongevityTestOption, } impl From<TestCase> for LongevityTestCase { fn from(test_case: TestCase) -> Self { let longevity_option = test_case .option .as_any() .downcast_ref::<LongevityTestOption>() .expect("LongevityTestOption") .to_owned(); LongevityTestCase { environment: test_case.environment, option: longevity_option, } } } #[derive(Debug, Clone, StructOpt, Default, PartialEq)] #[structopt(name = "Fluvio Longevity Test")] pub struct LongevityTestOption { #[structopt(long, parse(try_from_str = parse_seconds), default_value = "3600")] runtime_seconds: Duration, #[structopt(long, default_value = "1000")] record_size: usize, #[structopt(long, default_value = "1")] pub producers: u32, #[structopt(long, default_value = "1")] pub consumers: u32, #[structopt(long, short)] verbose: bool, } fn parse_seconds(s: &str) -> Result<Duration, ParseIntError> { let seconds = s.parse::<u64>()?; Ok(Duration::from_secs(seconds)) } impl TestOption for LongevityTestOption { fn as_any(&self) -> &dyn Any { self } } #[fluvio_test(topic = "longevity")] pub fn longevity(mut test_driver: FluvioTestDriver, mut test_case: TestCase) { let option: LongevityTestCase = test_case.into(); println!("Starting Longevity Test"); println!("Expected runtime: {:?}", option.option.runtime_seconds); println!("# Consumers: {}", option.option.consumers); println!("# Producers: {}", option.option.producers); if !option.option.verbose { println!("Run with `--verbose` flag for more test output"); } let mut consumer_wait = Vec::new(); for i in 0..option.option.consumers { println!("Starting Consumer #{}", i); let consumer = async_process!(async { test_driver .connect() .await .expect("Connecting to cluster failed"); consumer::consumer_stream(test_driver.clone(), option.clone(), i).await }); consumer_wait.push(consumer); } let mut producer_wait = Vec::new(); for i in 0..option.option.producers { println!("Starting Producer #{}", i); let producer = async_process!(async { test_driver .connect() .await .expect("Connecting to cluster failed"); producer::producer(test_driver, option, i).await }); producer_wait.push(producer); } let _: Vec<_> = consumer_wait .into_iter() .map(|c| c.join().expect("Consumer thread fail")) .collect(); let _: Vec<_> = producer_wait .into_iter() .map(|p| p.join().expect("Producer thread fail")) .collect(); }
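The option struct in this file routes `--runtime-seconds` through a custom parser so the value arrives as a `std::time::Duration`. A minimal, self-contained sketch of that StructOpt pattern follows; the `DemoOpt` struct and its flag are hypothetical and only illustrate the `parse(try_from_str = ...)` attribute used above.

```rust
use std::num::ParseIntError;
use std::time::Duration;
use structopt::StructOpt;

// Same parser shape as in the longevity test: seconds string -> Duration.
fn parse_seconds(s: &str) -> Result<Duration, ParseIntError> {
    let seconds = s.parse::<u64>()?;
    Ok(Duration::from_secs(seconds))
}

#[derive(Debug, StructOpt)]
struct DemoOpt {
    /// Runtime in seconds, converted to a Duration while parsing arguments.
    #[structopt(long, parse(try_from_str = parse_seconds), default_value = "3600")]
    runtime_seconds: Duration,
}

fn main() {
    // e.g. `demo --runtime-seconds 90`
    let opt = DemoOpt::from_args();
    println!("will run for {:?}", opt.runtime_seconds);
}
```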
pub mod producer; pub mod consumer; use core::panic; use std::any::Any; use std::num::ParseIntError; use std::time::Duration; use structopt::StructOpt; use fluvio_test_derive::fluvio_test; use fluvio_test_util::test_meta::environment::EnvironmentSetup; use fluvio_test_util::test_meta::{TestOption, TestCase}; use fluvio_test_util::async_process; #[derive(Debug, Clone)] pub struct LongevityTestCase { pub environment: EnvironmentSetup, pub option: LongevityTestOption, } impl From<TestCase> for LongevityTestCase { fn from(test_case: TestCase) -> Self { let longevity_option = test_cas
} #[derive(Debug, Clone, StructOpt, Default, PartialEq)] #[structopt(name = "Fluvio Longevity Test")] pub struct LongevityTestOption { #[structopt(long, parse(try_from_str = parse_seconds), default_value = "3600")] runtime_seconds: Duration, #[structopt(long, default_value = "1000")] record_size: usize, #[structopt(long, default_value = "1")] pub producers: u32, #[structopt(long, default_value = "1")] pub consumers: u32, #[structopt(long, short)] verbose: bool, } fn parse_seconds(s: &str) -> Result<Duration, ParseIntError> { let seconds = s.parse::<u64>()?; Ok(Duration::from_secs(seconds)) } impl TestOption for LongevityTestOption { fn as_any(&self) -> &dyn Any { self } } #[fluvio_test(topic = "longevity")] pub fn longevity(mut test_driver: FluvioTestDriver, mut test_case: TestCase) { let option: LongevityTestCase = test_case.into(); println!("Starting Longevity Test"); println!("Expected runtime: {:?}", option.option.runtime_seconds); println!("# Consumers: {}", option.option.consumers); println!("# Producers: {}", option.option.producers); if !option.option.verbose { println!("Run with `--verbose` flag for more test output"); } let mut consumer_wait = Vec::new(); for i in 0..option.option.consumers { println!("Starting Consumer #{}", i); let consumer = async_process!(async { test_driver .connect() .await .expect("Connecting to cluster failed"); consumer::consumer_stream(test_driver.clone(), option.clone(), i).await }); consumer_wait.push(consumer); } let mut producer_wait = Vec::new(); for i in 0..option.option.producers { println!("Starting Producer #{}", i); let producer = async_process!(async { test_driver .connect() .await .expect("Connecting to cluster failed"); producer::producer(test_driver, option, i).await }); producer_wait.push(producer); } let _: Vec<_> = consumer_wait .into_iter() .map(|c| c.join().expect("Consumer thread fail")) .collect(); let _: Vec<_> = producer_wait .into_iter() .map(|p| p.join().expect("Producer thread fail")) .collect(); }
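The test body above fans out one child per consumer and producer, keeps every handle, and then joins them all, failing the run if any child panicked. The real harness wraps each child in `fluvio_test_util::async_process!`; the plain `std::thread` sketch below only illustrates that spawn-all, join-all shape and is not the actual macro.

```rust
use std::thread::{self, JoinHandle};

fn main() {
    // Spawn one worker per consumer/producer and keep the handles...
    let workers: Vec<JoinHandle<()>> = (0..4u32)
        .map(|i| {
            thread::spawn(move || {
                println!("worker #{} running", i);
            })
        })
        .collect();

    // ...then wait for every worker, panicking if any of them failed.
    let _: Vec<_> = workers
        .into_iter()
        .map(|w| w.join().expect("worker thread fail"))
        .collect();
}
```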
e .option .as_any() .downcast_ref::<LongevityTestOption>() .expect("LongevityTestOption") .to_owned(); LongevityTestCase { environment: test_case.environment, option: longevity_option, } }
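The `From<TestCase>` impl completed here recovers the concrete `LongevityTestOption` from a boxed `TestOption` trait object via `as_any` plus `downcast_ref`. A self-contained, std-only sketch of that pattern follows; the trait and struct are simplified stand-ins (no `DynClone` supertrait) for the real `TestOption`/`LongevityTestOption`.

```rust
use std::any::Any;
use std::fmt::Debug;

// Simplified version of the TestOption trait: expose &dyn Any for downcasting.
trait TestOption: Debug {
    fn as_any(&self) -> &dyn Any;
}

#[derive(Debug, Clone, PartialEq)]
struct LongevityOption {
    producers: u32,
}

impl TestOption for LongevityOption {
    fn as_any(&self) -> &dyn Any {
        self
    }
}

fn main() {
    let boxed: Box<dyn TestOption> = Box::new(LongevityOption { producers: 3 });

    // Recover the concrete type from the trait object, then clone it out,
    // just like the downcast in the From<TestCase> impl above.
    let concrete = boxed
        .as_any()
        .downcast_ref::<LongevityOption>()
        .expect("LongevityOption")
        .to_owned();

    assert_eq!(concrete, LongevityOption { producers: 3 });
}
```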
function_block-function_prefixed
[ { "content": "fn cluster_cleanup(option: EnvironmentSetup) {\n\n if option.cluster_delete() {\n\n let mut setup = TestCluster::new(option);\n\n\n\n let cluster_cleanup_wait = async_process!(async {\n\n setup.remove_cluster().await;\n\n });\n\n let _ = cluster_cleanup_wait\n\n .join()\n\n .expect(\"Cluster cleanup wait failed\");\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-test/src/main.rs", "rank": 0, "score": 229012.67951653217 }, { "content": "pub fn format_json(value: &[u8], suppress: bool) -> Option<String> {\n\n let maybe_json = match serde_json::from_slice(value) {\n\n Ok(value) => Some(value),\n\n Err(e) if !suppress => Some(serde_json::json!({\n\n \"error\": format!(\"{}\", e),\n\n })),\n\n _ => None,\n\n };\n\n\n\n maybe_json.and_then(|json| serde_json::to_string(&json).ok())\n\n}\n\n\n\n// -----------------------------------\n\n// Text\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 1, "score": 223131.62942763866 }, { "content": "// FIXME: Need to confirm SPU options count match cluster. Offer self-correcting behavior\n\nfn cluster_setup(option: &EnvironmentSetup) -> Result<(), ()> {\n\n let cluster_setup_wait = async_process!(async {\n\n if option.remove_cluster_before() {\n\n println!(\"Deleting existing cluster before starting test\");\n\n let mut setup = TestCluster::new(option.clone());\n\n setup.remove_cluster().await;\n\n }\n\n\n\n if option.cluster_start() || option.remove_cluster_before() {\n\n println!(\"Starting cluster and testing connection\");\n\n let mut test_cluster = TestCluster::new(option.clone());\n\n\n\n test_cluster\n\n .start()\n\n .await\n\n .expect(\"Unable to connect to fresh test cluster\");\n\n } else {\n\n println!(\"Testing connection to Fluvio cluster in profile\");\n\n Fluvio::connect()\n\n .await\n", "file_path": "crates/fluvio-test/src/main.rs", "rank": 2, "score": 219600.23904288097 }, { "content": "/// Structure json data into table row\n\n/// Print table header if `print_header` is true\n\n/// Rows may not stay aligned with table header\n\npub fn format_basic_table_record(record: &[u8], print_header: bool) -> Option<String> {\n\n use prettytable::{Row, cell, Cell, Slice};\n\n use prettytable::format::{self, FormatBuilder};\n\n\n\n let maybe_json: serde_json::Value = match serde_json::from_slice(record) {\n\n Ok(value) => value,\n\n Err(e) => {\n\n println!(\"error parsing record as json: {}\", e);\n\n return None;\n\n }\n\n };\n\n\n\n let obj = if let Some(obj) = maybe_json.as_object() {\n\n obj\n\n } else {\n\n println!(\"error: Unable to parse json as object map\");\n\n return None;\n\n };\n\n\n\n // This is the case where we don't provide any table info. 
We want to print a table w/ all top-level keys as headers\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 3, "score": 213623.45793498296 }, { "content": "/// Updates the TableModel used to render the TUI table during `TableModel::render()`\n\n/// Attempts to update relevant rows, but appends to table if the primary key doesn't exist\n\n/// Returned String is not intended to be used\n\npub fn format_fancy_table_record(record: &[u8], table_model: &mut TableModel) -> Option<String> {\n\n let maybe_json: serde_json::Value = match serde_json::from_slice(record) {\n\n Ok(value) => value,\n\n Err(e) => {\n\n println!(\"error parsing record as json: {}\", e);\n\n return None;\n\n }\n\n };\n\n\n\n // Handle updates as objects or list of objects\n\n match maybe_json {\n\n serde_json::Value::Object(json_obj) => {\n\n update_table_row(table_model, json_obj).ok()?;\n\n }\n\n serde_json::Value::Array(vec_obj) => {\n\n let json_array = flatten_json_array_updates(vec_obj).ok()?;\n\n for json_obj in json_array {\n\n update_table_row(table_model, json_obj).ok()?;\n\n }\n\n }\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 4, "score": 204574.31294958584 }, { "content": "pub fn validate_versions(min: i16, max: Option<i16>, field: Option<&str>) -> Option<String> {\n\n match (max, field) {\n\n // Print name in named fields\n\n (Some(max), Some(field)) if min > max => Some(format!(\n\n \"On {}, max version({}) is less than min({}).\",\n\n field, max, min\n\n )),\n\n // No name to print in unnamed fields\n\n (Some(max), None) if min > max => {\n\n Some(format!(\"Max version({}) is less than min({}).\", max, min))\n\n }\n\n (None, Some(field)) if min < 0 => Some(format!(\n\n \"On {} min version({}) must be positive.\",\n\n field, min\n\n )),\n\n (None, None) if min < 0 => Some(format!(\"Min version({}) must be positive.\", min)),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-protocol-derive/src/ast/prop.rs", "rank": 5, "score": 201562.02683124662 }, { "content": "#[smartmodule(filter_map)]\n\npub fn filter_map(record: &Record) -> Result<Option<(Option<RecordData>, RecordData)>> {\n\n let key = record.key.clone();\n\n let string = String::from_utf8_lossy(record.value.as_ref()).to_string();\n\n let int: i32 = string.parse()?;\n\n\n\n if int % 2 == 0 {\n\n let output = int / 2;\n\n Ok(Some((key.clone(), RecordData::from(output.to_string()))))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter_map/src/lib.rs", "rank": 6, "score": 196402.5486305718 }, { "content": "pub trait TestOption: Debug + DynClone {\n\n fn as_any(&self) -> &dyn Any;\n\n}\n\n\n\ndyn_clone::clone_trait_object!(TestOption);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TestCase {\n\n pub environment: EnvironmentSetup,\n\n pub option: Box<dyn TestOption>,\n\n}\n\n\n\nimpl TestCase {\n\n pub fn new(environment: EnvironmentSetup, option: Box<dyn TestOption>) -> Self {\n\n Self {\n\n environment,\n\n option,\n\n }\n\n }\n\n}\n", "file_path": "crates/fluvio-test-util/test_meta/mod.rs", "rank": 7, "score": 194086.47124663932 }, { "content": "pub trait EnvDetail: Debug + Clone {\n\n fn set_topic_name(&mut self, topic: String);\n\n fn topic_name(&self) -> String;\n\n fn is_topic_set(&self) -> bool;\n\n fn replication(&self) -> u16;\n\n fn client_log(&self) -> Option<String>;\n\n fn spu(&self) -> u16;\n\n fn remove_cluster_before(&self) -> bool;\n\n fn cluster_start(&self) -> bool;\n\n fn cluster_delete(&self) -> bool;\n\n fn develop_mode(&self) 
-> bool;\n\n fn skip_checks(&self) -> bool;\n\n fn tls_user(&self) -> String;\n\n fn authorization_config_map(&self) -> Option<String>;\n\n fn server_log(&self) -> Option<String>;\n\n fn log_dir(&self) -> Option<String>;\n\n fn timeout(&self) -> Duration;\n\n fn set_timeout(&mut self, timeout: Duration);\n\n fn cluster_type(&self) -> EnvironmentType;\n\n}\n", "file_path": "crates/fluvio-test-util/test_meta/environment.rs", "rank": 8, "score": 188014.07092195615 }, { "content": "pub fn default_option(index_max_interval_bytes: Size) -> ConfigOption {\n\n ConfigOption {\n\n segment_max_bytes: 100,\n\n index_max_interval_bytes,\n\n base_dir: temp_dir(),\n\n index_max_bytes: 1000,\n\n ..Default::default()\n\n }\n\n}\n\n\n\n#[derive(Builder)]\n\npub struct BatchProducer {\n\n #[builder(setter(into), default = \"0\")]\n\n base_offset: i64,\n\n #[builder(setter(into), default = \"0\")]\n\n producer_id: i64,\n\n #[builder(setter(into), default = \"2\")]\n\n pub records: u16,\n\n /// how many bytes in a record\n\n #[builder(setter, default = \"2\")]\n", "file_path": "crates/fluvio-storage/src/fixture.rs", "rank": 9, "score": 183932.29581500974 }, { "content": "#[smartmodule(map)]\n\npub fn my_map(_record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_map.rs", "rank": 10, "score": 181974.33890212505 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n let mut value = Vec::from(record.value.as_ref());\n\n\n\n value.make_ascii_uppercase();\n\n Ok((key, value.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map/src/lib.rs", "rank": 11, "score": 181974.33890212505 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n let output = SSN_RE.replace_all(string, \"***-**-****\").to_string();\n\n\n\n Ok((key, output.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_regex/src/lib.rs", "rank": 12, "score": 179600.09648697736 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let json = serde_json::from_slice::<serde_json::Value>(record.value.as_ref())?;\n\n let yaml_bytes = serde_yaml::to_vec(&json)?;\n\n\n\n Ok((record.key().cloned(), yaml_bytes.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_json/src/lib.rs", "rank": 13, "score": 179600.09648697736 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n let int = string.parse::<i32>()?;\n\n let value = (int * 2).to_string();\n\n\n\n Ok((key, value.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_double/src/lib.rs", "rank": 14, "score": 179600.09648697736 }, { "content": " #[async_trait]\n\n pub trait EnvironmentDriver {\n\n /// remove cluster\n\n async fn remove_cluster(&self);\n\n\n\n /// install cluster\n\n async fn start_cluster(&self) -> StartStatus;\n\n\n\n fn create_cluster_manager(&self) -> Box<dyn SpuClusterManager>;\n\n }\n\n\n\n #[derive(Clone)]\n\n pub enum TestEnvironmentDriver {\n\n K8(Box<K8EnvironmentDriver>),\n\n Local(Box<LocalEnvDriver>),\n\n }\n\n\n\n impl TestEnvironmentDriver 
{\n\n /// remove cluster\n\n pub async fn remove_cluster(&self) {\n\n match self {\n", "file_path": "crates/fluvio-test-util/setup/environment/mod.rs", "rank": 15, "score": 179569.5442160347 }, { "content": "/// create batches with produce and records count\n\npub fn create_batch_with_producer(producer: i64, records: u16) -> Batch {\n\n let mut batches = Batch::default();\n\n let header = batches.get_mut_header();\n\n header.magic = 2;\n\n header.producer_id = producer;\n\n header.producer_epoch = -1;\n\n\n\n for _ in 0..records {\n\n let mut record = Record::default();\n\n let bytes: Vec<u8> = TEST_RECORD.to_owned();\n\n record.value = bytes.into();\n\n batches.add_record(record);\n\n }\n\n\n\n batches\n\n}\n\n\n", "file_path": "crates/fluvio-dataplane-protocol/src/fixture.rs", "rank": 16, "score": 178289.6845343835 }, { "content": "pub fn main_k8_loop(opt: ScOpt) {\n\n use std::time::Duration;\n\n\n\n use fluvio_future::task::run_block_on;\n\n use fluvio_future::timer::sleep;\n\n\n\n use crate::init::start_main_loop;\n\n use controllers::run_k8_operators;\n\n\n\n // parse configuration (program exits on error)\n\n let is_local = opt.is_local();\n\n let ((sc_config, auth_policy), k8_config, tls_option) = opt.parse_cli_or_exit();\n\n\n\n println!(\"Starting SC, platform: {}\", &*crate::VERSION);\n\n\n\n inspect_system();\n\n\n\n run_block_on(async move {\n\n // init k8 service\n\n let k8_client = new_shared(k8_config).expect(\"problem creating k8 client\");\n", "file_path": "crates/fluvio-sc/src/k8/mod.rs", "rank": 17, "score": 178062.6702108826 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn get_log_directory() -> &'static str {\n\n \"/tmp\"\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DefaultLogDirectory(String);\n\n\n\nimpl Default for DefaultLogDirectory {\n\n fn default() -> Self {\n\n Self(get_log_directory().to_string())\n\n }\n\n}\n\n\n\nimpl fmt::Display for DefaultLogDirectory {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl FromStr for DefaultLogDirectory {\n", "file_path": "crates/fluvio-cluster/src/cli/start/mod.rs", "rank": 18, "score": 178062.6702108826 }, { "content": "fn assemble_requests(\n\n topic: &str,\n\n partitions_by_spu: HashMap<SpuId, HashMap<PartitionId, MemoryRecords>>,\n\n) -> Vec<(SpuId, DefaultProduceRequest)> {\n\n let mut requests: Vec<(SpuId, DefaultProduceRequest)> =\n\n Vec::with_capacity(partitions_by_spu.len());\n\n\n\n for (leader, partitions) in partitions_by_spu {\n\n let mut request = DefaultProduceRequest::default();\n\n\n\n let mut topic_request = DefaultTopicRequest {\n\n name: topic.to_string(),\n\n ..Default::default()\n\n };\n\n\n\n for (partition, records) in partitions {\n\n let mut partition_request = DefaultPartitionRequest {\n\n partition_index: partition,\n\n ..Default::default()\n\n };\n", "file_path": "crates/fluvio/src/producer/mod.rs", "rank": 19, "score": 175758.9390061865 }, { "content": "/// Given an API key, it returns max_version. 
None if not found\n\npub fn lookup_version(api_key: AdminPublicApiKey, versions: &[ApiVersionKey]) -> Option<i16> {\n\n for version in versions {\n\n if version.api_key == api_key as i16 {\n\n return Some(version.max_version);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "crates/fluvio-sc-schema/src/versions.rs", "rank": 20, "score": 173038.49421271327 }, { "content": "pub fn start_internal_server(ctx: SharedContext) {\n\n info!(\"starting internal services\");\n\n\n\n let addr = ctx.config().private_endpoint.clone();\n\n let server = FluvioApiServer::new(addr, ctx, ScInternalService::new());\n\n server.run();\n\n}\n", "file_path": "crates/fluvio-sc/src/services/private_api/mod.rs", "rank": 21, "score": 172552.45701911225 }, { "content": "/// Print records based on their type\n\npub fn format_dynamic_record(record: &[u8]) -> String {\n\n if is_binary(record) {\n\n format_binary_record(record)\n\n } else {\n\n format!(\"{}\", String::from_utf8_lossy(record))\n\n }\n\n}\n\n\n\n// -----------------------------------\n\n// Raw\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 22, "score": 172454.16796266162 }, { "content": "/// parse message and generate partition records\n\npub fn format_binary_record(record: &[u8]) -> String {\n\n let mut out = String::new();\n\n out.push_str(&bytes_to_hex_dump(record));\n\n out.push_str(&hex_dump_separator());\n\n out\n\n}\n\n\n\n// -----------------------------------\n\n// Dynamic\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 23, "score": 172454.16796266162 }, { "content": "/// Print records in raw format\n\npub fn format_raw_record(record: &[u8]) -> String {\n\n String::from_utf8_lossy(record).to_string()\n\n}\n\n\n\n// -----------------------------------\n\n// Table (basic table)\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 24, "score": 172454.16796266162 }, { "content": "fn create_smartmodule(\n\n name_or_path: &str,\n\n kind: SmartModuleKind,\n\n params: BTreeMap<String, String>,\n\n) -> Result<SmartModuleInvocation> {\n\n let wasm = if PathBuf::from(name_or_path).exists() {\n\n let raw_buffer = std::fs::read(name_or_path)?;\n\n debug!(len = raw_buffer.len(), \"read wasm bytes\");\n\n let mut encoder = GzEncoder::new(raw_buffer.as_slice(), Compression::default());\n\n let mut buffer = Vec::with_capacity(raw_buffer.len());\n\n encoder.read_to_end(&mut buffer)?;\n\n SmartModuleInvocationWasm::AdHoc(buffer)\n\n } else {\n\n SmartModuleInvocationWasm::Predefined(name_or_path.to_owned())\n\n };\n\n\n\n Ok(SmartModuleInvocation {\n\n wasm,\n\n kind,\n\n params: params.into(),\n", "file_path": "crates/fluvio-cli/src/consume/mod.rs", "rank": 25, "score": 171880.109625048 }, { "content": "#[smartmodule(array_map)]\n\npub fn my_array_map(_record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_array_map.rs", "rank": 26, "score": 171797.46738016815 }, { "content": "#[smartmodule(join)]\n\npub fn my_join(_record: &Record, _record1: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_join.rs", "rank": 27, "score": 170952.95527170977 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> 
{\n\n // Deserialize a JSON array with any kind of values inside\n\n let array: Vec<serde_json::Value> = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Convert each JSON value from the array back into a JSON string\n\n let strings: Vec<String> = array\n\n .into_iter()\n\n .map(|value| serde_json::to_string(&value))\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Create one record from each JSON string to send\n\n let records: Vec<(Option<RecordData>, RecordData)> = strings\n\n .into_iter()\n\n .map(|s| (None, RecordData::from(s)))\n\n .collect();\n\n Ok(records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_array/src/lib.rs", "rank": 28, "score": 167679.928629347 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n // Deserialize a RedditListing from JSON\n\n let listing: RedditListing = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Create a list of RedditPostData converted back into JSON strings\n\n let posts: Vec<(String, String)> = listing\n\n .data\n\n .children\n\n .into_iter()\n\n .map(|post: RedditPost| {\n\n // Convert each post into (ID, Post JSON)\n\n serde_json::to_string(&post.data).map(|json| (post.data.id, json))\n\n })\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Convert each Post into a Record whose key is the Post's ID\n\n let records = posts\n\n .into_iter()\n\n .map(|(id, post)| (Some(RecordData::from(id)), RecordData::from(post)))\n\n .collect();\n\n Ok(records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_reddit/src/lib.rs", "rank": 29, "score": 167679.928629347 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n // Deserialize a JSON object (Map) with any kind of values inside\n\n let object: Map<String, Value> = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Convert each JSON value from the array back into a JSON string\n\n let key_value_strings: Vec<(&String, String)> = object\n\n .iter()\n\n .map(|(key, value)| serde_json::to_string(value).map(|value| (key, value)))\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Create one record from each JSON string to send\n\n let key_value_records: Vec<(Option<RecordData>, RecordData)> = key_value_strings\n\n .into_iter()\n\n .map(|(key, value)| {\n\n (\n\n Some(RecordData::from(key.to_string())),\n\n RecordData::from(value),\n\n )\n\n })\n\n .collect();\n\n Ok(key_value_records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_object/src/lib.rs", "rank": 30, "score": 167679.928629347 }, { "content": "#[smartmodule(join)]\n\npub fn join(left_record: &Record, right_record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let left_value: i32 = std::str::from_utf8(left_record.value.as_ref())?.parse()?;\n\n let right_value: i32 = std::str::from_utf8(right_record.value.as_ref())?.parse()?;\n\n let value = left_value + right_value;\n\n\n\n Ok((None, value.to_string().into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/join/src/lib.rs", "rank": 31, "score": 166668.2397690558 }, { "content": "#[smartmodule(map, params)]\n\npub fn map(_record: &Record, _opt: &MapOpt) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_map_with_params.rs", "rank": 32, "score": 166668.2397690558 }, { "content": "pub fn install_println<S: 
AsRef<str>>(string: S) {\n\n if std::env::var(\"FLUVIO_BOOTSTRAP\").is_ok() {\n\n println!(\"\\x1B[1;34mfluvio:\\x1B[0m {}\", string.as_ref());\n\n } else {\n\n println!(\"{}\", string.as_ref());\n\n }\n\n}\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 33, "score": 166383.5968267048 }, { "content": "pub fn generate_smartmodule(config: &SmartModuleConfig, func: &SmartModuleFn) -> TokenStream {\n\n match config.kind {\n\n SmartModuleKind::Filter => {\n\n self::filter::generate_filter_smartmodule(func, config.has_params)\n\n }\n\n SmartModuleKind::Map => self::map::generate_map_smartmodule(func, config.has_params),\n\n SmartModuleKind::FilterMap => {\n\n self::filter_map::generate_filter_map_smartmodule(func, config.has_params)\n\n }\n\n SmartModuleKind::Aggregate => {\n\n self::aggregate::generate_aggregate_smartmodule(func, config.has_params)\n\n }\n\n SmartModuleKind::ArrayMap => {\n\n self::array_map::generate_array_map_smartmodule(func, config.has_params)\n\n }\n\n SmartModuleKind::Join => self::join::generate_join_smartmodule(func, config.has_params),\n\n }\n\n}\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/mod.rs", "rank": 34, "score": 165982.0700475419 }, { "content": "/// Print a single record in text format\n\npub fn format_text_record(record: &[u8], suppress: bool) -> String {\n\n if is_binary(record) && !suppress {\n\n format!(\"binary: ({} bytes)\", record.len())\n\n } else {\n\n format!(\"{}\", String::from_utf8_lossy(record))\n\n }\n\n}\n\n\n\n// -----------------------------------\n\n// Binary\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 35, "score": 161614.92705553767 }, { "content": "pub fn impl_smart_opt(input: DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &input.ident;\n\n\n\n // parse out all the field names in the struct as `Ident`s\n\n let fields = match input.data {\n\n Data::Struct(st) => st.fields,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n input.ident,\n\n \"SmartOpt derive macro only can be used on structs.\",\n\n ))\n\n }\n\n };\n\n\n\n let idents: Vec<&Ident> = fields\n\n .iter()\n\n .filter_map(|field| field.ident.as_ref())\n\n .collect::<Vec<&Ident>>();\n\n\n\n let keys: Vec<String> = idents\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/opt.rs", "rank": 36, "score": 156924.38329611864 }, { "content": "/// create server and spin up services, but don't run server\n\npub fn create_services(\n\n local_spu: SpuConfig,\n\n internal: bool,\n\n public: bool,\n\n) -> (\n\n DefaultSharedGlobalContext,\n\n Option<InternalApiServer>,\n\n Option<SpuPublicServer>,\n\n) {\n\n let ctx = FileReplicaContext::new_shared_context(local_spu);\n\n\n\n let public_ep_addr = ctx.config().public_socket_addr().to_owned();\n\n let private_ep_addr = ctx.config().private_socket_addr().to_owned();\n\n\n\n let public_server = if public {\n\n Some(create_public_server(public_ep_addr, ctx.clone()))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "crates/fluvio-spu/src/start.rs", "rank": 37, "score": 156215.47726946673 }, { "content": "/// Fetch OS information\n\nfn os_info() -> Option<String> {\n\n use sysinfo::SystemExt;\n\n let sys = sysinfo::System::new_all();\n\n\n\n let info = format!(\n\n \"{} {} (kernel {})\",\n\n sys.name()?,\n\n sys.os_version()?,\n\n sys.kernel_version()?,\n\n );\n\n\n\n Some(info)\n\n}\n", "file_path": "crates/fluvio-cli/src/version.rs", "rank": 38, "score": 152613.4254839422 }, { "content": "#[fluvio_test(name 
= \"producer\", topic = \"producer-test\")]\n\npub fn run(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let test_case: ProducerTestCase = test_case.into();\n\n let total_records = test_case.option.num_records;\n\n\n\n // If we assign more producers than records to split\n\n // then set # of producers to the # of records\n\n let producers = if total_records > test_case.option.producers {\n\n test_case.option.producers\n\n } else {\n\n println!(\n\n \"More producers than records to split. Reducing number to {}\",\n\n total_records\n\n );\n\n total_records\n\n };\n\n\n\n println!(\"\\nStarting Producer test\");\n\n println!(\"Producers: {}\", producers);\n\n println!(\"# Records: {}\", total_records);\n\n\n", "file_path": "crates/fluvio-test/src/tests/producer.rs", "rank": 39, "score": 152211.58106022532 }, { "content": "#[fluvio_test(name = \"consumer\", topic = \"producer-test\")]\n\npub fn run(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let test_case: ConsumerTestCase = test_case.into();\n\n let consumers = test_case.option.consumers;\n\n let partition = test_case.option.partition;\n\n let is_multi = test_case.option.multi_partition;\n\n let raw_offset = test_case.option.offset;\n\n\n\n // We'll assume for now that structopt is handling mutual exclusivity\n\n let offset = if test_case.option.offset_beginning {\n\n Offset::from_beginning(raw_offset as u32)\n\n } else if test_case.option.offset_end {\n\n Offset::from_end(raw_offset as u32)\n\n } else {\n\n Offset::absolute(raw_offset.into()).expect(\"Couldn't create absolute offset\")\n\n };\n\n\n\n println!(\"\\nStarting Consumer test\");\n\n\n\n println!(\"Consumers: {}\", consumers);\n\n println!(\"Starting offset: {:?}\", &offset);\n", "file_path": "crates/fluvio-test/src/tests/consumer.rs", "rank": 40, "score": 152157.182628043 }, { "content": "#[fluvio_test(topic = \"test-bug\")]\n\npub fn concurrent(mut test_driver: TestDriver, mut test_case: TestCase) {\n\n println!(\"Testing concurrent consumer and producer\");\n\n let option: ConcurrentTestCase = test_case.into();\n\n\n\n run_block_on(async {\n\n let (sender, receiver) = std::sync::mpsc::channel();\n\n spawn(consumer::consumer_stream(\n\n test_driver.clone(),\n\n option.clone(),\n\n receiver,\n\n ));\n\n producer::producer(&test_driver, option, sender).await;\n\n });\n\n}\n", "file_path": "crates/fluvio-test/src/tests/concurrent/mod.rs", "rank": 41, "score": 151678.24495506694 }, { "content": "/// Copy a byte array into an instance's linear memory\n\n/// and return the offset relative to the module's memory.\n\npub fn copy_memory_to_instance(\n\n store: &mut Store<()>,\n\n instance: &Instance,\n\n bytes: &[u8],\n\n) -> Result<isize, Error> {\n\n // Get the \"memory\" export of the module.\n\n // If the module does not export it, just panic,\n\n // since we are not going to be able to copy the data.\n\n let memory = instance\n\n .get_memory(&mut *store, MEMORY)\n\n .ok_or_else(|| anyhow!(\"Missing memory\"))?;\n\n\n\n // The module is not using any bindgen libraries,\n\n // so it should export its own alloc function.\n\n //\n\n // Get the guest's exported alloc function, and call it with the\n\n // length of the byte array we are trying to copy.\n\n // The result is an offset relative to the module's linear memory,\n\n // which is used to copy the bytes into the module's memory.\n\n // Then, return the offset.\n", "file_path": "crates/fluvio-smartengine/src/smartmodule/memory.rs", "rank": 42, "score": 151518.02296753335 }, { "content": "fn 
get_flush_policy_from_config(option: &ConfigOption) -> FlushPolicy {\n\n if option.flush_idle_msec > 0 {\n\n FlushPolicy::IdleFlush {\n\n delay_millis: option.flush_idle_msec,\n\n }\n\n } else if option.flush_write_count == 0 {\n\n FlushPolicy::NoFlush\n\n } else if option.flush_write_count == 1 {\n\n FlushPolicy::EveryWrite\n\n } else {\n\n FlushPolicy::CountWrites {\n\n n_writes: option.flush_write_count,\n\n write_tracking: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl MutFileRecords {\n\n pub async fn create(\n\n base_offset: Offset,\n", "file_path": "crates/fluvio-storage/src/mut_records.rs", "rank": 43, "score": 150477.2652372119 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn home_dir() -> Option<PathBuf> {\n\n None\n\n}\n\n\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\n\n\nuse fluvio_types::defaults::{CLI_CONFIG_PATH};\n\nuse crate::{FluvioConfig, FluvioError};\n\n\n\nuse super::TlsPolicy;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ConfigError {\n\n #[error(transparent)]\n\n ConfigFileError(#[from] IoError),\n\n #[error(\"Failed to deserialize Fluvio config\")]\n\n TomlError(#[from] toml::de::Error),\n\n #[error(\"Config has no active profile\")]\n\n NoActiveProfile,\n", "file_path": "crates/fluvio/src/config/config.rs", "rank": 44, "score": 150319.56433354216 }, { "content": "// start server\n\npub fn create_internal_server(addr: String, ctx: DefaultSharedGlobalContext) -> InternalApiServer {\n\n info!(\n\n \"starting SPU: {} at internal service at: {}\",\n\n ctx.local_spu_id(),\n\n addr\n\n );\n\n\n\n FluvioApiServer::new(addr, ctx, InternalService::new())\n\n}\n", "file_path": "crates/fluvio-spu/src/services/internal/mod.rs", "rank": 45, "score": 149765.34051064978 }, { "content": "pub fn create_public_server(addr: String, ctx: DefaultSharedGlobalContext) -> SpuPublicServer {\n\n info!(\n\n spu_id = ctx.local_spu_id(),\n\n %addr,\n\n \"Starting SPU public service:\",\n\n );\n\n\n\n FluvioApiServer::new(addr, ctx, PublicService::new())\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PublicService {\n\n _0: (), // Prevent construction\n\n}\n\n\n\nimpl PublicService {\n\n pub fn new() -> Self {\n\n PublicService { _0: () }\n\n }\n\n}\n", "file_path": "crates/fluvio-spu/src/services/public/mod.rs", "rank": 46, "score": 149765.34051064978 }, { "content": "#[fluvio_test(topic = \"test\")]\n\npub fn smoke(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let smoke_test_case: SmokeTestCase = test_case.into();\n\n\n\n // If connector tests requested\n\n let maybe_connector = if let Some(ref connector_config) =\n\n smoke_test_case.option.connector_config\n\n {\n\n let connector_process = async_process!(async {\n\n test_driver\n\n .connect()\n\n .await\n\n .expect(\"Connecting to cluster failed\");\n\n\n\n // Add a connector CRD\n\n let admin = test_driver.client().admin().await;\n\n // Create a managed connector\n\n let config = ConnectorConfig::from_file(&connector_config).unwrap();\n\n let spec: ManagedConnectorSpec = config.clone().into();\n\n let name = spec.name.clone();\n\n\n", "file_path": "crates/fluvio-test/src/tests/smoke/mod.rs", "rank": 47, "score": 149504.77002731012 }, { "content": "fn create_batches(records: Vec<Record>) -> Vec<Batch> {\n\n if records.write_size(0) < *MAX_BATCH_SIZE_BYTES || records.len() == 1 {\n\n let batch = Batch::from(records);\n\n vec![batch]\n\n } else {\n\n debug!(\"Splitting batch into multiple batches\");\n\n let mut batches = Vec::new();\n\n let mut current_batch = Batch::new();\n\n for record in records {\n\n if 
current_batch.write_size(0) + record.write_size(0) > *MAX_BATCH_SIZE_BYTES {\n\n debug!(\n\n len = current_batch.write_size(0),\n\n \"Created batch with length\"\n\n );\n\n\n\n batches.push(current_batch);\n\n current_batch = Batch::new();\n\n }\n\n current_batch.add_record(record);\n\n }\n", "file_path": "crates/fluvio/src/producer/mod.rs", "rank": 49, "score": 149399.99103142333 }, { "content": "#[smartmodule(array_map, params)]\n\npub fn my_array_map(\n\n _record: &Record,\n\n _opt: &ArrayOpt,\n\n) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_array_map_with_params.rs", "rank": 50, "score": 147270.35889150185 }, { "content": "pub fn cert_dir() -> PathBuf {\n\n std::env::current_dir().unwrap().join(\"tls\").join(\"certs\")\n\n}\n\n\n", "file_path": "crates/fluvio-test-util/tls.rs", "rank": 51, "score": 147193.42182655275 }, { "content": "pub fn create_batch() -> Batch {\n\n create_batch_with_producer(12, 2)\n\n}\n\n\n", "file_path": "crates/fluvio-dataplane-protocol/src/fixture.rs", "rank": 52, "score": 147193.42182655275 }, { "content": "fn parse_key_val(s: &str) -> Result<(String, String)> {\n\n let pos = s.find('=').ok_or_else(|| {\n\n CliError::InvalidArg(format!(\"invalid KEY=value: no `=` found in `{}`\", s))\n\n })?;\n\n Ok((s[..pos].parse()?, s[pos + 1..].parse()?))\n\n}\n\n\n\nimpl ConsumeOpt {\n\n #[instrument(\n\n skip(self, fluvio),\n\n name = \"Consume\",\n\n fields(topic = %self.topic, partition = self.partition),\n\n )]\n\n pub async fn process(self, fluvio: &Fluvio) -> Result<()> {\n\n let maybe_tableformat = if let Some(ref tableformat_name) = self.table_format {\n\n let admin = fluvio.admin().await;\n\n let tableformats = admin.list::<TableFormatSpec, _>(vec![]).await?;\n\n\n\n let mut found = None;\n\n\n", "file_path": "crates/fluvio-cli/src/consume/mod.rs", "rank": 53, "score": 146376.48055818246 }, { "content": "pub fn rand_record() -> Record {\n\n let len: u16 = rand::random();\n\n let record: Vec<u8> = (0..len).map(|_| rand::random::<u8>()).collect();\n\n record\n\n}\n\n\n", "file_path": "crates/fluvio-test/src/tests/concurrent/util.rs", "rank": 54, "score": 145017.50268457015 }, { "content": "pub fn default_convert_from_k8<S>(\n\n k8_obj: K8Obj<S::K8Spec>,\n\n) -> Result<MetadataStoreObject<S, K8MetaItem>, K8ConvertError<S::K8Spec>>\n\nwhere\n\n S: K8ExtendedSpec,\n\n S::IndexKey: TryFrom<String> + Display,\n\n <S::IndexKey as TryFrom<String>>::Error: Debug,\n\n <<S as K8ExtendedSpec>::K8Spec as K8Spec>::Status: Into<S::Status>,\n\n S::K8Spec: Into<S>,\n\n{\n\n let k8_name = k8_obj.metadata.name.clone();\n\n let result: Result<S::IndexKey, _> = k8_name.try_into();\n\n match result {\n\n Ok(key) => {\n\n // convert K8 Spec/Status into Metadata Spec/Status\n\n let local_spec = k8_obj.spec.into();\n\n let local_status = k8_obj.status.into();\n\n\n\n let ctx_item_result: Result<K8MetaItem, _> = k8_obj.metadata.try_into();\n\n match ctx_item_result {\n", "file_path": "crates/fluvio-stream-model/src/store/k8.rs", "rank": 55, "score": 142945.7577505212 }, { "content": "#[fluvio_test(topic = \"test-multiple-partition\")]\n\npub fn multiple_partition(mut test_driver: TestDriver, mut test_case: TestCase) -> TestResult {\n\n println!(\"Testing multiple partition consumer\");\n\n\n\n let option: MultiplePartitionTestCase = test_case.into();\n\n\n\n run_block_on(async {\n\n spawn(producer::producer(test_driver.clone(), option.clone()));\n\n\n\n consumer::consumer_stream(&test_driver, 
option).await;\n\n });\n\n}\n", "file_path": "crates/fluvio-test/src/tests/multiple_partitions/mod.rs", "rank": 56, "score": 141359.007134596 }, { "content": "pub fn main_loop(opt: SpuOpt) {\n\n use std::time::Duration;\n\n\n\n use sysinfo::{System, SystemExt};\n\n use tracing::info;\n\n\n\n use fluvio_future::task::run_block_on;\n\n use fluvio_future::timer::sleep;\n\n // parse configuration (program exits on error)\n\n let (spu_config, tls_acceptor_option) = opt.process_spu_cli_or_exit();\n\n\n\n println!(\"starting spu server (id:{})\", spu_config.id);\n\n\n\n let mut sys = System::new_all();\n\n sys.refresh_all();\n\n info!(version = &*crate::VERSION, \"Platform\");\n\n info!(commit = env!(\"GIT_HASH\"), \"Git\");\n\n info!(name = ?sys.name(),\"System\");\n\n info!(kernel = ?sys.kernel_version(),\"System\");\n\n info!(os_version = ?sys.long_os_version(),\"System\");\n", "file_path": "crates/fluvio-spu/src/start.rs", "rank": 57, "score": 141250.17448820145 }, { "content": "/// Process server based on output type\n\npub fn format_spu_response_output<O>(\n\n out: std::sync::Arc<O>,\n\n spus: Vec<Metadata<SpuSpec>>,\n\n output_type: OutputType,\n\n) -> Result<(), ClusterCliError>\n\nwhere\n\n O: Terminal,\n\n{\n\n if !spus.is_empty() {\n\n let spu_list = ListSpus(spus);\n\n out.render_list(&spu_list, output_type)?;\n\n } else {\n\n t_println!(out, \"no spu\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl TableOutputHandler for ListSpus {\n\n /// table header implementation\n", "file_path": "crates/fluvio-cluster/src/cli/spu/display.rs", "rank": 58, "score": 140970.8807619448 }, { "content": "/// Return separator for hex dump\n\npub fn hex_dump_separator() -> String {\n\n \"------------------------------------------------------------------------------\\n\".to_owned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::bytes_to_hex_dump;\n\n\n\n #[test]\n\n fn test_bytes_to_hex_dump() {\n\n let records: Vec<u8> = vec![\n\n 123, 10, 32, 32, 32, 32, 34, 112, 97, 114, 116, 105, 116, 105, 111, 110, 115, 34, 58,\n\n 32, 91, 10, 32, 32, 32, 32, 32, 32, 32, 32, 123, 10, 32, 32, 32, 32, 32, 32, 32, 32,\n\n 32, 32, 32, 32, 34, 105, 100, 34, 58, 32, 48, 44, 10, 32, 32, 32, 32, 32, 32, 32, 32,\n\n 32, 32, 32, 32, 34, 114, 101, 112, 108, 105, 99, 97, 115, 34, 58, 32, 91, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 49, 44, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 50, 44, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 51, 10, 32, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 93, 10, 32, 32, 32, 32, 32, 32, 32, 32, 125, 10,\n\n 32, 32, 32, 32, 93, 10, 125,\n", "file_path": "crates/fluvio-extension-common/src/common/hex_dump.rs", "rank": 59, "score": 140970.8807619448 }, { "content": "#[instrument(skip(request, auth_ctx, sink, end_event))]\n\npub fn handle_watch_request<AC>(\n\n request: RequestMessage<ObjectApiWatchRequest>,\n\n auth_ctx: &AuthServiceContext<AC>,\n\n sink: ExclusiveFlvSink,\n\n end_event: Arc<StickyEvent>,\n\n) -> Result<(), IoError> {\n\n debug!(\"handling watch request\");\n\n let (header, req) = request.get_header_request();\n\n\n\n match req {\n\n ObjectApiWatchRequest::Topic(_) => WatchController::<TopicSpec>::update(\n\n sink,\n\n end_event,\n\n auth_ctx.global_ctx.topics().clone(),\n\n header,\n\n ),\n\n ObjectApiWatchRequest::Spu(_) => WatchController::<SpuSpec>::update(\n\n sink,\n\n end_event,\n\n auth_ctx.global_ctx.spus().clone(),\n", "file_path": 
"crates/fluvio-sc/src/services/public_api/watch.rs", "rank": 60, "score": 140970.8807619448 }, { "content": "fn validate_key_separator(separator: String) -> std::result::Result<(), String> {\n\n if separator.is_empty() {\n\n return Err(\n\n \"must be non-empty. If using '=', type it as '--key-separator \\\"=\\\"'\".to_string(),\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl ProduceOpt {\n\n pub async fn process(self, fluvio: &Fluvio) -> Result<()> {\n\n let producer = fluvio.topic_producer(&self.topic).await?;\n\n\n\n if self.raw {\n\n // Read all input and send as one record\n\n let buffer = match &self.file {\n\n Some(path) => std::fs::read(&path)?,\n\n None => {\n\n let mut buffer = Vec::new();\n\n std::io::Read::read_to_end(&mut std::io::stdin(), &mut buffer)?;\n", "file_path": "crates/fluvio-cli/src/produce/mod.rs", "rank": 61, "score": 139688.46991684294 }, { "content": "pub fn install_bin<P: AsRef<Path>, B: AsRef<[u8]>>(bin_path: P, bytes: B) -> Result<()> {\n\n use std::io::Write as _;\n\n\n\n let bin_path = bin_path.as_ref();\n\n\n\n // Create directories to bin_path if they do not exist\n\n let parent = bin_path\n\n .parent()\n\n .ok_or_else(|| IoError::new(ErrorKind::NotFound, \"parent directory not found\"))?;\n\n std::fs::create_dir_all(&parent)?;\n\n\n\n // Write bin to temporary file\n\n let tmp_dir = tempdir::TempDir::new_in(parent, \"fluvio-tmp\")?;\n\n let tmp_path = tmp_dir.path().join(\"fluvio\");\n\n let mut tmp_file = File::create(&tmp_path)?;\n\n tmp_file.write_all(bytes.as_ref())?;\n\n\n\n // Mark the file as executable\n\n make_executable(&mut tmp_file)?;\n\n\n\n // Rename (atomic move on unix) temp file to destination\n\n std::fs::rename(&tmp_path, &bin_path)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 62, "score": 138547.96175201354 }, { "content": "/// Prompt the user about a new available version of the Fluvio CLI\n\npub fn prompt_available_update(latest_version: &Version) {\n\n println!();\n\n println!(\"💡 An update to Fluvio is available!\");\n\n println!(\n\n \"💡 Run 'fluvio update' to install v{} of Fluvio\",\n\n &latest_version\n\n );\n\n}\n", "file_path": "crates/fluvio-cli/src/install/update.rs", "rank": 63, "score": 137203.5525655761 }, { "content": " pub trait Spec: Default + Debug + Clone + PartialEq {\n\n const LABEL: &'static str;\n\n type Status: Status;\n\n type Owner: Spec;\n\n type IndexKey: Debug + Eq + Hash + Clone + ToString;\n\n }\n\n\n", "file_path": "crates/fluvio-stream-model/src/core.rs", "rank": 64, "score": 136573.19691222077 }, { "content": " pub trait Status: Default + Debug + Clone + PartialEq {}\n\n\n", "file_path": "crates/fluvio-stream-model/src/core.rs", "rank": 65, "score": 136573.19691222077 }, { "content": "/// Collects the metadata of Fluvio extensions installed on the system\n\npub fn subcommand_metadata() -> Result<Vec<SubcommandMetadata>> {\n\n let mut metadata = Vec::new();\n\n\n\n let extensions = crate::install::get_extensions()?;\n\n for path in extensions {\n\n let result = Command::new(&path).arg(\"metadata\").result();\n\n let output = match result {\n\n Ok(out) => out.stdout,\n\n _ => continue,\n\n };\n\n\n\n let json_result = serde_json::from_slice::<FluvioExtensionMetadata>(&output);\n\n if let Ok(meta) = json_result {\n\n let subcommand = SubcommandMetadata { path, meta };\n\n metadata.push(subcommand);\n\n }\n\n }\n\n\n\n Ok(metadata)\n\n}\n", "file_path": "crates/fluvio-cli/src/metadata.rs", "rank": 66, "score": 135867.1782005803 }, { "content": "pub fn 
variant_size(num: i64) -> usize {\n\n let mut v = (num << 1) ^ (num >> 31);\n\n let mut bytes = 1;\n\n\n\n while (v & 0xffffff80) != 0 {\n\n bytes += 1;\n\n v >>= 7;\n\n }\n\n\n\n bytes\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use std::io::Cursor;\n\n use bytes::{BytesMut, BufMut};\n\n use super::varint_decode;\n\n use super::variant_encode;\n\n use super::variant_size;\n", "file_path": "crates/fluvio-protocol/src/core/varint.rs", "rank": 67, "score": 135867.1782005803 }, { "content": "/// Detects the target triple of the current build and returns\n\n/// the name of a compatible build target on packages.fluvio.io.\n\n///\n\n/// Returns `Some(Target)` if there is a compatible target, or\n\n/// `None` if this target is unsupported or has no compatible target.\n\npub fn package_target() -> Result<Target, Error> {\n\n let target = PACKAGE_TARGET.parse()?;\n\n Ok(target)\n\n}\n\n\n\n/// An object representing a specific build target for an artifact\n\n/// being managed by fluvio-index.\n\n///\n\n/// This type is generally constructed using `FromStr` via the\n\n/// `parse` method.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use fluvio_index::Target;\n\n/// let target: Target = \"x86_64-unknown-linux-musl\".parse().unwrap();\n\n/// ```\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize)]\n\n#[serde(transparent)]\n\npub struct Target(Cow<'static, str>);\n", "file_path": "crates/fluvio-package-index/src/target.rs", "rank": 68, "score": 135867.1782005803 }, { "content": "/// Render a single check status\n\npub fn render_check_status(check_status: &CheckStatus) {\n\n println!(\"{}\", check_status.msg());\n\n}\n\n\n", "file_path": "crates/fluvio-cluster/src/check/render.rs", "rank": 69, "score": 135318.90488540006 }, { "content": "/// Render a single check result\n\npub fn render_check_result(check_result: &CheckResult) {\n\n println!(\"{}\", check_result.msg());\n\n}\n\n\n", "file_path": "crates/fluvio-cluster/src/check/render.rs", "rank": 70, "score": 135318.90488540006 }, { "content": "pub fn hash_messages(messages: &[String]) -> String {\n\n let mut hasher = md5::Md5::new();\n\n for m in messages.iter() {\n\n hasher.update(m);\n\n }\n\n format!(\"{:X?}\", hasher.finalize())\n\n}\n\n\n", "file_path": "crates/fluvio-test/src/tests/concurrent/util.rs", "rank": 71, "score": 133892.30121200386 }, { "content": "pub fn hash_record(record: &[u8]) -> String {\n\n format!(\"{:X}\", md5::Md5::digest(record))\n\n}\n", "file_path": "crates/fluvio-test/src/tests/concurrent/util.rs", "rank": 72, "score": 133892.30121200386 }, { "content": "/// Wrap an inner record stream and only stream until a given number of records have been fetched.\n\n///\n\n/// This is used for \"disable continuous\" mode. In this mode, we first make a FetchOffsetPartitionResponse\n\n/// in order to see the starting and ending offsets currently available for this partition.\n\n/// Based on the starting offset the caller asks for, we can figure out the \"record count\", or\n\n/// how many records from the start onward we know for sure we can stream without waiting.\n\n/// We then use `TakeRecords` to stop the stream as soon as we reach that point, so the user\n\n/// (e.g. 
on the CLI) does not spend any time waiting for new records to be produced, they are\n\n/// simply given all the records that are already available.\n\nstruct TakeRecords<S> {\n\n remaining: i64,\n\n stream: S,\n\n}\n\n\n\nimpl<S> TakeRecords<S>\n\nwhere\n\n S: Stream<Item = Result<DefaultStreamFetchResponse, ErrorCode>> + std::marker::Unpin,\n\n{\n\n pub fn new(stream: S, until: i64) -> Self {\n\n Self {\n\n remaining: until,\n\n stream,\n\n }\n\n }\n\n}\n\n\n\nimpl<S> Stream for TakeRecords<S>\n\nwhere\n\n S: Stream<Item = Result<DefaultStreamFetchResponse, ErrorCode>> + std::marker::Unpin,\n", "file_path": "crates/fluvio/src/consumer.rs", "rank": 73, "score": 132527.06518375617 }, { "content": "pub fn generate_map_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_code = &func.func;\n\n let user_fn = &func.name;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n };\n\n\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/map.rs", "rank": 74, "score": 132303.1877333024 }, { "content": "pub fn generate_filter_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_fn = &func.name;\n\n let user_code = func.func;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n };\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n\n super:: #user_fn(&record, &params)\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/filter.rs", "rank": 75, "score": 132303.1877333024 }, { "content": "pub fn generate_aggregate_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_code = &func.func;\n\n let user_fn = &func.name;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.base.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n };\n\n\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/aggregate.rs", "rank": 76, "score": 132303.1877333024 }, { "content": "pub fn generate_join_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_code = &func.func;\n\n let user_fn = &func.name;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n };\n\n\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/join.rs", "rank": 77, "score": 132303.1877333024 }, { "content": "fn default_option() -> ConfigOption {\n\n ConfigOption {\n\n segment_max_bytes: 10000,\n\n base_dir: temp_dir().join(TEST_REP_DIR),\n\n index_max_interval_bytes: 1000,\n\n index_max_bytes: 1000,\n\n ..Default::default()\n\n 
}\n\n}\n\n\n", "file_path": "crates/fluvio-storage/tests/replica_test.rs", "rank": 78, "score": 132140.54543533764 }, { "content": "#[proc_macro_derive(RequestApi, attributes(varint, fluvio))]\n\npub fn fluvio_request(tokens: TokenStream) -> TokenStream {\n\n let inputs = parse_macro_input![tokens as syn::DeriveInput];\n\n\n\n let expanded = generate_request_traits(&inputs);\n\n expanded.into()\n\n}\n\n\n\n/// Custom derive for generating default structure\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// use fluvio_protocol::derive::FluvioDefault;\n\n///\n\n/// #[derive(FluvioDefault)]\n\n/// #[fluvio(default)]\n\n/// pub struct SimpleRecord {\n\n/// #[fluvio(default = \"12\")]\n\n/// val: u8\n\n/// }\n\n///\n\n/// let record = SimpleRecord::default();\n\n/// assert_eq!(record.val, 12);\n\n/// ```\n\n///\n\n/// `default` assignment can be any Rust expression.\n", "file_path": "crates/fluvio-protocol-derive/src/lib.rs", "rank": 79, "score": 132007.65353182782 }, { "content": "#[proc_macro_derive(Decoder, attributes(varint, fluvio))]\n\npub fn fluvio_decode(tokens: TokenStream) -> TokenStream {\n\n let input = parse_macro_input![tokens as ast::DeriveItem];\n\n let expanded = generate_decode_trait_impls(&input);\n\n\n\n expanded.into()\n\n}\n\n\n\n/// Custom derive for encoding structure or enum to bytes using Kafka protocol format.\n\n/// This assumes all fields(or enum variants) implement kafka encode traits.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use fluvio_protocol::Encoder;\n\n///\n\n/// #[derive(Encoder)]\n\n/// pub struct SimpleRecord {\n\n/// val: u8\n\n/// }\n\n///\n", "file_path": "crates/fluvio-protocol-derive/src/lib.rs", "rank": 80, "score": 132007.65353182782 }, { "content": "#[proc_macro_derive(FluvioDefault, attributes(fluvio))]\n\npub fn fluvio_default(tokens: TokenStream) -> TokenStream {\n\n let input = parse_macro_input![tokens as ast::DeriveItem];\n\n let expanded = generate_default_trait_impls(&input);\n\n\n\n expanded.into()\n\n}\n", "file_path": "crates/fluvio-protocol-derive/src/lib.rs", "rank": 81, "score": 132007.65353182782 }, { "content": "pub fn create_recordset(num_records: u16) -> RecordSet {\n\n let records = RecordSet::default();\n\n records.add(create_batch_with_producer(12, num_records))\n\n}\n\n\n\npub const TEST_RECORD: &[u8] = &[10, 20];\n\n\n", "file_path": "crates/fluvio-dataplane-protocol/src/fixture.rs", "rank": 82, "score": 132007.65353182782 }, { "content": "#[proc_macro_derive(SmartOpt)]\n\npub fn smartopt_derive(input: TokenStream) -> TokenStream {\n\n use crate::generator::opt::impl_smart_opt;\n\n let input = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n impl_smart_opt(input).unwrap_or_else(|err| err.into_compile_error().into())\n\n}\n", "file_path": "crates/fluvio-smartmodule-derive/src/lib.rs", "rank": 83, "score": 132007.65353182782 }, { "content": "#[proc_macro_derive(Encoder, attributes(varint, fluvio))]\n\npub fn fluvio_encode(tokens: TokenStream) -> TokenStream {\n\n let input = parse_macro_input![tokens as ast::DeriveItem];\n\n let expanded = generate_encode_trait_impls(&input);\n\n\n\n expanded.into()\n\n}\n\n\n", "file_path": "crates/fluvio-protocol-derive/src/lib.rs", "rank": 84, "score": 132007.65353182782 }, { "content": "#[proc_macro]\n\npub fn fluvio_api(tokens: TokenStream) -> TokenStream {\n\n let inputs = parse_macro_input![tokens as syn::DeriveInput];\n\n\n\n let expanded = parse_and_generate_api(&inputs);\n\n expanded.into()\n\n}\n\n\n\n/// Custom derive for implementing Request trait.\n\n/// This derives 
requires `fluvio`\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use fluvio_protocol::{Encoder, Decoder};\n\n/// use fluvio_protocol::api::Request;\n\n/// use fluvio_protocol::derive::RequestApi as Request;\n\n///\n\n/// #[fluvio(default,api_min_version = 5, api_max_version = 6, api_key = 10, response = \"SimpleResponse\")]\n\n/// #[derive(Debug, Default, Encoder, Decoder, Request)]\n\n/// pub struct SimpleRequest {\n", "file_path": "crates/fluvio-protocol-derive/src/lib.rs", "rank": 85, "score": 132007.65353182782 }, { "content": "#[smartmodule(filter)]\n\npub fn my_filter(_record: &Record) -> Result<bool> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_filter.rs", "rank": 86, "score": 130959.03396039795 }, { "content": "#[smartmodule(filter)]\n\npub fn filter(record: &Record) -> Result<bool> {\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n Ok(string.contains('a'))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter/src/lib.rs", "rank": 87, "score": 130959.03396039795 }, { "content": "#[allow(clippy::needless_range_loop)]\n\npub fn bytes_to_hex_dump(record: &[u8]) -> String {\n\n let cols = 16;\n\n let record_cnt = record.len();\n\n let mut result = String::new();\n\n let mut collector = String::new();\n\n\n\n for row_idx in 0..record_cnt {\n\n // colunn index\n\n if row_idx % cols == 0 {\n\n result.push_str(&format!(\"{:08x}\", row_idx));\n\n }\n\n\n\n // spacing half way\n\n if row_idx % (cols / 2) == 0 {\n\n result.push(' ');\n\n }\n\n\n\n // convert and add character to collector\n\n collector.push_str(&byte_to_string(&record[row_idx]));\n\n\n", "file_path": "crates/fluvio-extension-common/src/common/hex_dump.rs", "rank": 88, "score": 130207.18959437606 }, { "content": " pub trait MetadataItem: Clone + Default + fmt::Debug + PartialEq {\n\n type UId: PartialEq;\n\n\n\n fn uid(&self) -> &Self::UId;\n\n\n\n /// checkif item is newer\n\n fn is_newer(&self, another: &Self) -> bool;\n\n\n\n /// if object is process of being deleted\n\n fn is_being_deleted(&self) -> bool {\n\n false\n\n }\n\n\n\n /// set string labels\n\n fn set_labels<T: Into<String>>(self, _labels: Vec<(T, T)>) -> Self {\n\n self\n\n }\n\n\n\n /// get string labels\n\n fn get_labels(&self) -> HashMap<String, String> {\n\n HashMap::new()\n\n }\n\n }\n\n\n", "file_path": "crates/fluvio-stream-model/src/core.rs", "rank": 89, "score": 129972.1900669727 }, { "content": "#[derive(Debug)]\n\nstruct CreateServicePermission;\n\n\n\n#[async_trait]\n\nimpl ClusterCheck for CreateServicePermission {\n\n async fn perform_check(&self) -> CheckResult {\n\n check_permission(RESOURCE_SERVICE)\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-cluster/src/check/mod.rs", "rank": 90, "score": 129839.0050001173 }, { "content": "#[derive(Debug)]\n\nstruct CreateCrdPermission;\n\n\n\n#[async_trait]\n\nimpl ClusterCheck for CreateCrdPermission {\n\n async fn perform_check(&self) -> CheckResult {\n\n check_permission(RESOURCE_CRD)\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-cluster/src/check/mod.rs", "rank": 91, "score": 129839.0050001173 }, { "content": "pub fn generate_array_map_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_code = &func.func;\n\n let user_fn = &func.name;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n 
};\n\n\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/array_map.rs", "rank": 92, "score": 129483.32501935205 }, { "content": "pub fn generate_filter_map_smartmodule(func: &SmartModuleFn, has_params: bool) -> TokenStream {\n\n let user_code = &func.func;\n\n let user_fn = &func.name;\n\n\n\n let params_parsing = if has_params {\n\n quote!(\n\n use std::convert::TryInto;\n\n\n\n let params = match smartmodule_input.params.try_into(){\n\n Ok(params) => params,\n\n Err(err) => return SmartModuleInternalError::ParsingExtraParams as i32,\n\n };\n\n\n\n )\n\n } else {\n\n quote!()\n\n };\n\n\n\n let function_call = if has_params {\n\n quote!(\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/filter_map.rs", "rank": 93, "score": 129483.32501935205 }, { "content": "#[smartmodule(filter)]\n\npub fn filter(record: &Record) -> Result<bool> {\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n let int = string\n\n .parse::<i32>()\n\n .map_err(FirstErrorWrapper::from)\n\n .map_err(SecondErrorWrapper::from)?;\n\n Ok(int % 2 == 0)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter_odd/src/lib.rs", "rank": 94, "score": 129074.38628022191 }, { "content": "#[smartmodule(filter)]\n\npub fn filter(record: &Record) -> Result<bool> {\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n\n\n // Check whether the Record contains a Social Security number\n\n let social_security_regex = Regex::new(r\"\\d{3}-\\d{2}-\\d{4}\").unwrap();\n\n let has_ss = social_security_regex.is_match(string);\n\n\n\n // Only accept records that _do not_ have social security numbers in them\n\n Ok(!has_ss)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter_regex/src/lib.rs", "rank": 95, "score": 129074.38628022191 }, { "content": "fn build_consumer_config(test_case: ConsumerTestCase) -> ConsumerConfig {\n\n let mut config = ConsumerConfig::builder();\n\n\n\n // continuous\n\n if test_case.option.num_records == 0 {\n\n config.disable_continuous(true);\n\n }\n\n\n\n // max bytes\n\n if let Some(max_bytes) = test_case.option.max_bytes {\n\n config.max_bytes(max_bytes as i32);\n\n }\n\n\n\n config.build().expect(\"Couldn't build consumer config\")\n\n}\n\n\n\nasync fn get_single_stream(\n\n consumer: PartitionConsumer,\n\n offset: Offset,\n\n test_case: ConsumerTestCase,\n", "file_path": "crates/fluvio-test/src/tests/consumer.rs", "rank": 96, "score": 128844.34123964416 }, { "content": "#[test]\n\nfn test_decode() {\n\n let data = [0x01, 0x01];\n\n\n\n let mut buf = Cursor::new(data);\n\n\n\n let result = Parent::decode_from(&mut buf, 0);\n\n assert!(result.is_ok());\n\n let val = result.unwrap();\n\n assert!(val.child.is_some());\n\n}\n", "file_path": "crates/fluvio-protocol/tests/option.rs", "rank": 97, "score": 128322.21223172075 }, { "content": "#[test]\n\nfn test_encode() {\n\n let mut v1 = Parent::default();\n\n let child = Child { flag: true };\n\n\n\n v1.child = Some(child);\n\n let mut src = vec![];\n\n let result = v1.encode(&mut src, 0);\n\n assert!(result.is_ok());\n\n assert_eq!(src.len(), 2);\n\n assert_eq!(src[0], 0x01);\n\n assert_eq!(src[1], 0x01);\n\n}\n\n\n", "file_path": "crates/fluvio-protocol/tests/option.rs", "rank": 98, "score": 128322.21223172075 }, { "content": "pub mod producer;\n\npub mod consumer;\n\n\n\nuse std::any::Any;\n\nuse structopt::StructOpt;\n\n\n\nuse fluvio_future::task::spawn;\n\nuse 
fluvio_test_derive::fluvio_test;\n\nuse fluvio_test_util::test_meta::environment::EnvironmentSetup;\n\nuse fluvio_test_util::test_meta::{TestOption, TestCase};\n\nuse fluvio_future::task::run_block_on;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MultiplePartitionTestCase {\n\n pub environment: EnvironmentSetup,\n\n pub option: MultiplePartitionTestOption,\n\n}\n\n\n\nimpl From<TestCase> for MultiplePartitionTestCase {\n\n fn from(test_case: TestCase) -> Self {\n", "file_path": "crates/fluvio-test/src/tests/multiple_partitions/mod.rs", "rank": 99, "score": 45.67853374869286 } ]
Rust
src/octez/node.rs
tzConnectBerlin/que-pasa
c79769fbe7aa8d0ef3e1d104bb45f68bf63a7c7d
use crate::octez::block::{Block, LevelMeta}; use anyhow::{anyhow, Context, Result}; use backoff::{retry, Error, ExponentialBackoff}; use chrono::{DateTime, Utc}; use curl::easy::Easy; use serde::Deserialize; use std::str::FromStr; use std::time::Duration; #[derive(Clone)] pub struct NodeClient { node_url: String, chain: String, timeout: Duration, } impl NodeClient { pub fn new(node_url: String, chain: String) -> Self { Self { node_url, chain, timeout: Duration::from_secs(20), } } pub(crate) fn head(&self) -> Result<LevelMeta> { let (meta, _) = self.level_json_internal("head")?; Ok(meta) } pub(crate) fn level_json(&self, level: u32) -> Result<(LevelMeta, Block)> { self.level_json_internal(&format!("{}", level)) } fn level_json_internal(&self, level: &str) -> Result<(LevelMeta, Block)> { let (body, _) = self .load_retry_on_nonjson(&format!("blocks/{}", level)) .with_context(|| { format!("failed to get level_json for level={}", level) })?; let mut deserializer = serde_json::Deserializer::from_str(&body); deserializer.disable_recursion_limit(); let block: Block = Block::deserialize(&mut deserializer) .with_context(|| anyhow!("failed to deserialize block json"))?; let meta = LevelMeta { level: block.header.level as u32, hash: Some(block.hash.clone()), prev_hash: Some(block.header.predecessor.clone()), baked_at: Some(Self::timestamp_from_block(&block)?), }; Ok((meta, block)) } pub(crate) fn get_contract_storage_definition( &self, contract_id: &str, level: Option<u32>, ) -> Result<serde_json::Value> { let level = match level { Some(x) => format!("{}", x), None => "head".to_string(), }; let (_, json) = self .load_retry_on_nonjson(&format!( "blocks/{}/context/contracts/{}/script", level, contract_id )) .with_context(|| { format!( "failed to get script data for contract='{}', level={}", contract_id, level ) })?; for entry in json["code"].as_array().ok_or_else(|| { anyhow!("malformed script response (missing 'code' field)") })? { if let Some(prim) = entry.as_object().ok_or_else(|| anyhow!("malformed script response ('code' array element is not an object)"))?.get("prim") { if prim == &serde_json::Value::String("storage".to_string()) { return Ok(entry["args"].as_array().ok_or_else(|| anyhow!("malformed script response ('storage' entry does not have 'args' field)"))?[0].clone()); } } else { return Err(anyhow!("malformed script response ('code' array element does not have a field 'prim')")); } } Err(anyhow!("malformed script response ('code' array does not have 'storage' entry)")) } fn parse_rfc3339(rfc3339: &str) -> Result<DateTime<Utc>> { let fixedoffset = chrono::DateTime::parse_from_rfc3339(rfc3339)?; Ok(fixedoffset.with_timezone(&Utc)) } fn timestamp_from_block(block: &Block) -> Result<DateTime<Utc>> { Self::parse_rfc3339(block.header.timestamp.as_str()) } fn load_retry_on_nonjson( &self, endpoint: &str, ) -> Result<(String, serde_json::Value)> { fn transient_err(e: anyhow::Error) -> Error<anyhow::Error> { if e.is::<curl::Error>() { let curl_err = e.downcast::<curl::Error>(); if curl_err.is_err() { let downcast_err = curl_err.err().unwrap(); error!("unexpected err on possibly transcient err downcast: {}", downcast_err); return Error::Permanent(downcast_err); } match curl_err.as_ref().ok().unwrap().code() { 7 | 28 => { warn!("transient node communication error, retrying.. 
err={:?}", curl_err); return Error::Transient(anyhow!("{:?}", curl_err)); } _ => {} }; let curl_err_val = curl_err.ok().unwrap(); return Error::Permanent(anyhow!( "{} {} (curl status code: {})", curl_err_val.description(), curl_err_val .extra_description() .map(|descr| format!("(verbose: {})", descr)) .unwrap_or_else(|| "".to_string()), curl_err_val.code(), )); } warn!("transient node communication error, retrying.. err={:?}", e); Error::Transient(e) } let op = || -> Result<(String, serde_json::Value)> { let body = self.load(endpoint)?; let mut deserializer = serde_json::Deserializer::from_str(&body); deserializer.disable_recursion_limit(); let deserializer = serde_stacker::Deserializer::new(&mut deserializer); let json = serde_json::Value::deserialize(deserializer)?; Ok((body, json)) }; retry(ExponentialBackoff::default(), || { op().map_err(transient_err) }) .map_err(|e| anyhow!(e)) } fn load(&self, endpoint: &str) -> Result<String> { let uri = format!("{}/chains/{}/{}", self.node_url, self.chain, endpoint); debug!("loading: {}", uri); let mut resp_data = Vec::new(); let mut handle = Easy::new(); handle .timeout(self.timeout) .with_context(|| { format!( "failed to set timeout to curl handle for uri='{}'", uri ) })?; handle.url(&uri).with_context(|| { format!("failed to call endpoint, uri='{}'", uri) })?; { let mut transfer = handle.transfer(); transfer.write_function(|new_data| { resp_data.extend_from_slice(new_data); Ok(new_data.len()) })?; transfer.perform().with_context(|| { format!("failed load response for uri='{}'", uri) })?; } let body = std::str::from_utf8(&resp_data).with_context(|| { format!("failed to parse response as utf8 for uri='{}'", uri) })?; Ok(body.to_string()) } } pub(crate) trait StorageGetter { fn get_contract_storage( &self, contract_id: &str, level: u32, ) -> Result<serde_json::Value>; fn get_bigmap_value( &self, level: u32, bigmap_id: i32, keyhash: &str, ) -> Result<Option<serde_json::Value>>; } impl StorageGetter for NodeClient { fn get_contract_storage( &self, contract_id: &str, level: u32, ) -> Result<serde_json::Value> { self.load_retry_on_nonjson(&format!( "blocks/{}/context/contracts/{}/storage", level, contract_id )) .map(|(_, json)| json) .with_context(|| { format!( "failed to get storage for contract='{}', level={}", contract_id, level ) }) } fn get_bigmap_value( &self, level: u32, bigmap_id: i32, keyhash: &str, ) -> Result<Option<serde_json::Value>> { let body = self.load(&format!( "blocks/{}/context/big_maps/{}/{}", level, bigmap_id, keyhash, )) .with_context(|| { format!( "failed to get value for bigmap (level={}, bigmap_id={}, keyhash={})", level, bigmap_id, keyhash, ) })?; Ok(serde_json::Value::from_str(&body).ok()) } }
use crate::octez::block::{Block, LevelMeta}; use anyhow::{anyhow, Context, Result}; use backoff::{retry, Error, ExponentialBackoff}; use chrono::{DateTime, Utc}; use curl::easy::Easy; use serde::Deserialize; use std::str::FromStr; use std::time::Duration; #[derive(Clone)] pub struct NodeClient { node_url: String, chain: String, timeout: Duration, } impl NodeClient { pub fn new(node_url: String, chain: String) -> Self { Self { node_url, chain, timeout: Duration::from_secs(20), } } pub(crate) fn head(&self) -> Result<LevelMeta> { let (meta, _) = self.level_json_internal("head")?; Ok(meta) } pub(crate) fn level_json(&self, level: u32) -> Result<(LevelMeta, Block)> { self.level_json_internal(&format!("{}", level)) }
pub(crate) fn get_contract_storage_definition( &self, contract_id: &str, level: Option<u32>, ) -> Result<serde_json::Value> { let level = match level { Some(x) => format!("{}", x), None => "head".to_string(), }; let (_, json) = self .load_retry_on_nonjson(&format!( "blocks/{}/context/contracts/{}/script", level, contract_id )) .with_context(|| { format!( "failed to get script data for contract='{}', level={}", contract_id, level ) })?; for entry in json["code"].as_array().ok_or_else(|| { anyhow!("malformed script response (missing 'code' field)") })? { if let Some(prim) = entry.as_object().ok_or_else(|| anyhow!("malformed script response ('code' array element is not an object)"))?.get("prim") { if prim == &serde_json::Value::String("storage".to_string()) { return Ok(entry["args"].as_array().ok_or_else(|| anyhow!("malformed script response ('storage' entry does not have 'args' field)"))?[0].clone()); } } else { return Err(anyhow!("malformed script response ('code' array element does not have a field 'prim')")); } } Err(anyhow!("malformed script response ('code' array does not have 'storage' entry)")) } fn parse_rfc3339(rfc3339: &str) -> Result<DateTime<Utc>> { let fixedoffset = chrono::DateTime::parse_from_rfc3339(rfc3339)?; Ok(fixedoffset.with_timezone(&Utc)) } fn timestamp_from_block(block: &Block) -> Result<DateTime<Utc>> { Self::parse_rfc3339(block.header.timestamp.as_str()) } fn load_retry_on_nonjson( &self, endpoint: &str, ) -> Result<(String, serde_json::Value)> { fn transient_err(e: anyhow::Error) -> Error<anyhow::Error> { if e.is::<curl::Error>() { let curl_err = e.downcast::<curl::Error>(); if curl_err.is_err() { let downcast_err = curl_err.err().unwrap(); error!("unexpected err on possibly transcient err downcast: {}", downcast_err); return Error::Permanent(downcast_err); } match curl_err.as_ref().ok().unwrap().code() { 7 | 28 => { warn!("transient node communication error, retrying.. err={:?}", curl_err); return Error::Transient(anyhow!("{:?}", curl_err)); } _ => {} }; let curl_err_val = curl_err.ok().unwrap(); return Error::Permanent(anyhow!( "{} {} (curl status code: {})", curl_err_val.description(), curl_err_val .extra_description() .map(|descr| format!("(verbose: {})", descr)) .unwrap_or_else(|| "".to_string()), curl_err_val.code(), )); } warn!("transient node communication error, retrying.. 
err={:?}", e); Error::Transient(e) } let op = || -> Result<(String, serde_json::Value)> { let body = self.load(endpoint)?; let mut deserializer = serde_json::Deserializer::from_str(&body); deserializer.disable_recursion_limit(); let deserializer = serde_stacker::Deserializer::new(&mut deserializer); let json = serde_json::Value::deserialize(deserializer)?; Ok((body, json)) }; retry(ExponentialBackoff::default(), || { op().map_err(transient_err) }) .map_err(|e| anyhow!(e)) } fn load(&self, endpoint: &str) -> Result<String> { let uri = format!("{}/chains/{}/{}", self.node_url, self.chain, endpoint); debug!("loading: {}", uri); let mut resp_data = Vec::new(); let mut handle = Easy::new(); handle .timeout(self.timeout) .with_context(|| { format!( "failed to set timeout to curl handle for uri='{}'", uri ) })?; handle.url(&uri).with_context(|| { format!("failed to call endpoint, uri='{}'", uri) })?; { let mut transfer = handle.transfer(); transfer.write_function(|new_data| { resp_data.extend_from_slice(new_data); Ok(new_data.len()) })?; transfer.perform().with_context(|| { format!("failed load response for uri='{}'", uri) })?; } let body = std::str::from_utf8(&resp_data).with_context(|| { format!("failed to parse response as utf8 for uri='{}'", uri) })?; Ok(body.to_string()) } } pub(crate) trait StorageGetter { fn get_contract_storage( &self, contract_id: &str, level: u32, ) -> Result<serde_json::Value>; fn get_bigmap_value( &self, level: u32, bigmap_id: i32, keyhash: &str, ) -> Result<Option<serde_json::Value>>; } impl StorageGetter for NodeClient { fn get_contract_storage( &self, contract_id: &str, level: u32, ) -> Result<serde_json::Value> { self.load_retry_on_nonjson(&format!( "blocks/{}/context/contracts/{}/storage", level, contract_id )) .map(|(_, json)| json) .with_context(|| { format!( "failed to get storage for contract='{}', level={}", contract_id, level ) }) } fn get_bigmap_value( &self, level: u32, bigmap_id: i32, keyhash: &str, ) -> Result<Option<serde_json::Value>> { let body = self.load(&format!( "blocks/{}/context/big_maps/{}/{}", level, bigmap_id, keyhash, )) .with_context(|| { format!( "failed to get value for bigmap (level={}, bigmap_id={}, keyhash={})", level, bigmap_id, keyhash, ) })?; Ok(serde_json::Value::from_str(&body).ok()) } }
fn level_json_internal(&self, level: &str) -> Result<(LevelMeta, Block)> {
    let (body, _) = self
        .load_retry_on_nonjson(&format!("blocks/{}", level))
        .with_context(|| {
            format!("failed to get level_json for level={}", level)
        })?;
    let mut deserializer = serde_json::Deserializer::from_str(&body);
    deserializer.disable_recursion_limit();
    let block: Block = Block::deserialize(&mut deserializer)
        .with_context(|| anyhow!("failed to deserialize block json"))?;
    let meta = LevelMeta {
        level: block.header.level as u32,
        hash: Some(block.hash.clone()),
        prev_hash: Some(block.header.predecessor.clone()),
        baked_at: Some(Self::timestamp_from_block(&block)?),
    };
    Ok((meta, block))
}
function_block-full_function
[ { "content": "fn is_implicit_active(level: u32, contract_address: &str) -> bool {\n\n // liquidity baking has 2 implicit contract creation events in the block prior to Granada's activation block\n\n level == LIQUIDITY_BAKING_LEVEL\n\n && (contract_address == LIQUIDITY_BAKING\n\n || contract_address == LIQUIDITY_BAKING_TOKEN)\n\n}\n\n\n", "file_path": "src/octez/block.rs", "rank": 0, "score": 144920.46602961476 }, { "content": "// init config and return it also.\n\npub fn init_config() -> Result<Config> {\n\n let mut config: Config = Default::default();\n\n let matches = App::new(\"Tezos Contract Baby Indexer\")\n\n .version(QUEPASA_VERSION)\n\n .author(\"Rick Klomp <rick.klomp@tzconect.com>\")\n\n .about(\"An indexer for specific contracts\")\n\n .arg(\n\n Arg::with_name(\"contract_settings\")\n\n .short(\"c\")\n\n .long(\"contract-settings\")\n\n .value_name(\"CONTRACT_SETTINGS\")\n\n .env(\"CONTRACT_SETTINGS\")\n\n .help(\"path to the settings yaml (for contract settings)\")\n\n .takes_value(true)\n\n )\n\n .arg(\n\n Arg::with_name(\"contracts\")\n\n .long(\"contracts\")\n\n .value_name(\"CONTRACTS\")\n\n .help(\"set of additional contract settings (in syntax: <name>=<address>)\")\n", "file_path": "src/config.rs", "rank": 1, "score": 141787.3561807957 }, { "content": "fn decode_bs58_address(hex: &str) -> Result<String> {\n\n if hex.len() != 44 {\n\n return Err(anyhow!(\n\n \"44 length byte arrays only supported right now, got {} (which has len={})\",\n\n hex, hex.len()\n\n ));\n\n }\n\n let implicit = &hex[0..2] == \"00\";\n\n let kt = &hex[0..2] == \"01\";\n\n let _type = &hex[2..4];\n\n let rest = &hex[4..];\n\n let new_hex = if kt {\n\n format!(\"025a79{}\", &hex[2..42])\n\n } else if implicit {\n\n match _type {\n\n \"00\" => format!(\"06a19f{}\", rest),\n\n \"01\" => format!(\"06a1a1{}\", rest),\n\n \"02\" => format!(\"06a1a4{}\", rest),\n\n _ => return Err(anyhow!(\"Did not recognise byte array {}\", hex)),\n\n }\n\n } else {\n\n return Err(anyhow!(\"Unknown format {}\", hex));\n\n };\n\n let encoded = bs58::encode(hex::decode(new_hex.as_str())?)\n\n .with_check()\n\n .into_string();\n\n Ok(encoded)\n\n}\n\n\n", "file_path": "src/storage_value/parser.rs", "rank": 2, "score": 123350.86173023909 }, { "content": "fn schema_version(v: &str) -> String {\n\n match v {\n\n // The first versions of Que Pasa didn't follow the semantics of using\n\n // minor versioning for non-db schema related changes only\n\n \"1.0.0\" | \"1.0.1\" | \"1.0.2\" | \"1.0.3\" | \"1.0.4\" | \"1.0.5\" => {\n\n return v.to_string();\n\n }\n\n _ => {}\n\n };\n\n // Minor version bumps (_._.x) have same db schemas\n\n v.to_string()\n\n .rsplit_once(\".\")\n\n .map(|(db_ver, _)| db_ver.to_string())\n\n .unwrap_or_else(|| \"\".to_string())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 96797.66324208395 }, { "content": "fn init_denylist() -> HashMap<String, ()> {\n\n let mut m = HashMap::new();\n\n // Following contract is denylisted because:\n\n // type is: Pair (KeyHash, Map (String, Timestamp))\n\n // but values are of shape: [Elt]\n\n // see eg:\n\n // https://better-call.dev/mainnet/opg/opNPz4UwVgKvFkUeLDczz7yZhPYyj5VBnptqgQgfPj6Ux6yUzHa/contents\n\n m.insert(\"KT1FHAtLjG6S6tfjmrDeEySVLeP8a16T4Ngr\".to_string(), ());\n\n m\n\n}\n", "file_path": "src/contract_denylist.rs", "rank": 4, "score": 95908.4438602694 }, { "content": "// get range of args in the form 1,2,3 or 1-3. 
All ranges inclusive.\n\nfn range(arg: &str) -> Vec<u32> {\n\n let mut result = vec![];\n\n for h in arg.split(',') {\n\n let s = String::from(h);\n\n match s.find('-') {\n\n Some(_) => {\n\n let fromto: Vec<String> =\n\n s.split('-').map(String::from).collect();\n\n for i in fromto[0].parse::<u32>().unwrap()\n\n ..fromto[1].parse::<u32>().unwrap() + 1\n\n {\n\n result.push(i);\n\n }\n\n }\n\n None => {\n\n result.push(s.parse::<u32>().unwrap());\n\n }\n\n }\n\n }\n\n result.sort_unstable();\n\n result\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 5, "score": 90846.39122546477 }, { "content": "#[test]\n\nfn test_get_origination_operations_from_block() {\n\n use crate::octez::block::Block;\n\n let test_file =\n\n \"test/KT1U7Adyu5A7JWvEVSKjJEkG2He2SU1nATfq.level-132091.json\";\n\n let contract_id = \"KT1U7Adyu5A7JWvEVSKjJEkG2He2SU1nATfq\";\n\n let block: Block =\n\n serde_json::from_str(&debug::load_test(test_file)).unwrap();\n\n assert!(block.has_contract_origination(&contract_id));\n\n\n\n for level in vec![\n\n 132343, 123318, 123327, 123339, 128201, 132201, 132211, 132219, 132222,\n\n 132240, 132242, 132259, 132262, 132278, 132282, 132285, 132298, 132300,\n\n 132343, 132367, 132383, 132384, 132388, 132390, 135501, 138208, 149127,\n\n ] {\n\n let filename = format!(\n\n \"test/KT1U7Adyu5A7JWvEVSKjJEkG2He2SU1nATfq.level-{}.json\",\n\n level\n\n );\n\n println!(\"testing {}\", filename);\n\n let level_block: Block =\n\n serde_json::from_str(&debug::load_test(&filename)).unwrap();\n\n\n\n assert!(!level_block.has_contract_origination(&contract_id));\n\n }\n\n}\n\n\n", "file_path": "src/highlevel.rs", "rank": 6, "score": 85030.5248660236 }, { "content": "#[test]\n\nfn test_process_block() {\n\n // this tests the generated table structures against known good ones.\n\n // if it fails for a good reason, the output can be used to repopulate the\n\n // test files. 
To do this, execute script/generate_test_output.bash\n\n use crate::octez::block::Block;\n\n use crate::sql::insert;\n\n use crate::sql::insert::Insert;\n\n use crate::sql::table_builder::{TableBuilder, TableMap};\n\n use crate::storage_structure::relational::ASTBuilder;\n\n use crate::storage_structure::typing;\n\n use ron::ser::{to_string_pretty, PrettyConfig};\n\n use std::str::FromStr;\n\n\n\n env_logger::init();\n\n\n\n fn get_rel_ast_from_script_json(\n\n json: &serde_json::Value,\n\n ) -> Result<RelationalAST> {\n\n let storage_definition = json[\"code\"]\n\n .as_array()\n", "file_path": "src/storage_update/processor.rs", "rank": 7, "score": 85030.5248660236 }, { "content": "fn bigint(source: &str) -> Result<BigInt> {\n\n Ok(BigInt::from_str(source)?)\n\n}\n\n\n\npub(crate) fn parse_date(value: &Value) -> Result<insert::Value> {\n\n match value {\n\n Value::Int(s) => {\n\n let ts: i64 = s\n\n .to_i64()\n\n .ok_or_else(|| anyhow!(\"Num conversion failed\"))?;\n\n match Utc.timestamp_opt(ts, 0) {\n\n LocalResult::Single(t) => Ok(insert::Value::Timestamp(Some(t))),\n\n LocalResult::None => Ok(insert::Value::Timestamp(None)),\n\n LocalResult::Ambiguous(_, _) => {\n\n Err(anyhow!(\"Can't parse {:?}\", value))\n\n }\n\n }\n\n }\n\n Value::String(s) => {\n\n let fixedoffset = chrono::DateTime::parse_from_rfc3339(s.as_str())?;\n\n Ok(insert::Value::Timestamp(Some(\n\n fixedoffset.with_timezone(&Utc),\n\n )))\n\n }\n\n _ => Err(anyhow!(\"Can't parse {:?}\", value)),\n\n }\n\n}\n\n\n", "file_path": "src/storage_value/parser.rs", "rank": 8, "score": 83965.04781670679 }, { "content": "fn annotation(json: &serde_json::Value) -> Option<String> {\n\n match &json[\"annots\"][0] {\n\n serde_json::Value::String(s) => Some(s[1..].to_string()),\n\n serde_json::Value::Null => None,\n\n _ => panic!(\"unexpected annot type!: {:?}\", json[\"annots\"]),\n\n }\n\n}\n\n\n", "file_path": "src/storage_structure/typing.rs", "rank": 9, "score": 79202.29964870983 }, { "content": "fn parse_contract_settings_file(fpath: &str) -> Result<Vec<ContractID>> {\n\n let content = fs::read_to_string(fpath)?;\n\n #[derive(Serialize, Deserialize)]\n\n struct ParseType {\n\n contracts: Vec<ContractID>,\n\n }\n\n let res: ParseType = serde_yaml::from_str(&content)?;\n\n Ok(res.contracts)\n\n}\n", "file_path": "src/config.rs", "rank": 10, "score": 77440.89735206489 }, { "content": "fn is_contract(address: &str) -> bool {\n\n address.starts_with(\"KT1\") && !is_contract_denylisted(address)\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Header {\n\n pub level: u32,\n\n pub predecessor: String,\n\n pub timestamp: String,\n\n\n\n #[serde(skip)]\n\n validation_pass: i64,\n\n #[serde(skip)]\n", "file_path": "src/octez/block.rs", "rank": 11, "score": 76644.52750213689 }, { "content": "fn ele_with_annot(ele: &Ele, annot: Option<String>) -> Ele {\n\n match &ele.name {\n\n Some(_) => ele.clone(),\n\n None => {\n\n let mut e = ele.clone();\n\n e.name = annot;\n\n e\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/storage_structure/relational.rs", "rank": 12, "score": 75093.67761691561 }, { "content": "fn ele_set_annot(ele: &Ele, annot: Option<String>) -> Ele {\n\n let mut e = ele.clone();\n\n e.name = annot;\n\n e\n\n}\n\n\n", "file_path": "src/storage_structure/relational.rs", "rank": 13, "score": 73405.7022863566 }, { "content": "CREATE UNIQUE INDEX levels_level ON levels(level);\n", "file_path": "sql/common-tables.sql", "rank": 14, "score": 
62332.55362471375 }, { "content": "#[derive(Debug)]\n\nstruct Stats {\n\n counters: HashMap<String, (usize, u64)>,\n\n values: HashMap<String, String>,\n\n}\n\n\n\nimpl Stats {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n counters: HashMap::new(),\n\n values: HashMap::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn add(&mut self, field: &str, n: usize) {\n\n let (c, total) = self\n\n .counters\n\n .get(field)\n\n .unwrap_or(&(0, 0));\n\n let c = c + n;\n\n let total = total + (n as u64);\n", "file_path": "src/stats.rs", "rank": 15, "score": 62197.6404572451 }, { "content": "fn main() {\n\n let orig_hook = panic::take_hook();\n\n panic::set_hook(Box::new(move |panic_info| {\n\n // invoke the default handler and exit the process\n\n orig_hook(panic_info);\n\n // wait for a bit to give time to the root error's thread to print\n\n // its error\n\n thread::sleep(std::time::Duration::from_millis(500));\n\n process::exit(1);\n\n }));\n\n\n\n let env = Env::default().filter_or(\"RUST_LOG\", \"info\");\n\n env_logger::init_from_env(env);\n\n\n\n let config = CONFIG.as_ref().unwrap();\n\n\n\n let node_cli =\n\n &node::NodeClient::new(config.node_url.clone(), \"main\".to_string());\n\n\n\n let mut dbcli = DBClient::connect(\n", "file_path": "src/main.rs", "rank": 16, "score": 61200.69487227521 }, { "content": "#[derive(Clone)]\n\nstruct MutexedState {\n\n #[allow(clippy::type_complexity)]\n\n contracts: Arc<Mutex<HashMap<ContractID, (RelationalAST, Option<u32>)>>>,\n\n level_floor: Arc<Mutex<u32>>,\n\n}\n\n\n\nimpl MutexedState {\n\n pub fn new() -> Self {\n\n Self {\n\n contracts: Arc::new(Mutex::new(HashMap::new())),\n\n level_floor: Arc::new(Mutex::new(0)),\n\n }\n\n }\n\n\n\n pub fn set_level_floor(&self) -> Result<()> {\n\n let contracts = self\n\n .contracts\n\n .lock()\n\n .map_err(|_| anyhow!(\"failed to lock contracts mutex\"))?;\n\n let mut level_floor = self\n", "file_path": "src/highlevel.rs", "rank": 17, "score": 60427.25087018346 }, { "content": "#[test]\n\nfn test_storage() {}\n", "file_path": "src/highlevel.rs", "rank": 18, "score": 59440.88797284564 }, { "content": "fn index_all_contracts(\n\n config: &config::Config,\n\n bcd_settings: &Option<(String, String)>,\n\n mut executor: highlevel::Executor,\n\n) {\n\n executor.index_all_contracts();\n\n if !config.levels.is_empty() {\n\n executor\n\n .exec_levels(\n\n config.getters_cap,\n\n config.workers_cap,\n\n config.levels.clone(),\n\n )\n\n .unwrap();\n\n #[cfg(feature = \"regression_force_update_derived\")]\n\n if true {\n\n info!(\"skipping re-populating of derived tables, always_update_derived enabled\");\n\n return;\n\n }\n\n executor\n", "file_path": "src/main.rs", "rank": 19, "score": 59440.88797284564 }, { "content": "#[test]\n\nfn test_generate() {\n\n use crate::sql::postgresql_generator::PostgresqlGenerator;\n\n use crate::storage_structure::relational::ASTBuilder;\n\n use crate::storage_structure::typing;\n\n\n\n use ron::ser::{to_string_pretty, PrettyConfig};\n\n use std::fs::File;\n\n use std::io::BufReader;\n\n use std::path::Path;\n\n use std::str::FromStr;\n\n\n\n let json = serde_json::Value::from_str(&debug::load_test(\n\n \"test/KT1U7Adyu5A7JWvEVSKjJEkG2He2SU1nATfq.script\",\n\n ))\n\n .unwrap();\n\n let storage_definition = &json[\"code\"][1][\"args\"][0];\n\n let type_ast =\n\n typing::storage_ast_from_json(&storage_definition.clone()).unwrap();\n\n println!(\"{:#?}\", type_ast);\n\n\n", "file_path": "src/highlevel.rs", "rank": 20, "score": 59440.88797284564 }, { "content": "struct ProcessedBatch {\n\n size: 
usize,\n\n\n\n pub levels: HashMap<i32, LevelMeta>,\n\n pub tx_contexts: Vec<TxContext>,\n\n pub txs: Vec<Tx>,\n\n pub bigmap_keyhashes: Vec<(TxContext, i32, String, String)>,\n\n\n\n pub contract_levels: Vec<(ContractID, i32, bool)>,\n\n pub contract_inserts: HashMap<ContractID, Vec<Insert>>,\n\n pub contract_deps: Vec<(i32, String, ContractID)>,\n\n pub contract_tx_contexts:\n\n HashMap<ContractID, (RelationalAST, Vec<TxContext>)>,\n\n\n\n max_id: i64,\n\n}\n\n\n\nimpl ProcessedBatch {\n\n pub fn new(max_id: i64) -> Self {\n\n Self {\n", "file_path": "src/sql/inserter.rs", "rank": 21, "score": 58820.26080646258 }, { "content": "fn insert_batch(\n\n dbcli: &mut DBClient,\n\n stats: Option<&StatsLogger>,\n\n update_derived_tables: bool,\n\n batch: &ProcessedBatch,\n\n) -> Result<()> {\n\n let mut db_tx = dbcli.transaction()?;\n\n\n\n DBClient::set_max_id(&mut db_tx, batch.get_max_id())?;\n\n DBClient::save_levels(\n\n &mut db_tx,\n\n &batch\n\n .levels\n\n .values()\n\n .collect::<Vec<&LevelMeta>>(),\n\n )?;\n\n DBClient::save_contract_deps(&mut db_tx, &batch.contract_deps)?;\n\n DBClient::save_contract_levels(&mut db_tx, &batch.contract_levels)?;\n\n\n\n DBClient::save_tx_contexts(&mut db_tx, &batch.tx_contexts)?;\n", "file_path": "src/sql/inserter.rs", "rank": 22, "score": 57843.50385913513 }, { "content": "#[test]\n\nfn test_decode() {\n\n let test_data = vec![\n\n (\n\n \"00006b82198cb179e8306c1bedd08f12dc863f328886\",\n\n \"tz1VSUr8wwNhLAzempoch5d6hLRiTh8Cjcjb\",\n\n ),\n\n (\n\n \"01d62a20fd2574884476f3da2f1a41bb8cc289f8cc00\",\n\n \"KT1U7Adyu5A7JWvEVSKjJEkG2He2SU1nATfq\",\n\n ),\n\n (\n\n // there may be a callback address specified after the address itself\n\n // (tz1..%someFunction), we want to grab the tz1 address\n\n \"016e4943f7a23ab9cbe56f48ff72f6c27e8956762400626f72726f775f63616c6c6261636b\",\n\n \"KT1JdufSdfg3WyxWJcCRNsBFV9V3x9TQBkJ2%borrow_callback\",\n\n ),\n\n ];\n\n for (from, to) in test_data {\n\n assert_eq!(to, decode_address(from).unwrap().as_str());\n\n }\n\n}\n", "file_path": "src/storage_value/parser.rs", "rank": 23, "score": 56387.04804510469 }, { "content": "#[test]\n\nfn test_normalizer() {\n\n fn tx_context(level: u32) -> TxContext {\n\n TxContext {\n\n id: None,\n\n level,\n\n operation_group_number: 0,\n\n operation_number: 0,\n\n content_number: 0,\n\n internal_number: None,\n\n contract: \"\".to_string(),\n\n }\n\n }\n\n fn op_update(bigmap: i32, ident: i32) -> Op {\n\n Op::Update {\n\n bigmap,\n\n keyhash: \"\".to_string(),\n\n key: serde_json::Value::String(format!(\"{}\", ident)),\n\n value: None,\n\n }\n\n }\n", "file_path": "src/storage_update/bigmap.rs", "rank": 24, "score": 56387.04804510469 }, { "content": "#[cfg(test)]\n\nstruct DummyStorageGetter {}\n\n#[cfg(test)]\n\nimpl crate::octez::node::StorageGetter for DummyStorageGetter {\n\n fn get_contract_storage(\n\n &self,\n\n _contract_id: &str,\n\n _level: u32,\n\n ) -> Result<serde_json::Value> {\n\n Err(anyhow!(\"dummy storage getter was not expected to be called in test_block tests\"))\n\n }\n\n\n\n fn get_bigmap_value(\n\n &self,\n\n _level: u32,\n\n _bigmap_id: i32,\n\n _keyhash: &str,\n\n ) -> Result<Option<serde_json::Value>> {\n\n Err(anyhow!(\"dummy storage getter was not expected to be called in test_block tests\"))\n\n }\n\n}\n\n\n", "file_path": "src/storage_update/processor.rs", "rank": 25, "score": 56013.638558449595 }, { "content": "#[cfg(test)]\n\nstruct DummyBigmapKeysGetter {}\n\n#[cfg(test)]\n\nimpl crate::sql::db::BigmapKeysGetter for DummyBigmapKeysGetter {\n\n fn get(\n\n 
&mut self,\n\n _level: u32,\n\n _bigmap_id: i32,\n\n ) -> Result<Vec<(String, String)>> {\n\n Err(anyhow!(\"dummy bigmap keys getter was not expected to be called in test_block tests\"))\n\n }\n\n}\n", "file_path": "src/storage_update/processor.rs", "rank": 26, "score": 54780.98109465827 }, { "content": "#[test]\n\nfn test_relational_ast_builder() {\n\n fn simple(n: Option<String>, t: SimpleExprTy) -> Ele {\n\n Ele {\n\n expr_type: ExprTy::SimpleExprTy(t),\n\n name: n,\n\n }\n\n }\n\n fn or(n: Option<String>, l: Ele, r: Ele) -> Ele {\n\n Ele {\n\n expr_type: ExprTy::ComplexExprTy(ComplexExprTy::OrEnumeration(\n\n Box::new(l),\n\n Box::new(r),\n\n )),\n\n name: n,\n\n }\n\n }\n\n fn pair(n: Option<String>, l: Ele, r: Ele) -> Ele {\n\n Ele {\n\n expr_type: ExprTy::ComplexExprTy(ComplexExprTy::Pair(\n\n Box::new(l),\n", "file_path": "src/storage_structure/relational.rs", "rank": 27, "score": 53828.36936133397 }, { "content": "#[test]\n\nfn test_process_storage_value() {\n\n use num::BigInt;\n\n\n\n fn numeric(i: i32) -> insert::Value {\n\n insert::Value::Numeric(PgNumeric::new(Some(BigDecimal::from(i))))\n\n }\n\n\n\n struct TestCase {\n\n name: String,\n\n rel_ast: RelationalAST,\n\n value: parser::Value,\n\n tx_context: TxContext,\n\n exp_inserts: Vec<Insert>,\n\n }\n\n let tests: Vec<TestCase> = vec![\n\n TestCase {\n\n name: \"basic string\".to_string(),\n\n rel_ast: RelationalAST::Leaf {\n\n rel_entry: RelationalEntry {\n\n table_name: \"storage\".to_string(),\n", "file_path": "src/storage_update/processor.rs", "rank": 28, "score": 53828.36936133397 }, { "content": "#[derive(Template)]\n\n#[template(path = \"repopulate-snapshot-derived.sql\", escape = \"none\")]\n\nstruct RepopulateSnapshotDerivedTmpl<'a> {\n\n contract_schema: &'a str,\n\n table: &'a str,\n\n columns: &'a [String],\n\n}\n", "file_path": "src/sql/db.rs", "rank": 29, "score": 52292.78502108219 }, { "content": "#[derive(Template)]\n\n#[template(path = \"update-snapshot-derived.sql\", escape = \"none\")]\n\nstruct UpdateSnapshotDerivedTmpl<'a> {\n\n contract_schema: &'a str,\n\n table: &'a str,\n\n columns: &'a [String],\n\n tx_context_ids: &'a [i64],\n\n}\n", "file_path": "src/sql/db.rs", "rank": 30, "score": 52292.78502108219 }, { "content": "#[derive(Template)]\n\n#[template(path = \"repopulate-changes-derived.sql\", escape = \"none\")]\n\nstruct RepopulateChangesDerivedTmpl<'a> {\n\n contract_schema: &'a str,\n\n table: &'a str,\n\n columns: &'a [String],\n\n indices: &'a [String],\n\n}\n", "file_path": "src/sql/db.rs", "rank": 31, "score": 52292.78502108219 }, { "content": "#[derive(Template)]\n\n#[template(path = \"update-changes-derived.sql\", escape = \"none\")]\n\nstruct UpdateChangesDerivedTmpl<'a> {\n\n contract_schema: &'a str,\n\n table: &'a str,\n\n columns: &'a [String],\n\n indices: &'a [String],\n\n tx_context_ids: &'a [i64],\n\n}\n\n\n\npub struct DBClient {\n\n dbconn: postgres::Client,\n\n\n\n url: String,\n\n ssl: bool,\n\n ca_cert: Option<String>,\n\n}\n\n\n\nimpl Clone for DBClient {\n\n fn clone(&self) -> Self {\n\n self.reconnect().unwrap()\n\n }\n", "file_path": "src/sql/db.rs", "rank": 32, "score": 52292.78502108219 }, { "content": "fn exit_with_err(msg: &str) {\n\n error!(\"{}\", msg);\n\n process::exit(1);\n\n}\n", "file_path": "src/main.rs", "rank": 33, "score": 50087.03307341122 }, { "content": "fn assert_contracts_ok(contracts: &[ContractID]) {\n\n if contracts.is_empty() {\n\n exit_with_err(\"zero contracts to index..\");\n\n }\n\n\n\n let mut names: HashMap<String, ()> = HashMap::new();\n\n 
for contract_id in contracts {\n\n if names.contains_key(&contract_id.name) {\n\n exit_with_err(format!(\"bad contract settings provided: name clash (multiple contracts assigned to name '{}'\", contract_id.name).as_str());\n\n }\n\n if is_contract_denylisted(&contract_id.address) {\n\n exit_with_err(format!(\"bad contract settings provided: denylisted contract cannot be indexed ({})\", contract_id.name).as_str());\n\n }\n\n names.insert(contract_id.name.clone(), ());\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 47528.3543896405 }, { "content": "fn prim(s: &str) -> Value {\n\n match s {\n\n \"False\" => Value::Bool(true),\n\n \"None\" => Value::None,\n\n _ => panic!(\"Don't know what to do with prim {}\", s),\n\n }\n\n}\n\n\n", "file_path": "src/storage_value/parser.rs", "rank": 35, "score": 46667.661123223064 }, { "content": "fn confirm_request(msg: &str) -> bool {\n\n // returns true if user confirmed, otherwise false.\n\n\n\n if CONFIG.as_ref().unwrap().always_yes {\n\n info!(\n\n \"{} -- skipping confirm request. running with always_yes enabled\",\n\n msg\n\n );\n\n return true;\n\n }\n\n\n\n loop {\n\n info!(\"{} [y]es or [n]o\", msg);\n\n let mut buf = String::new();\n\n std::io::stdin()\n\n .read_line(&mut buf)\n\n .unwrap();\n\n match buf.as_str().trim_end() {\n\n \"n\" | \"no\" => return false,\n\n \"y\" | \"yes\" => return true,\n\n _ => {}\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 46667.661123223064 }, { "content": "fn assert_sane_db(dbcli: &mut DBClient) {\n\n let db_version = dbcli.get_quepasa_version().unwrap();\n\n if schema_version(&db_version)\n\n != schema_version(crate::config::QUEPASA_VERSION)\n\n {\n\n exit_with_err(\n\n format!(\n\n \"\n\nCannot target a database that was initialized with an incompatible quepasa version.\n\nThis database was initialized with Que Pasa {}, currently running Que Pasa {}.\n\nEither drop the old database namespace or keep it and target a different one.\",\n\n db_version,\n\n crate::config::QUEPASA_VERSION,\n\n )\n\n .as_str(),\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 44312.54956259523 }, { "content": "fn get_column_name(expr: &ExprTy) -> &str {\n\n match expr {\n\n ExprTy::ComplexExprTy(_) => \"\",\n\n ExprTy::SimpleExprTy(e) => match e {\n\n SimpleExprTy::Address => \"address\",\n\n SimpleExprTy::Bool => \"bool\",\n\n SimpleExprTy::Bytes => \"bytes\",\n\n SimpleExprTy::Int => \"int\",\n\n SimpleExprTy::Nat => \"nat\",\n\n SimpleExprTy::Mutez => \"mutez\",\n\n SimpleExprTy::String => \"string\",\n\n SimpleExprTy::KeyHash => \"keyhash\",\n\n SimpleExprTy::Signature => \"signature\",\n\n SimpleExprTy::Contract => \"contract\",\n\n SimpleExprTy::Timestamp => \"timestamp\",\n\n SimpleExprTy::Unit => \"unit\",\n\n SimpleExprTy::Stop => \"stop\",\n\n },\n\n }\n\n}\n", "file_path": "src/storage_structure/relational.rs", "rank": 38, "score": 42297.901704048636 }, { "content": "CREATE UNIQUE INDEX levels_hash ON levels(hash);\n\n\n", "file_path": "sql/common-tables.sql", "rank": 39, "score": 38954.97371158742 }, { "content": "fn lex(json: &serde_json::Value) -> serde_json::Value {\n\n if let serde_json::Value::Array(mut a) = json.clone() {\n\n if a.is_empty() {\n\n return json.clone();\n\n }\n\n a.reverse();\n\n lexer_unfold_many_pair(&mut a)\n\n } else {\n\n json.clone()\n\n }\n\n}\n\n\n\n/// Goes through the actual stored data and builds up a structure which can be used in combination with the node\n\n/// data to stash it in the database.\n\npub(crate) fn 
parse_lexed(json: &serde_json::Value) -> Result<Value> {\n\n if let serde_json::Value::Array(a) = json {\n\n return Ok(Value::List(\n\n a.iter()\n\n .map(|x| parse_lexed(x).unwrap())\n\n .collect(),\n", "file_path": "src/storage_value/parser.rs", "rank": 40, "score": 38374.23127194942 }, { "content": "pub struct LevelMeta {\n\n pub level: u32,\n\n pub hash: Option<String>,\n\n pub prev_hash: Option<String>,\n\n pub baked_at: Option<DateTime<Utc>>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Block {\n\n pub hash: String,\n\n pub header: Header,\n\n pub operations: Vec<Vec<Operation>>,\n\n\n", "file_path": "src/octez/block.rs", "rank": 41, "score": 36097.32201699839 }, { "content": "impl Block {\n\n pub(crate) fn operations(&self) -> Vec<Vec<Operation>> {\n\n self.operations.clone()\n\n }\n\n\n\n fn parse_option_i64(x: Option<&String>) -> anyhow::Result<Option<i64>> {\n\n let parsed = x.map_or(Ok(None), |s| s.parse::<i64>().map(Some))?;\n\n Ok(parsed)\n\n }\n\n\n\n pub(crate) fn map_tx_contexts<F, O>(\n\n &self,\n\n mut f: F,\n\n ) -> anyhow::Result<Vec<O>>\n\n where\n\n F: FnMut(\n\n TxContext,\n\n Tx,\n\n bool,\n\n &OperationResult,\n", "file_path": "src/octez/block.rs", "rank": 42, "score": 36095.15762661051 }, { "content": " #[serde(skip)]\n\n protocol: String,\n\n #[serde(skip)]\n\n chain_id: String,\n\n #[serde(skip)]\n\n metadata: Metadata,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) struct TxContext {\n\n pub id: Option<i64>,\n\n pub contract: String,\n\n pub level: u32,\n\n pub operation_group_number: usize,\n\n pub operation_number: usize,\n\n pub content_number: usize,\n\n pub internal_number: Option<i32>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "src/octez/block.rs", "rank": 43, "score": 36092.65464214575 }, { "content": "pub(crate) struct Tx {\n\n pub tx_context_id: i64,\n\n\n\n pub operation_hash: String,\n\n pub source: Option<String>,\n\n pub destination: Option<String>,\n\n pub entrypoint: Option<String>,\n\n\n\n pub fee: Option<i64>,\n\n pub gas_limit: Option<i64>,\n\n pub storage_limit: Option<i64>,\n\n\n\n pub consumed_milligas: Option<i64>,\n\n pub storage_size: Option<i64>,\n\n pub paid_storage_size_diff: Option<i64>,\n\n}\n\n\n\nimpl Hash for TxContext {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.level.hash(state);\n", "file_path": "src/octez/block.rs", "rank": 44, "score": 36090.12357587795 }, { "content": " serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Metadata {\n\n pub protocol: String,\n\n pub next_protocol: String,\n\n pub test_chain_status: TestChainStatus,\n\n pub max_operations_ttl: i64,\n\n pub max_operation_data_length: i64,\n\n pub max_block_header_length: i64,\n\n pub max_operation_list_length: Vec<MaxOperationListLength>,\n\n pub baker: String,\n\n pub level_info: LevelInfo,\n\n pub voting_period_info: VotingPeriodInfo,\n\n pub nonce_hash: ::serde_json::Value,\n\n pub consumed_gas: Option<String>,\n\n pub deactivated: Vec<::serde_json::Value>,\n\n pub balance_updates: Option<Vec<BalanceUpdate>>,\n\n}\n\n\n", "file_path": "src/octez/block.rs", "rank": 45, "score": 36090.11964247881 }, { "content": "use crate::itertools::Itertools;\n\nuse chrono::{DateTime, Utc};\n\nuse std::cmp::Ordering;\n\nuse std::hash::{Hash, Hasher};\n\n\n\nuse crate::contract_denylist::is_contract_denylisted;\n\n\n\nconst LIQUIDITY_BAKING_LEVEL: u32 = 1589247;\n\nconst LIQUIDITY_BAKING: &str = 
\"KT1TxqZ8QtKvLu3V3JH7Gx58n7Co8pgtpQU5\";\n\nconst LIQUIDITY_BAKING_TOKEN: &str = \"KT1AafHA1C1vk959wvHWBispY9Y2f3fxBUUo\";\n\n\n\npub(crate) fn get_implicit_origination_level(contract: &str) -> Option<u32> {\n\n if contract == LIQUIDITY_BAKING || contract == LIQUIDITY_BAKING_TOKEN {\n\n return Some(LIQUIDITY_BAKING_LEVEL);\n\n }\n\n None\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n\n", "file_path": "src/octez/block.rs", "rank": 46, "score": 36088.739956297075 }, { "content": ")]\n\npub struct Operations {\n\n pub kind: String,\n\n pub level: i64,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct OperationMetadata {\n\n pub operation_result: Option<OperationResult>,\n\n #[serde(default)]\n\n pub internal_operation_results: Vec<InternalOperationResult>,\n\n\n\n #[serde(skip)]\n", "file_path": "src/octez/block.rs", "rank": 47, "score": 36088.179208950474 }, { "content": " pub(crate) fn active_contracts(&self) -> Vec<String> {\n\n let mut res: Vec<String> = self\n\n .map_tx_contexts(|tx_context, _tx, _is_origination, _op_res| {\n\n Ok(Some(tx_context.contract))\n\n })\n\n .unwrap();\n\n if self.header.level == LIQUIDITY_BAKING_LEVEL {\n\n res.push(LIQUIDITY_BAKING.to_string());\n\n res.push(LIQUIDITY_BAKING_TOKEN.to_string());\n\n }\n\n res.iter()\n\n .filter(|address| is_contract(address))\n\n .unique()\n\n .cloned()\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "src/octez/block.rs", "rank": 48, "score": 36087.7292981026 }, { "content": " Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct InternalOperationResult {\n\n pub kind: String,\n\n pub source: String,\n\n pub nonce: i64,\n\n pub amount: Option<String>,\n\n pub destination: Option<String>,\n\n pub parameters: Option<Parameters>,\n\n pub result: OperationResult,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n", "file_path": "src/octez/block.rs", "rank": 49, "score": 36086.84537184999 }, { "content": " .internal_operation_results\n\n .iter()\n\n .enumerate()\n\n {\n\n if internal_op.result.status != \"applied\" {\n\n continue;\n\n }\n\n if let Some(internal_dest_addr) =\n\n &internal_op.destination\n\n {\n\n if is_contract(internal_dest_addr) {\n\n let fres = f(\n\n TxContext {\n\n id: None,\n\n level: self.header.level,\n\n contract:\n\n internal_dest_addr\n\n .to_string(),\n\n operation_group_number,\n\n operation_number,\n", "file_path": "src/octez/block.rs", "rank": 50, "score": 36086.40636106215 }, { "content": " self.contract.hash(state);\n\n self.operation_group_number.hash(state);\n\n self.operation_number.hash(state);\n\n self.content_number.hash(state);\n\n self.internal_number.hash(state);\n\n }\n\n}\n\n\n\n// Manual impl PartialEq in order to exclude the <id> field\n\nimpl PartialEq for TxContext {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.level == other.level\n\n && self.contract == other.contract\n\n && self.operation_group_number == other.operation_group_number\n\n && self.operation_number == other.operation_number\n\n && self.content_number == other.content_number\n\n && self.internal_number == other.internal_number\n\n }\n\n}\n\nimpl PartialOrd for TxContext {\n", "file_path": "src/octez/block.rs", "rank": 51, "score": 36086.332323229086 }, { "content": " #[serde(skip)]\n\n chain_id: String,\n\n #[serde(skip)]\n\n branch: String,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n 
serde_derive::Deserialize,\n\n)]\n\npub struct Content {\n\n pub slot: Option<i64>,\n\n pub metadata: OperationMetadata,\n\n pub destination: Option<String>,\n\n pub source: Option<String>,\n\n pub parameters: Option<Parameters>,\n", "file_path": "src/octez/block.rs", "rank": 52, "score": 36085.26693189906 }, { "content": " if res != Ordering::Equal {\n\n return Some(res);\n\n }\n\n let res = self\n\n .internal_number\n\n .cmp(&other.internal_number);\n\n if res != Ordering::Equal {\n\n return Some(res);\n\n }\n\n Some(Ordering::Equal)\n\n }\n\n}\n\n\n\nimpl Eq for TxContext {}\n\nimpl Ord for TxContext {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n self.partial_cmp(other).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/octez/block.rs", "rank": 53, "score": 36084.68997240848 }, { "content": " .metadata\n\n .internal_operation_results\n\n {\n\n if result.destination == destination {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n is_implicit_active(self.header.level, contract_address)\n\n }\n\n\n\n pub(crate) fn has_contract_origination(\n\n &self,\n\n contract_address: &str,\n\n ) -> bool {\n\n if self.header.level == 1589247\n\n && (contract_address == LIQUIDITY_BAKING\n", "file_path": "src/octez/block.rs", "rank": 54, "score": 36084.5610349709 }, { "content": " delegate: Option<String>,\n\n #[serde(skip)]\n\n balance_updates: Vec<BalanceUpdate>,\n\n #[serde(skip)]\n\n slots: Vec<i64>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct OperationResult {\n\n #[serde(default)]\n\n pub originated_contracts: Vec<String>,\n\n pub status: String,\n\n pub storage: Option<::serde_json::Value>,\n", "file_path": "src/octez/block.rs", "rank": 55, "score": 36084.28657817593 }, { "content": " serde_derive::Deserialize,\n\n)]\n\npub struct Result {\n\n pub status: String,\n\n pub storage: Option<::serde_json::Value>,\n\n pub big_map_diff: Option<Vec<BigMapDiff>>,\n\n pub lazy_storage_diff: Option<Vec<LazyStorageDiff>>,\n\n\n\n #[serde(skip)]\n\n balance_updates: Option<Vec<BalanceUpdate>>,\n\n #[serde(skip)]\n\n consumed_gas: Option<String>,\n\n #[serde(skip)]\n\n consumed_milligas: Option<String>,\n\n #[serde(skip)]\n\n storage_size: Option<String>,\n\n #[serde(skip)]\n\n paid_storage_size_diff: Option<String>,\n\n}\n\n*/\n", "file_path": "src/octez/block.rs", "rank": 56, "score": 36084.06854182776 }, { "content": " Self::parse_option_i64(\n\n internal_op.result\n\n .paid_storage_size_diff\n\n .as_ref(),\n\n )?,\n\n },\n\n false,\n\n &internal_op.result,\n\n )?;\n\n if let Some(elem) = fres {\n\n res.push(elem);\n\n }\n\n }\n\n }\n\n\n\n for contract in\n\n &internal_op.result.originated_contracts\n\n {\n\n let fres = f(\n\n TxContext {\n", "file_path": "src/octez/block.rs", "rank": 57, "score": 36084.01476981588 }, { "content": " }\n\n }\n\n\n\n for contract in &operation_result.originated_contracts {\n\n let fres = f(\n\n TxContext {\n\n id: None,\n\n level: self.header.level,\n\n contract: contract.clone(),\n\n operation_group_number,\n\n operation_number,\n\n content_number,\n\n internal_number: None,\n\n },\n\n Tx {\n\n tx_context_id: -1,\n\n\n\n operation_hash: operation.hash.clone(),\n\n source: content.source.clone(),\n\n destination: Some(contract.clone()),\n", "file_path": "src/octez/block.rs", "rank": 58, "score": 36083.42377524003 }, { "content": "\n\n pub(crate) fn is_contract_active(&self, contract_address: &str) -> bool {\n\n if is_contract_denylisted(contract_address) {\n\n 
return false;\n\n }\n\n\n\n let destination = Some(contract_address.to_string());\n\n for operations in &self.operations {\n\n for operation in operations {\n\n for content in &operation.contents {\n\n if let Some(operation_result) =\n\n &content.metadata.operation_result\n\n {\n\n if operation_result.status != \"applied\" {\n\n continue;\n\n }\n\n if content.destination == destination {\n\n return true;\n\n }\n\n for result in &content\n", "file_path": "src/octez/block.rs", "rank": 59, "score": 36083.183587393156 }, { "content": "#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TestChainStatus {\n\n pub status: String,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n", "file_path": "src/octez/block.rs", "rank": 60, "score": 36082.976834119254 }, { "content": ")]\n\npub struct Value {\n\n pub string: Option<String>,\n\n pub prim: Option<String>,\n\n #[serde(default)]\n\n pub bytes: Option<String>,\n\n pub args: Vec<::serde_json::Value>,\n\n pub int: Option<String>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct KeyType {\n\n pub prim: Option<String>,\n", "file_path": "src/octez/block.rs", "rank": 61, "score": 36082.819470222734 }, { "content": " pub kind: String,\n\n #[serde(rename = \"start_position\")]\n\n pub start_position: i64,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct BalanceUpdate {\n\n pub kind: String,\n\n pub contract: Option<String>,\n\n pub change: String,\n\n pub origin: Option<String>,\n\n pub category: Option<String>,\n", "file_path": "src/octez/block.rs", "rank": 62, "score": 36082.75063111726 }, { "content": " serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Arg {\n\n pub prim: Option<String>,\n\n pub bytes: Option<String>,\n\n pub int: Option<String>,\n\n #[serde(default)]\n\n pub args: Option<Vec<Arg>>,\n\n pub annots: Option<Vec<String>>,\n\n pub string: Option<String>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n", "file_path": "src/octez/block.rs", "rank": 63, "score": 36082.65672848573 }, { "content": ")]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct LevelInfo {\n\n pub level: i64,\n\n #[serde(rename = \"level_position\")]\n\n pub level_position: i64,\n\n pub cycle: i64,\n\n #[serde(rename = \"cycle_position\")]\n\n pub cycle_position: i64,\n\n #[serde(rename = \"expected_commitment\")]\n\n pub expected_commitment: bool,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n", "file_path": "src/octez/block.rs", "rank": 64, "score": 36082.38131869472 }, { "content": "#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Key {\n\n pub string: Option<String>,\n\n pub prim: Option<String>,\n\n pub args: Option<Vec<Arg>>,\n\n pub int: Option<String>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n", "file_path": "src/octez/block.rs", "rank": 65, "score": 36082.36057147017 }, { "content": " pub 
delegate: Option<String>,\n\n pub cycle: Option<i64>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Operation {\n\n pub hash: String,\n\n pub contents: Vec<Content>,\n\n\n\n #[serde(skip)]\n\n protocol: String,\n\n #[serde(skip)]\n\n signature: Option<String>,\n", "file_path": "src/octez/block.rs", "rank": 66, "score": 36082.328213476365 }, { "content": " serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct LazyStorageDiff {\n\n pub kind: String,\n\n pub id: String,\n\n pub diff: Diff,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Diff {\n\n pub action: String,\n\n pub updates: Option<Updates>,\n", "file_path": "src/octez/block.rs", "rank": 67, "score": 36082.22922434854 }, { "content": " Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct BigMapDiff {\n\n pub action: String,\n\n pub big_map: Option<String>,\n\n pub source_big_map: Option<String>,\n\n pub destination_big_map: Option<String>,\n\n pub key_hash: Option<String>,\n\n pub key: Option<serde_json::Value>,\n\n pub value: Option<serde_json::Value>,\n\n\n\n #[serde(skip)]\n\n key_type: Option<KeyType>,\n\n #[serde(skip)]\n\n value_type: Option<ValueType>,\n\n}\n\n\n", "file_path": "src/octez/block.rs", "rank": 68, "score": 36082.22582829051 }, { "content": " pub fee: Option<String>,\n\n pub gas_limit: Option<String>,\n\n pub storage_limit: Option<String>,\n\n\n\n #[serde(skip)]\n\n kind: String,\n\n #[serde(skip)]\n\n endorsement: Option<Endorsement>,\n\n #[serde(skip)]\n\n counter: Option<String>,\n\n #[serde(skip)]\n\n amount: Option<String>,\n\n #[serde(skip)]\n\n balance: Option<String>,\n\n #[serde(skip)]\n\n script: Option<Script>,\n\n}\n\n\n\n#[derive(\n\n Default,\n", "file_path": "src/octez/block.rs", "rank": 69, "score": 36082.040572763486 }, { "content": " pub big_map_diff: Option<Vec<BigMapDiff>>,\n\n pub lazy_storage_diff: Option<Vec<LazyStorageDiff>>,\n\n\n\n #[serde(default)]\n\n pub consumed_milligas: Option<String>,\n\n #[serde(default)]\n\n pub storage_size: Option<String>,\n\n #[serde(default)]\n\n pub paid_storage_size_diff: Option<String>,\n\n\n\n #[serde(skip)]\n\n balance_updates: Option<Vec<BalanceUpdate>>,\n\n #[serde(skip)]\n\n consumed_gas: Option<String>,\n\n // pub lazy_storage_diff: Option<Vec<LazyStorageDiff>>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n", "file_path": "src/octez/block.rs", "rank": 70, "score": 36081.66403148214 }, { "content": " pub key_hash: Option<String>,\n\n pub key: Option<serde_json::Value>,\n\n pub value: Option<serde_json::Value>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct ValueType2 {\n\n pub prim: Option<String>,\n\n pub args: Option<Vec<Arg>>,\n\n}\n\n\n\n#[derive(\n\n Default,\n", "file_path": "src/octez/block.rs", "rank": 71, "score": 36081.534635762735 }, { "content": " entrypoint: None,\n\n\n\n fee: Self::parse_option_i64(\n\n content.fee.as_ref(),\n\n )?,\n\n gas_limit: Self::parse_option_i64(\n\n content.gas_limit.as_ref(),\n\n )?,\n\n storage_limit: Self::parse_option_i64(\n\n content.storage_limit.as_ref(),\n\n )?,\n\n\n\n consumed_milligas: Self::parse_option_i64(\n\n operation_result\n\n .consumed_milligas\n\n .as_ref(),\n\n )?,\n\n storage_size: Self::parse_option_i64(\n\n 
operation_result.storage_size.as_ref(),\n\n )?,\n", "file_path": "src/octez/block.rs", "rank": 72, "score": 36081.491466172076 }, { "content": ")]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct MaxOperationListLength {\n\n #[serde(rename = \"max_size\")]\n\n pub max_size: i64,\n\n #[serde(rename = \"max_op\")]\n\n pub max_op: Option<i64>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Level {\n\n pub level: i64,\n", "file_path": "src/octez/block.rs", "rank": 73, "score": 36081.43199460489 }, { "content": " Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Endorsement {\n\n pub branch: String,\n\n pub operations: Operations,\n\n pub signature: String,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n", "file_path": "src/octez/block.rs", "rank": 74, "score": 36081.41603643746 }, { "content": " #[serde(rename = \"level_position\")]\n\n pub level_position: i64,\n\n pub cycle: i64,\n\n #[serde(rename = \"cycle_position\")]\n\n pub cycle_position: i64,\n\n #[serde(rename = \"voting_period\")]\n\n pub voting_period: i64,\n\n #[serde(rename = \"voting_period_position\")]\n\n pub voting_period_position: i64,\n\n #[serde(rename = \"expected_commitment\")]\n\n pub expected_commitment: bool,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n", "file_path": "src/octez/block.rs", "rank": 75, "score": 36081.25511171515 }, { "content": " .parameters\n\n .clone()\n\n .map(|p| p.entrypoint),\n\n\n\n fee: None,\n\n gas_limit: None,\n\n storage_limit: None,\n\n\n\n consumed_milligas:\n\n Self::parse_option_i64(\n\n internal_op.result\n\n .consumed_milligas\n\n .as_ref(),\n\n )?,\n\n storage_size: Self::parse_option_i64(\n\n internal_op.result\n\n .storage_size\n\n .as_ref(),\n\n )?,\n\n paid_storage_size_diff:\n", "file_path": "src/octez/block.rs", "rank": 76, "score": 36080.88966932352 }, { "content": " serde_derive::Deserialize,\n\n)]\n\npub struct Script {\n\n pub code: Vec<Code>,\n\n pub storage: serde_json::Value,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Code {\n\n pub prim: Option<String>,\n\n pub args: Option<Vec<::serde_json::Value>>,\n\n}\n", "file_path": "src/octez/block.rs", "rank": 77, "score": 36080.737120887585 }, { "content": " paid_storage_size_diff:\n\n Self::parse_option_i64(\n\n operation_result\n\n .paid_storage_size_diff\n\n .as_ref(),\n\n )?,\n\n },\n\n true,\n\n operation_result,\n\n )?;\n\n if let Some(elem) = fres {\n\n res.push(elem);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Ok(res)\n\n }\n", "file_path": "src/octez/block.rs", "rank": 78, "score": 36080.73275951019 }, { "content": " operation_result\n\n .storage_size\n\n .as_ref(),\n\n )?,\n\n paid_storage_size_diff:\n\n Self::parse_option_i64(\n\n operation_result\n\n .paid_storage_size_diff\n\n .as_ref(),\n\n )?,\n\n },\n\n false,\n\n operation_result,\n\n )?;\n\n if let Some(elem) = fres {\n\n res.push(elem);\n\n }\n\n\n\n for (internal_number, internal_op) in content\n\n .metadata\n", "file_path": "src/octez/block.rs", "rank": 79, "score": 36080.66295023422 }, { "content": " operations_hash: String,\n\n 
#[serde(skip)]\n\n fitness: Vec<String>,\n\n #[serde(skip)]\n\n context: String,\n\n #[serde(skip)]\n\n priority: i64,\n\n #[serde(skip)]\n\n proof_of_work_nonce: String,\n\n #[serde(skip)]\n\n signature: String,\n\n #[serde(skip)]\n\n proto: i64,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n", "file_path": "src/octez/block.rs", "rank": 80, "score": 36080.63313198524 }, { "content": " .clone()\n\n .map(|p| p.entrypoint),\n\n\n\n fee: Self::parse_option_i64(\n\n content.fee.as_ref(),\n\n )?,\n\n gas_limit: Self::parse_option_i64(\n\n content.gas_limit.as_ref(),\n\n )?,\n\n storage_limit: Self::parse_option_i64(\n\n content.storage_limit.as_ref(),\n\n )?,\n\n\n\n consumed_milligas:\n\n Self::parse_option_i64(\n\n operation_result\n\n .consumed_milligas\n\n .as_ref(),\n\n )?,\n\n storage_size: Self::parse_option_i64(\n", "file_path": "src/octez/block.rs", "rank": 81, "score": 36080.62470926108 }, { "content": " let fres = f(\n\n TxContext {\n\n id: None,\n\n level: self.header.level,\n\n contract: dest_addr.clone(),\n\n operation_group_number,\n\n operation_number,\n\n content_number,\n\n internal_number: None,\n\n },\n\n Tx {\n\n tx_context_id: -1,\n\n\n\n operation_hash: operation.hash.clone(),\n\n source: content.source.clone(),\n\n destination: content\n\n .destination\n\n .clone(),\n\n entrypoint: content\n\n .parameters\n", "file_path": "src/octez/block.rs", "rank": 82, "score": 36080.59533550229 }, { "content": "}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct ValueType {\n\n pub prim: Option<String>,\n\n pub args: Option<Vec<Arg>>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n", "file_path": "src/octez/block.rs", "rank": 83, "score": 36080.53723992436 }, { "content": " || contract_address == LIQUIDITY_BAKING_TOKEN)\n\n {\n\n return true;\n\n }\n\n\n\n self.contract_originations()\n\n .iter()\n\n .any(|c| c == contract_address)\n\n }\n\n\n\n fn contract_originations(&self) -> Vec<String> {\n\n self.map_tx_contexts(|tx_context, _tx, is_origination, _op_res| {\n\n if !is_origination {\n\n return Ok(None);\n\n }\n\n Ok(Some(tx_context.contract))\n\n })\n\n .unwrap()\n\n }\n\n\n", "file_path": "src/octez/block.rs", "rank": 84, "score": 36080.43115723074 }, { "content": "\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Storage {\n\n pub prim: Option<String>,\n\n pub args: Vec<Vec<::serde_json::Value>>,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n", "file_path": "src/octez/block.rs", "rank": 85, "score": 36080.33262449158 }, { "content": " pub source: Option<String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\n#[serde(untagged)]\n\npub enum Updates {\n\n Updates(Vec<Update>),\n\n Update(Update),\n\n Unknown(serde_json::Value),\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Update {\n", "file_path": "src/octez/block.rs", "rank": 86, "score": 36080.14011318399 }, { "content": " id: None,\n\n level: self.header.level,\n\n contract: contract.clone(),\n\n operation_group_number,\n\n operation_number,\n\n content_number,\n\n internal_number: Some(\n\n internal_number as i32,\n\n ),\n\n },\n\n Tx {\n\n tx_context_id: -1,\n\n\n\n operation_hash: operation\n\n 
.hash\n\n .clone(),\n\n source: Some(\n\n internal_op.source.clone(),\n\n ),\n\n destination: Some(\n", "file_path": "src/octez/block.rs", "rank": 87, "score": 36079.965610411666 }, { "content": " PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\npub struct Parameters {\n\n #[serde(default)]\n\n pub entrypoint: String,\n\n\n\n #[serde(skip)]\n\n value: Option<serde_json::Value>,\n\n}\n\n\n\n/*\n\n * TODO: probably unused. check\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n", "file_path": "src/octez/block.rs", "rank": 88, "score": 36079.80221208107 }, { "content": " fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n let res = self.level.cmp(&other.level);\n\n if res != Ordering::Equal {\n\n return Some(res);\n\n }\n\n let res = self\n\n .operation_group_number\n\n .cmp(&other.operation_group_number);\n\n if res != Ordering::Equal {\n\n return Some(res);\n\n }\n\n let res = self\n\n .operation_number\n\n .cmp(&other.operation_number);\n\n if res != Ordering::Equal {\n\n return Some(res);\n\n }\n\n let res = self\n\n .content_number\n\n .cmp(&other.content_number);\n", "file_path": "src/octez/block.rs", "rank": 89, "score": 36079.79385587999 }, { "content": ")]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct VotingPeriodInfo {\n\n #[serde(rename = \"voting_period\")]\n\n pub voting_period: VotingPeriod,\n\n pub position: i64,\n\n pub remaining: i64,\n\n}\n\n\n\n#[derive(\n\n Default,\n\n Debug,\n\n Clone,\n\n PartialEq,\n\n serde_derive::Serialize,\n\n serde_derive::Deserialize,\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct VotingPeriod {\n\n pub index: i64,\n", "file_path": "src/octez/block.rs", "rank": 90, "score": 36077.90835943767 }, { "content": " ) -> anyhow::Result<Option<O>>,\n\n {\n\n let mut res: Vec<O> = vec![];\n\n for (operation_group_number, operation_group) in\n\n self.operations().iter().enumerate()\n\n {\n\n for (operation_number, operation) in\n\n operation_group.iter().enumerate()\n\n {\n\n for (content_number, content) in\n\n operation.contents.iter().enumerate()\n\n {\n\n if let Some(operation_result) =\n\n &content.metadata.operation_result\n\n {\n\n if operation_result.status != \"applied\" {\n\n continue;\n\n }\n\n if let Some(dest_addr) = &content.destination {\n\n if is_contract(dest_addr) {\n", "file_path": "src/octez/block.rs", "rank": 91, "score": 36077.527667454 }, { "content": " content_number,\n\n internal_number: Some(\n\n internal_number as i32,\n\n ),\n\n },\n\n Tx {\n\n tx_context_id: -1,\n\n\n\n operation_hash: operation\n\n .hash\n\n .clone(),\n\n source: Some(\n\n internal_op\n\n .source\n\n .clone(),\n\n ),\n\n destination: internal_op\n\n .destination\n\n .clone(),\n\n entrypoint: internal_op\n", "file_path": "src/octez/block.rs", "rank": 92, "score": 36076.15783206293 }, { "content": " contract.clone(),\n\n ),\n\n entrypoint: None,\n\n\n\n fee: None,\n\n gas_limit: None,\n\n storage_limit: None,\n\n\n\n consumed_milligas: None,\n\n storage_size: None,\n\n paid_storage_size_diff: None,\n\n },\n\n true,\n\n &internal_op.result,\n\n )?;\n\n if let Some(elem) = fres {\n\n res.push(elem);\n\n }\n\n }\n\n }\n", "file_path": "src/octez/block.rs", "rank": 93, "score": 36076.10073321148 }, { "content": "DELETE FROM levels\n\nWHERE level IN ( {} )\n\n\",\n\n v_refs\n\n ))?;\n\n tx.query_raw(&stmt, values)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn save_contract_levels(\n\n tx: &mut Transaction,\n\n clvls: &[(ContractID, i32, bool)],\n\n ) -> 
Result<()> {\n\n for clvls_chunk in clvls.chunks(Self::INSERT_BATCH_SIZE) {\n\n let num_columns = 3;\n\n let v_refs = (1..(num_columns * clvls_chunk.len()) + 1)\n\n .map(|i| format!(\"${}\", i.to_string()))\n\n .collect::<Vec<String>>()\n\n .chunks(num_columns)\n", "file_path": "src/sql/db.rs", "rank": 94, "score": 34822.60434003758 }, { "content": "fn args(json: &serde_json::Value) -> Option<Vec<serde_json::Value>> {\n\n match &json[\"args\"] {\n\n serde_json::Value::Array(a) => Some(a.clone()),\n\n _ => None,\n\n }\n\n}\n\n\n\nmacro_rules! simple_expr {\n\n ($typ:expr, $name:expr) => {\n\n Ele {\n\n name: $name,\n\n expr_type: ExprTy::SimpleExprTy($typ),\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! complex_expr {\n\n ($typ:expr, $name:expr, $args:expr) => {{\n\n let args = $args.unwrap();\n\n Ele {\n", "file_path": "src/storage_structure/typing.rs", "rank": 95, "score": 34663.42170946633 }, { "content": "use crate::octez::block::{Block, LevelMeta};\n\nuse crate::octez::node;\n\nuse anyhow::{anyhow, Context, Result};\n\nuse std::thread;\n\n\n\n#[derive(Clone)]\n\npub struct ConcurrentBlockGetter {\n\n node_cli: node::NodeClient,\n\n workers: usize,\n\n}\n\n\n\nimpl ConcurrentBlockGetter {\n\n pub fn new(node_cli: node::NodeClient, workers: usize) -> Self {\n\n Self { node_cli, workers }\n\n }\n\n\n\n pub fn run(\n\n &self,\n\n recv_ch: flume::Receiver<u32>,\n\n send_ch: flume::Sender<Box<(LevelMeta, Block)>>,\n", "file_path": "src/octez/block_getter.rs", "rank": 96, "score": 34360.6825884944 }, { "content": " ) -> Vec<thread::JoinHandle<()>> {\n\n let mut threads = vec![];\n\n\n\n for _ in 0..self.workers {\n\n let w_node_cli = self.node_cli.clone();\n\n let w_recv_ch = recv_ch.clone();\n\n let w_send_ch = send_ch.clone();\n\n threads.push(thread::spawn(move || {\n\n Self::worker_fn(w_node_cli, w_recv_ch, w_send_ch).unwrap();\n\n }));\n\n }\n\n\n\n threads\n\n }\n\n\n\n fn worker_fn(\n\n node_cli: node::NodeClient,\n\n recv_ch: flume::Receiver<u32>,\n\n send_ch: flume::Sender<Box<(LevelMeta, Block)>>,\n\n ) -> Result<()> {\n", "file_path": "src/octez/block_getter.rs", "rank": 97, "score": 34346.3576143903 }, { "content": " for level_height in recv_ch {\n\n let (level, block) = node_cli\n\n .level_json(level_height)\n\n .with_context(|| {\n\n anyhow!(\"failed to get json for block {}\", level_height)\n\n })?;\n\n send_ch.send(Box::new((level, block)))?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/octez/block_getter.rs", "rank": 98, "score": 34340.962524923925 }, { "content": "DELETE FROM contract_levels\n\nWHERE level IN ( {} )\n\n\",\n\n v_refs\n\n ))?;\n\n tx.query_raw(&stmt, values)?;\n\n\n\n let values: Vec<&dyn postgres::types::ToSql> = lvls_chunk\n\n .iter()\n\n .map(|level| level.borrow_to_sql())\n\n .collect();\n\n let stmt = tx.prepare(&format!(\n\n \"\n", "file_path": "src/sql/db.rs", "rank": 99, "score": 33215.6142763167 } ]
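The level_json_internal snippet in this row disables serde_json's default recursion limit (128 levels) before deserializing the block JSON. The following is a minimal, self-contained sketch of that pattern only, not code from the repository: it assumes serde_json built with its `unbounded_depth` feature plus serde with `derive`, and the `Nested` type and example payload are hypothetical stand-ins for the indexer's `Block` type.

use serde::Deserialize;

// Hypothetical recursive type standing in for the real `Block` structure.
#[derive(Deserialize, Debug)]
struct Nested {
    child: Option<Box<Nested>>,
}

// Deserialize `body` with serde_json's recursion limit disabled, mirroring
// the call sequence in `level_json_internal` above.
fn parse_deep(body: &str) -> Result<Nested, serde_json::Error> {
    let mut deserializer = serde_json::Deserializer::from_str(body);
    // Requires serde_json's `unbounded_depth` feature; without this call the
    // deserializer errors out once nesting exceeds 128 levels.
    deserializer.disable_recursion_limit();
    Nested::deserialize(&mut deserializer)
}

fn main() {
    let doc = r#"{"child":{"child":{"child":null}}}"#;
    println!("{:?}", parse_deep(doc).unwrap());
}

Disabling the limit moves the depth constraint onto the machine stack; projects that expect very deeply nested documents commonly pair this call with serde_stacker so the stack can grow on demand.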
Rust
benches/custom_benches.rs
IzumiRaine/libdeflater
3a90be5798fc9949e9a416efeaba4663ac7d4e25
extern crate flate2;
extern crate libdeflater;

use std::io::prelude::*;
use std::fs;
use std::fs::{File};
use std::path::Path;
use criterion::{Criterion, black_box};
use flate2::{Compression, Compress, Decompress, FlushCompress, FlushDecompress};
use libdeflater::{Compressor, CompressionLvl, Decompressor};

struct Flate2Encoder {
    compress: Compress,
}

impl Flate2Encoder {
    fn new() -> Flate2Encoder {
        Flate2Encoder{ compress: Compress::new(Compression::default(), true) }
    }

    fn encode(&mut self, data: &[u8], out: &mut Vec<u8>) {
        self.compress.compress_vec(data, out, FlushCompress::Finish).unwrap();
        self.compress.reset();
    }
}

struct Flate2Decoder {
    decompress: Decompress,
}

impl Flate2Decoder {
    fn new() -> Flate2Decoder {
        Flate2Decoder { decompress: Decompress::new(true) }
    }

    fn decode(&mut self, compressed_data: &[u8], _decomp_sz: usize, out: &mut Vec<u8>) {
        self.decompress.decompress_vec(compressed_data, out, FlushDecompress::Finish).unwrap();
        self.decompress.reset(true);
    }
}

struct LibdeflateEncoder {
    compressor: Compressor,
}

impl LibdeflateEncoder {
    fn new() -> LibdeflateEncoder {
        LibdeflateEncoder {
            compressor: Compressor::new(CompressionLvl::default()),
        }
    }

    fn encode(&mut self, data: &[u8], out: &mut Vec<u8>) {
        unsafe {
            out.set_len(self.compressor.zlib_compress_bound(data.len()));
            let actual = self.compressor.zlib_compress(data, out).unwrap();
            out.set_len(actual);
        }
    }
}

struct LibdeflateDecoder {
    st: Decompressor,
}

impl LibdeflateDecoder {
    fn new() -> LibdeflateDecoder {
        LibdeflateDecoder { st: Decompressor::new() }
    }

    fn decode(&mut self, zlib_data: &[u8], decomp_sz: usize, out: &mut Vec<u8>) {
        unsafe {
            out.set_len(decomp_sz);
            let sz = self.st.zlib_decompress(zlib_data, out).unwrap();
            out.set_len(sz);
        }
    }
}

pub fn run_custom_benches(b: &mut Criterion) {
    let (entries, biggest_entry) = {
        let mut biggest: u64 = 0;
        let mut entries = Vec::new();
        let path = Path::new("bench_data");
        for entry in fs::read_dir(path).unwrap() {
            let entry = entry.unwrap();
            let pth = entry.path();
            let sz = entry.metadata().unwrap().len();
            let filename = pth.file_name().unwrap().to_str().unwrap();
            if entry.metadata().unwrap().is_file() && !filename.contains("README.md") {
                entries.push(pth);
                biggest = if sz > biggest { sz } else { biggest }
            }
        }
        (entries, biggest as usize)
    };

    let buf_big_enough_to_fit_data = biggest_entry + 100;
    let mut buf: Vec<u8> = Vec::with_capacity(buf_big_enough_to_fit_data);
    let mut flate2_encoder = Flate2Encoder::new();
    let mut libdeflate_encoder = LibdeflateEncoder::new();
    let mut flate2_decoder = Flate2Decoder::new();
    let mut libdeflate_decoder = LibdeflateDecoder::new();

    for pth in entries {
        let k = pth.file_name().unwrap().to_str().unwrap();
        let mut grp = b.benchmark_group(k);
        grp.sample_size(20);

        let raw_data = {
            let mut file = File::open(&pth).unwrap();
            let mut data = Vec::new();
            file.read_to_end(&mut data).unwrap();
            data
        };

        grp.bench_function("flate2_encode", |b| b.iter(|| {
            buf.clear();
            flate2_encoder.encode(black_box(&raw_data), black_box(&mut buf));
        }));

        grp.bench_function("libdeflate_encode", |b| b.iter(|| {
            buf.clear();
            libdeflate_encoder.encode(black_box(&raw_data), black_box(&mut buf));
        }));

        let compressed_data = {
            let mut buf = Vec::with_capacity(buf_big_enough_to_fit_data);
            Flate2Encoder::new().encode(&raw_data, &mut buf);
            buf
        };

        grp.bench_function("flate2_decode", |b| b.iter(|| {
            buf.clear();
            flate2_decoder.decode(black_box(&compressed_data), black_box(raw_data.len()), black_box(&mut buf));
        }));

        grp.bench_function("libdeflate_decode", |b| b.iter(|| {
            buf.clear();
            libdeflate_decoder.decode(black_box(&compressed_data), black_box(raw_data.len()), black_box(&mut buf));
        }));

        grp.finish();
    }
}
extern crate flate2; extern crate libdeflater; use std::io::prelude::*; use std::fs; use std::fs::{File}; use std::path::Path; use crit
inish(); } }
erion::{Criterion, black_box}; use flate2::{Compression, Compress, Decompress, FlushCompress, FlushDecompress}; use libdeflater::{Compressor, CompressionLvl, Decompressor}; struct Flate2Encoder { compress: Compress, } impl Flate2Encoder { fn new() -> Flate2Encoder { Flate2Encoder{ compress: Compress::new(Compression::default(), true) } } fn encode(&mut self, data: &[u8], out: &mut Vec<u8>) { self.compress.compress_vec(data, out, FlushCompress::Finish).unwrap(); self.compress.reset(); } } struct Flate2Decoder { decompress: Decompress, } impl Flate2Decoder { fn new() -> Flate2Decoder { Flate2Decoder { decompress: Decompress::new(true) } } fn decode(&mut self, compressed_data: &[u8], _decomp_sz: usize, out: &mut Vec<u8>) { self.decompress.decompress_vec(compressed_data, out, FlushDecompress::Finish).unwrap(); self.decompress.reset(true); } } struct LibdeflateEncoder { compressor: Compressor, } impl LibdeflateEncoder { fn new() -> LibdeflateEncoder { LibdeflateEncoder { compressor: Compressor::new(CompressionLvl::default()), } } fn encode(&mut self, data: &[u8], out: &mut Vec<u8>) { unsafe { out.set_len(self.compressor.zlib_compress_bound(data.len())); let actual = self.compressor.zlib_compress(data, out).unwrap(); out.set_len(actual); } } } struct LibdeflateDecoder { st: Decompressor, } impl LibdeflateDecoder { fn new() -> LibdeflateDecoder { LibdeflateDecoder { st: Decompressor::new() } } fn decode(&mut self, zlib_data: &[u8], decomp_sz: usize, out: &mut Vec<u8>) { unsafe { out.set_len(decomp_sz); let sz = self.st.zlib_decompress(zlib_data, out).unwrap(); out.set_len(sz); } } } pub fn run_custom_benches(b: &mut Criterion) { let (entries, biggest_entry) = { let mut biggest: u64 = 0; let mut entries = Vec::new(); let path = Path::new("bench_data"); for entry in fs::read_dir(path).unwrap() { let entry = entry.unwrap(); let pth = entry.path(); let sz = entry.metadata().unwrap().len(); let filename = pth.file_name().unwrap().to_str().unwrap(); if entry.metadata().unwrap().is_file() && !filename.contains("README.md") { entries.push(pth); biggest = if sz > biggest { sz } else { biggest } } } (entries, biggest as usize) }; let buf_big_enough_to_fit_data = biggest_entry + 100; let mut buf: Vec<u8> = Vec::with_capacity(buf_big_enough_to_fit_data); let mut flate2_encoder = Flate2Encoder::new(); let mut libdeflate_encoder = LibdeflateEncoder::new(); let mut flate2_decoder = Flate2Decoder::new(); let mut libdeflate_decoder = LibdeflateDecoder::new(); for pth in entries { let k = pth.file_name().unwrap().to_str().unwrap(); let mut grp = b.benchmark_group(k); grp.sample_size(20); let raw_data = { let mut file = File::open(&pth).unwrap(); let mut data = Vec::new(); file.read_to_end(&mut data).unwrap(); data }; grp.bench_function("flate2_encode", |b| b.iter(|| { buf.clear(); flate2_encoder.encode(black_box(&raw_data), black_box(&mut buf)); })); grp.bench_function("libdeflate_encode", |b| b.iter(|| { buf.clear(); libdeflate_encoder.encode(black_box(&raw_data), black_box(&mut buf)); })); let compressed_data = { let mut buf = Vec::with_capacity(buf_big_enough_to_fit_data); Flate2Encoder::new().encode(&raw_data, &mut buf); buf }; grp.bench_function("flate2_decode", |b| b.iter(|| { buf.clear(); flate2_decoder.decode(black_box(&compressed_data), black_box(raw_data.len()), black_box(&mut buf)); })); grp.bench_function("libdeflate_decode", |b| b.iter(|| { buf.clear(); libdeflate_decoder.decode(black_box(&compressed_data), black_box(raw_data.len()), black_box(&mut buf)); })); grp.f
random
[ { "content": "#[test]\n\nfn test_use_crc32_convenience_method_returns_same_crc32_as_flate2() {\n\n // This assumes that flate2's crc32 implementation returns a\n\n // correct value, which is a pretty safe assumption.\n\n\n\n let input_data = read_fixture_content();\n\n\n\n let flate2_crc32 = {\n\n let mut crc = flate2::Crc::new();\n\n crc.update(&input_data);\n\n crc.sum()\n\n };\n\n\n\n let libdeflate_crc32 = libdeflater::crc32(&input_data);\n\n\n\n assert_eq!(flate2_crc32, libdeflate_crc32);\n\n}\n", "file_path": "tests/integration_test.rs", "rank": 0, "score": 37722.327641832664 }, { "content": "#[test]\n\nfn test_use_crc32_reader_to_compute_crc32_of_fixture_returns_same_crc32_as_flate2() {\n\n // This assumes that flate2's crc32 implementation returns a\n\n // correct value, which is a pretty safe assumption.\n\n\n\n let input_data = read_fixture_content();\n\n\n\n let flate2_crc32 = {\n\n let mut crc = flate2::Crc::new();\n\n crc.update(&input_data);\n\n crc.sum()\n\n };\n\n\n\n let libdeflate_crc32 = {\n\n let mut crc = libdeflater::Crc::new();\n\n crc.update(&input_data);\n\n crc.sum()\n\n };\n\n\n\n assert_eq!(flate2_crc32, libdeflate_crc32);\n\n}\n\n\n", "file_path": "tests/integration_test.rs", "rank": 1, "score": 35115.96694970338 }, { "content": "# libdeflater\n\n\n\n[![Build Status](https://travis-ci.org/adamkewley/libdeflater.svg?branch=master)](https://travis-ci.org/adamkewley/libdeflater)\n\n[![Crates.io](https://img.shields.io/crates/v/libdeflater.svg?maxAge=2592000)](https://crates.io/crates/libdeflater)\n\n[![Documentation](https://docs.rs/libdeflater/badge.svg)](https://docs.rs/libdeflater)\n\n\n\nRust bindings to [libdeflate](https://github.com/ebiggers/libdeflate).\n\nA high-performance library for working with gzip/zlib/deflate data.\n\n\n\n```\n\nlibdeflater = \"0.2.0\"\n\n```\n\n\n\n**Warning**: libdeflate is for *specialized* use-cases. You should\n\n use something like [flate2](https://github.com/alexcrichton/flate2-rs)\n\n if you want a general-purpose deflate library.\n\n\n\nlibdeflate is optimal in applications that have all input data up and\n\nhave a mechanism for chunking large input datasets (e.g. genomic\n\n[bam](https://samtools.github.io/hts-specs/SAMv1.pdf) files, some\n\nobject stores, specialized backends, game netcode packets). It has a\n\nmuch simpler API than [zlib](https://www.zlib.net/manual.html) but\n\ncan't stream data.\n\n\n\n\n\n# Examples\n\n\n\nExample source [here](examples). To run the examples:\n\n\n\n```bash\n\ncargo run --example gz_compress.rs\n\ncargo run --example gz_decompress.rs\n\n```\n\n\n\n\n\n# Benchmarks\n\n\n\nBenchmark data is from both the [Calgary Corpus](https://en.wikipedia.org/wiki/Calgary_corpus), and the\n\n[Canterbury Corpus](http://corpus.canterbury.ac.nz/resources/cantrbry.zip). The\n\nbenchmark tables below were made with this set of steps:\n\n\n\n```bash\n\nwget http://www.data-compression.info/files/corpora/largecalgarycorpus.zip\n\nunzip -d bench_data largecalgarycorpus.zip\n\nwget http://corpus.canterbury.ac.nz/resources/cantrbry.zip\n\nunzip -d bench_data cantrbry.zip\n\n\n\n# runs benchmarks against all files in `bench_data`\n\ncargo bench\n\nscripts/process-bench.rb encode\n\nscripts/process-bench.rb decode\n\n```\n\n\n", "file_path": "README.md", "rank": 4, "score": 17494.51155092853 }, { "content": " read into memory before performing the comparison\n\n\n\n- Comparison made against `flate2` with no feature flags (i.e. `miniz`\n\n implementation). 
`flate2` was chosen because it's the most\n\n popular.\n\n\n\n- Comparisons with other `flate2` backends are available on the\n\n `bench-flate2-miniz-oxide` and `bench-flate2-zlib` branches. The\n\n `zlib` backend is ~8 % faster on some of the corpus entries.\n\n\n\n- Compression performed with default compression setting in both cases\n\n\n\n- Corpus entries were compressed with `flate2` at default compression\n\n level\n", "file_path": "README.md", "rank": 5, "score": 17493.643150674165 }, { "content": "### Compression\n\n\n\nAvg. speedup (on this corpus) is around 2-3x\n\n\n\n```\n\nbench size [KB] speedup flate2 [us] libdeflate [us]\n\nalice29.txt 152 3.2 9883 3132\n\nasyoulik.txt 125 3.0 7792 2572\n\nbib 111 2.8 5380 1907\n\nbook1 768 3.2 55512 17588\n\nbook2 610 2.9 33751 11719\n\ncp.html 24 2.0 571 280\n\nfields.c 11 2.1 229 108\n\ngeo 102 8.4 12142 1441\n\ngrammar.lsp 3 1.7 64 38\n\nkennedy.xls 1029 6.4 62130 9695\n\nlcet10.txt 426 3.1 23727 7733\n\nnews 377 2.7 15814 5898\n\nobj1 21 2.5 543 220\n\nobj2 246 3.7 12202 3336\n\npaper1 53 2.5 2149 865\n\npaper2 82 2.7 4410 1613\n\npaper3 46 2.5 2179 873\n\npaper4 13 2.0 341 174\n\npaper5 11 1.9 269 144\n\npaper6 38 2.3 1350 584\n\npic 513 2.6 11364 4426\n\nplrabn12.txt 481 3.4 38339 11266\n\nprogc 39 2.4 1401 590\n\nprogl 71 2.6 2301 897\n\nprogp 49 2.5 1322 531\n\nptt5 513 2.6 11379 4455\n\nsum 38 3.7 1606 435\n\ntrans 93 2.5 2403 958\n\nxargs.1 4 1.7 72 42\n\n```\n\n\n", "file_path": "README.md", "rank": 6, "score": 17491.571595333564 }, { "content": "### Decompression\n\n\n\nAvg. speedup (on this corpus) is around 2x.\n\n\n\n```\n\nbench size [KB] speedup flate2 [us] libdeflate [us]\n\nalice29.txt 152 2.5 601 244\n\nasyoulik.txt 125 2.2 519 234\n\nbib 111 2.5 413 165\n\nbook1 768 2.2 3204 1478\n\nbook2 610 2.4 2405 987\n\ncp.html 24 2.0 81 41\n\nfields.c 11 2.1 35 17\n\ngeo 102 1.8 553 315\n\ngrammar.lsp 3 1.8 16 9\n\nkennedy.xls 1029 2.1 2352 1105\n\nlcet10.txt 426 2.5 1505 607\n\nnews 377 2.1 1570 738\n\nobj1 21 1.9 94 50\n\nobj2 246 2.3 971 431\n\npaper1 53 2.3 205 90\n\npaper2 82 2.4 341 143\n\npaper3 46 2.1 192 90\n\npaper4 13 1.7 54 32\n\npaper5 11 1.9 52 28\n\npaper6 38 2.1 140 68\n\npic 513 3.0 1140 386\n\nplrabn12.txt 481 2.2 1973 880\n\nprogc 39 2.2 153 70\n\nprogl 71 2.5 214 86\n\nprogp 49 2.3 135 58\n\nptt5 513 3.0 1144 386\n\nsum 38 2.1 147 71\n\ntrans 93 2.4 246 104\n\nxargs.1 4 1.7 19 11\n\n```\n\n\n\n### Benchmark Notes\n\n\n\n- All benchmarks are single-threaded\n\n\n\n- IO/streaming overhead is not considered. The decompressed data is\n", "file_path": "README.md", "rank": 7, "score": 17491.38303942879 }, { "content": "# Benchmark Input Data\n\n\n\nAll files in this directory (apart from this `README.md` file) will be\n\nexercised against flate2 for performance comparisons.\n\n\n\nRunning `cargo bench` after putting data in here runs the\n\nsuite. 
There's a convenience ruby script at `scripts/*` that processes\n\nthe `Criterion` output data into a table.\n\n\n\nSee `benches/custom_benches.rs` for implementation details.\n", "file_path": "bench_data/README.md", "rank": 8, "score": 16361.416935438214 }, { "content": "extern crate libdeflater;\n\n\n\nuse std::vec::Vec;\n\nuse libdeflater::{Compressor, CompressionLvl};\n\n\n", "file_path": "examples/gz_compress.rs", "rank": 10, "score": 10.721690865120998 }, { "content": "extern crate libdeflater;\n\n\n\nuse std::vec::Vec;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::str;\n\nuse libdeflater::Decompressor;\n\n\n", "file_path": "examples/gz_decompress.rs", "rank": 11, "score": 9.7424981417881 }, { "content": "extern crate libdeflater;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::vec::Vec;\n\nuse libdeflater::{Compressor, CompressionLvl, CompressionError, Decompressor, DecompressionError, CompressionLvlError};\n\nuse flate2;\n\n\n\n\n\n\n\n// decompression\n\n\n\n#[test]\n", "file_path": "tests/integration_test.rs", "rank": 12, "score": 9.688382588327054 }, { "content": "extern crate cc;\n\n\n", "file_path": "build.rs", "rank": 13, "score": 6.270956308662411 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\n\n\nmod custom_benches;\n\n\n\ncriterion_group!(benches, custom_benches::run_custom_benches);\n\ncriterion_main!(benches);\n", "file_path": "benches/benchmarks.rs", "rank": 14, "score": 5.684263079248794 }, { "content": "//! Rust bindings to [`libdeflate`], a DEFLATE-based buffer\n\n//! compression/decompression library that works with raw DEFLATE,\n\n//! zlib, and gzip data.\n\n//!\n\n//! **Warning**: Libdeflate is targeted at *specialized*\n\n//! performance-sensitive use-cases where developers have a good\n\n//! understanding of their input/output data. Developers looking for a\n\n//! general-purpose DEFLATE library should use something like\n\n//! [`flate2`], which can handle a much wider range of inputs (network\n\n//! streams, large files, etc.).\n\n//!\n\n//! [`libdeflate`]: https://github.com/ebiggers/libdeflate\n\n//! [`flate2`]: https://github.com/alexcrichton/flate2-rs\n\n//!\n\n//! # Decompression\n\n//!\n\n//! [`Decompressor::new`] can be used to construct a [`Decompressor`],\n\n//! which can decompress:\n\n//!\n\n//! 
- DEFLATE data ([`deflate_decompress`])\n", "file_path": "src/lib.rs", "rank": 15, "score": 3.493605303424034 }, { "content": " flate2_avg = JSON.parse(File.read(File.join(dir, \"flate2_#{suffix}\", \"new\", \"estimates.json\")))[\"Mean\"][\"point_estimate\"]\n\n libdeflate_avg = JSON.parse(File.read(File.join(dir, \"libdeflate_#{suffix}\", \"new\", \"estimates.json\")))[\"Mean\"][\"point_estimate\"]\n\n speedup = (flate2_avg.to_f()/libdeflate_avg.to_f()).round(1).to_s()\n\n\n\n result = {\n\n \"bench\" => group,\n\n \"size [KB]\" => (filesize / 1000).to_s(),\n\n \"flate2 [us]\" => (flate2_avg / 1000).round(0).to_s(),\n\n \"libdeflate [us]\" => (libdeflate_avg / 1000).round(0).to_s(),\n\n \"speedup\" => speedup,\n\n }\n\n\n\n results.push(result)\n\nend\n\n\n\nresults = results.sort_by { |results| results[\"bench\"] }\n\n\n\nheaders = cols.map do |col|\n\n { \"label\" => col, \"width\" => results.map { |result| result[col].size }.append(col.size).max + 2 }\n\nend\n\n\n\nputs headers.map { |header| header[\"label\"].ljust(header[\"width\"]) }.join(\" \")\n\n\n\nfor result in results do\n\n puts headers.map { |header| result[header[\"label\"]].ljust(header[\"width\"]) }.join(\" \")\n\nend\n", "file_path": "scripts/process-bench.rb", "rank": 17, "score": 1.9480371866310477 }, { "content": "/// Returns the CRC32 checksum of the bytes in `data`.\n\n///\n\n/// Note: this is a one-shot method that requires all data\n\n/// up-front. Developers wanting to compute a rolling crc32 from\n\n/// (e.g.) a stream should use [`Crc`](struct.Crc.html)\n\npub fn crc32(data: &[u8]) -> u32 {\n\n let mut crc = Crc::new();\n\n crc.update(&data);\n\n crc.sum()\n\n}\n", "file_path": "src/lib.rs", "rank": 21, "score": 1.7114835417231018 }, { "content": "//! [`zlib_compress_bound`]: struct.Compressor.html#method.zlib_compress_bound\n\n//! 
[`gzip_compress_bound`]: struct.Compressor.html#method.gzip_compress_bound\n\n\n\nmod libdeflate_sys;\n\n\n\nuse crate::libdeflate_sys::{libdeflate_decompressor,\n\n libdeflate_alloc_decompressor,\n\n libdeflate_free_decompressor,\n\n libdeflate_gzip_decompress,\n\n libdeflate_zlib_decompress,\n\n libdeflate_deflate_decompress,\n\n libdeflate_result,\n\n libdeflate_result_LIBDEFLATE_SUCCESS,\n\n libdeflate_result_LIBDEFLATE_BAD_DATA,\n\n libdeflate_result_LIBDEFLATE_INSUFFICIENT_SPACE,\n\n libdeflate_compressor,\n\n libdeflate_alloc_compressor,\n\n libdeflate_deflate_compress_bound,\n\n libdeflate_deflate_compress,\n\n libdeflate_zlib_compress_bound,\n", "file_path": "src/lib.rs", "rank": 22, "score": 1.6071371610176453 }, { "content": " libdeflate_result_LIBDEFLATE_INSUFFICIENT_SPACE => {\n\n Err(DecompressionError::InsufficientSpace)\n\n },\n\n _ => {\n\n panic!(\"libdeflate_deflate_decompress returned an unknown error type: this is an internal bug that **must** be fixed\");\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Decompressor {\n\n fn drop(&mut self) {\n\n unsafe {\n\n libdeflate_free_decompressor(self.p);\n\n }\n\n }\n\n}\n\n\n\n/// Compression level used by a [`Compressor`](struct.Compressor.html)\n", "file_path": "src/lib.rs", "rank": 23, "score": 1.3257456088303239 }, { "content": " }\n\n\n\n #[test]\n\n fn can_use_compressor_to_compress_trivially_compressable_data() {\n\n unsafe {\n\n let in_data: [u8; 1<<16] = [0; 1<<16];\n\n let mut out_data: [u8; 1<<16] = [0; 1<<16];\n\n let compressor = libdeflate_alloc_compressor(MAX_COMP_LVL);\n\n let sz = libdeflate_deflate_compress(compressor,\n\n in_data.as_ptr() as *const core::ffi::c_void,\n\n in_data.len(),\n\n out_data.as_mut_ptr() as *mut core::ffi::c_void,\n\n out_data.len());\n\n assert_ne!(sz, 0);\n\n assert!(sz < 100);\n\n }\n\n }\n\n\n\n #[test]\n\n fn can_call_crc32() {\n", "file_path": "src/libdeflate_sys.rs", "rank": 24, "score": 1.0521319681047068 }, { "content": "}\n\n\n\n// Basic tests for Rust-to-C bindings. These tests are just for quick\n\n// internal checks to make sure that the bindgen build script built\n\n// something sane-looking. 
User-facing tests are in `tests/`\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const MIN_COMP_LVL: i32 = 1;\n\n const MAX_COMP_LVL: i32 = 12;\n\n\n\n #[test]\n\n fn can_make_decompressor_at_each_compression_lvl() {\n\n unsafe {\n\n for lvl in MIN_COMP_LVL..MAX_COMP_LVL+1 {\n\n let ptr = libdeflate_alloc_compressor(lvl);\n\n\n\n assert!(!ptr.is_null());\n\n\n", "file_path": "src/libdeflate_sys.rs", "rank": 25, "score": 0.9597004614560549 }, { "content": "#!/usr/bin/env ruby\n\n\n\n# This just processes the CSV data made by Criterion into a table\n\n# that's easier to read in the README\n\n\n\nrequire 'json'\n\n\n\nsuffix = ARGV[0].strip()\n\n\n\nif suffix != \"encode\" and suffix != \"decode\"\n\n STDERR.puts \"#{suffix}: argument must be either 'encode' or 'decode'\"\n\n exit 1\n\nend\n\n\n\nresults = []\n\ncols = [\"bench\", \"size [KB]\", \"speedup\", \"flate2 [us]\", \"libdeflate [us]\"]\n\n\n\nfor dir in Dir.glob(\"target/criterion/**\") do\n\n group = dir.split(\"/\")[-1].downcase()\n\n filesize = File.size(File.join(\"bench_data\", group))\n", "file_path": "scripts/process-bench.rb", "rank": 26, "score": 0.943534553908338 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(dead_code)]\n\n#![allow(bad_style)]\n\n\n\n#[repr(C)]\n\npub struct libdeflate_compressor { _unused : [ u8 ; 0 ] , }\n\n#[repr(C)]\n\npub struct libdeflate_decompressor { _unused : [ u8 ; 0 ] , }\n\npub const libdeflate_result_LIBDEFLATE_SUCCESS: libdeflate_result = 0;\n\npub const libdeflate_result_LIBDEFLATE_BAD_DATA: libdeflate_result = 1;\n\npub const libdeflate_result_LIBDEFLATE_SHORT_OUTPUT: libdeflate_result = 2;\n\npub const libdeflate_result_LIBDEFLATE_INSUFFICIENT_SPACE: libdeflate_result = 3;\n\npub type libdeflate_result = u32 ;\n\n\n\nextern \"C\" {\n\n pub fn libdeflate_alloc_decompressor() -> *mut libdeflate_decompressor;\n\n pub fn libdeflate_free_decompressor(decompressor: *mut libdeflate_decompressor);\n\n\n", "file_path": "src/libdeflate_sys.rs", "rank": 27, "score": 0.7193932036388495 }, { "content": "//! - zlib data ([`zlib_decompress`])\n\n//! - gzip data ([`gzip_decompress`])\n\n//!\n\n//! **Note**: `libdeflate` requires that the input *and* output\n\n//! buffers are pre-allocated before decompressing. Because of this,\n\n//! you will at least need to know the upper bound on how large the\n\n//! compressed data will decompress to; otherwise, a `decompress_*`\n\n//! function call will return `DecompressionError::InsufficientSpace`\n\n//!\n\n//! [`Decompressor::new`]: struct.Decompressor.html#method.new\n\n//! [`Decompressor`]: struct.Decompressor.html\n\n//! [`deflate_decompress`]: struct.Decompressor.html#method.deflate_decompress\n\n//! [`zlib_decompress`]: struct.Decompressor.html#method.zlib_decompress\n\n//! [`gzip_decompress`]: struct.Decompressor.html#method.gzip_decompress\n\n//! [`DecompressionError::InsufficientSpace`]: enum.DecompressionError.html\n\n//!\n\n//! # Compression\n\n//!\n\n//! `Compressor::new` can be used to construct a [`Compressor`], which\n\n//! can compress data into the following formats:\n", "file_path": "src/lib.rs", "rank": 28, "score": 0.7101493532174326 } ]
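The lib.rs documentation quoted above stresses that libdeflate works on whole, pre-allocated buffers: the compressor exposes `*_compress_bound` helpers for sizing the output, and a decompress call fails with `DecompressionError::InsufficientSpace` if the caller's buffer is too small. Below is a minimal round-trip sketch of that usage, assuming the libdeflater API exactly as exercised in the snippets above (`Compressor::zlib_compress_bound`/`zlib_compress`, `Decompressor::zlib_decompress`); the helper name `zlib_roundtrip` is illustrative and not part of the crate.

// Sketch only: zlib round-trip with pre-allocated buffers, mirroring the calls
// used in benches/custom_benches.rs above. Buffer sizing here uses a safe
// `vec![0; n]` + `truncate` pattern instead of the benchmark's `unsafe set_len`.
use libdeflater::{CompressionLvl, Compressor, Decompressor};

fn zlib_roundtrip(data: &[u8]) -> Vec<u8> {
    let mut compressor = Compressor::new(CompressionLvl::default());
    // Worst-case compressed size; the output buffer must exist before the call.
    let mut compressed = vec![0u8; compressor.zlib_compress_bound(data.len())];
    let used = compressor.zlib_compress(data, &mut compressed).unwrap();
    compressed.truncate(used);

    let mut decompressor = Decompressor::new();
    // The caller must know an upper bound on the decompressed size up front;
    // an undersized buffer yields DecompressionError::InsufficientSpace.
    let mut out = vec![0u8; data.len()];
    let n = decompressor.zlib_decompress(&compressed, &mut out).unwrap();
    out.truncate(n);
    out
}

This mirrors why the README above positions libdeflate for workloads that already hold the full input in memory and can chunk large datasets, rather than for streaming use.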
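Similarly, the `crc32` doc comment above contrasts the one-shot helper with the rolling `Crc` type for streamed input. A short sketch of both, again assuming only the names that appear in the retrieved tests (`libdeflater::crc32`, `Crc::new`/`update`/`sum`); `checksum_both_ways` is an illustrative name, not part of the crate.

// Sketch only: one-shot vs. rolling CRC32 over the same bytes.
use libdeflater::Crc;

fn checksum_both_ways(chunks: &[&[u8]]) -> (u32, u32) {
    // One-shot: requires the whole buffer up front.
    let whole: Vec<u8> = chunks.concat();
    let one_shot = libdeflater::crc32(&whole);

    // Rolling: update incrementally, e.g. while reading from a stream.
    let mut crc = Crc::new();
    for &chunk in chunks {
        crc.update(chunk);
    }

    (one_shot, crc.sum())
}

Both paths should agree on the same input, which is exactly what the flate2-comparison tests above assert.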
Rust
aoc13/src/main.rs
jadeaffenjaeger/rust_aoc19
4f3f4160377cbfdb76207ab942afc8306b201b2d
use intcomputer::*; use display::*; use std::env; use std::fs; use num_enum::TryFromPrimitive; use std::convert::TryFrom; const WIDTH: usize = 44; const HEIGHT: usize = 20; const SCALE: usize = 25; #[derive(Debug, Clone, Copy, PartialEq, TryFromPrimitive)] #[repr(i64)] enum Tile { Empty = 0, Wall = 1, Block = 2, Paddle = 3, Ball = 4, } struct Arcade<'a> { screen: [Tile; WIDTH * HEIGHT], display: &'a mut Display, computer: IntComputer, score: u32, } impl<'a> Arcade<'a> { pub fn new(program: Vec<i64>, display: &'a mut Display) -> Self { Self { display: display, screen: [Tile::Empty; WIDTH * HEIGHT], computer: IntComputer::new(program), score: 0, } } pub fn run(&mut self) { loop { self.computer.run(); match self.computer.state { ProgramState::Finished => break, ProgramState::Running => continue, ProgramState::WaitingForInput => { self.consume_output(); break; } } } } pub fn left(&mut self) { self.computer.input.push_back(-1); } pub fn right(&mut self) { self.computer.input.push_back(1); } pub fn neutral(&mut self) { self.computer.input.push_back(0); } pub fn consume_output(&mut self) { while self.computer.output.len() > 0 { let x = self.computer.output.pop_front().unwrap(); let y = self.computer.output.pop_front().unwrap(); if x == -1 && y == 0 { let score = self.computer.output.pop_front().unwrap() as u32; self.score = score; } else { let tile = Tile::try_from(self.computer.output.pop_front().unwrap()).unwrap(); self.screen[(x as usize) + (y as usize) * WIDTH] = tile; self.display.set_pixel(x as usize, y as usize, tile as u32); } } } } fn main() -> Result<(), String> { let args: Vec<String> = env::args().collect(); let filename = &args[1]; let contents = fs::read_to_string(filename).unwrap(); let mut display= Display::new(WIDTH, HEIGHT, SCALE, "Aoc Day 13"); let mut program: Vec<i64> = contents .trim() .split(',') .map(|x| x.parse().unwrap()) .collect(); let mut arcade = Arcade::new(program.clone(), &mut display); arcade.run(); arcade.consume_output(); let blocktiles = arcade.screen.iter().filter(|&&t| t == Tile::Block).count(); println!("Solution Part 1: {:?}", blocktiles); program[0] = 2; let mut arcade = Arcade::new(program, &mut display); loop { arcade.run(); if arcade.computer.state == ProgramState::Finished || arcade.display.update() == false { break; } let get_pos = |tiletype| { arcade .screen .iter() .enumerate() .filter(|(_, &t)| t == tiletype) .next() .unwrap() }; let ball_x = get_pos(Tile::Ball).0 % WIDTH; let paddle_x = get_pos(Tile::Paddle).0 % WIDTH; if ball_x < paddle_x { arcade.left() } if ball_x > paddle_x { arcade.right() } if ball_x == paddle_x { arcade.neutral() } } arcade.consume_output(); println!("Solution Part 2: {}", arcade.score); Ok(()) }
use intcomputer::*; use display::*; use std::env; use std::fs; use num_enum::TryFromPrimitive; use std::convert::TryFrom; const WIDTH: usize = 44; const HEIGHT: usize = 20; const SCALE: usize = 25; #[derive(Debug, Clone, Copy, PartialEq, TryFromPrimitive)] #[repr(i64)] enum Tile { Empty = 0, Wall = 1, Block = 2, Paddle = 3, Ball = 4, } struct Arcade<'a> { screen: [Tile; WIDTH * HEIGHT], display: &'a mut Display, computer: IntComputer, score: u32, } impl<'a> Arcade<'a> { pub fn new(program: Vec<i64>, display: &'a mut Display) -> Self { Self { display: display, screen: [Tile::Empty; WIDTH * HEIGHT], computer: IntComputer::new(program), score: 0, } } pub fn run(&mut self) { loo
pub fn left(&mut self) { self.computer.input.push_back(-1); } pub fn right(&mut self) { self.computer.input.push_back(1); } pub fn neutral(&mut self) { self.computer.input.push_back(0); } pub fn consume_output(&mut self) { while self.computer.output.len() > 0 { let x = self.computer.output.pop_front().unwrap(); let y = self.computer.output.pop_front().unwrap(); if x == -1 && y == 0 { let score = self.computer.output.pop_front().unwrap() as u32; self.score = score; } else { let tile = Tile::try_from(self.computer.output.pop_front().unwrap()).unwrap(); self.screen[(x as usize) + (y as usize) * WIDTH] = tile; self.display.set_pixel(x as usize, y as usize, tile as u32); } } } } fn main() -> Result<(), String> { let args: Vec<String> = env::args().collect(); let filename = &args[1]; let contents = fs::read_to_string(filename).unwrap(); let mut display= Display::new(WIDTH, HEIGHT, SCALE, "Aoc Day 13"); let mut program: Vec<i64> = contents .trim() .split(',') .map(|x| x.parse().unwrap()) .collect(); let mut arcade = Arcade::new(program.clone(), &mut display); arcade.run(); arcade.consume_output(); let blocktiles = arcade.screen.iter().filter(|&&t| t == Tile::Block).count(); println!("Solution Part 1: {:?}", blocktiles); program[0] = 2; let mut arcade = Arcade::new(program, &mut display); loop { arcade.run(); if arcade.computer.state == ProgramState::Finished || arcade.display.update() == false { break; } let get_pos = |tiletype| { arcade .screen .iter() .enumerate() .filter(|(_, &t)| t == tiletype) .next() .unwrap() }; let ball_x = get_pos(Tile::Ball).0 % WIDTH; let paddle_x = get_pos(Tile::Paddle).0 % WIDTH; if ball_x < paddle_x { arcade.left() } if ball_x > paddle_x { arcade.right() } if ball_x == paddle_x { arcade.neutral() } } arcade.consume_output(); println!("Solution Part 2: {}", arcade.score); Ok(()) }
p { self.computer.run(); match self.computer.state { ProgramState::Finished => break, ProgramState::Running => continue, ProgramState::WaitingForInput => { self.consume_output(); break; } } } }
function_block-function_prefixed
[ { "content": "fn combine_layers(layer: &[u32], image: &mut [u32]) {\n\n let combine_pixels = |top, bottom| match top {\n\n 2 => bottom,\n\n _ => top,\n\n };\n\n\n\n for (p1, p2) in image.iter_mut().zip(layer.iter()) {\n\n *p1 = combine_pixels(*p1, *p2);\n\n }\n\n}\n\n\n", "file_path": "aoc08/src/main.rs", "rank": 0, "score": 129147.95653226392 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Tile {\n\n Wall,\n\n Empty,\n\n Oxygen,\n\n}\n\n\n", "file_path": "aoc15/src/main.rs", "rank": 2, "score": 108510.02115843979 }, { "content": "fn count_digits(layer: &[u32], digit: u32) -> u32 {\n\n layer.iter().filter(|&&x| x == digit).count() as u32\n\n}\n\n\n", "file_path": "aoc08/src/main.rs", "rank": 3, "score": 97724.3589991931 }, { "content": "fn produce<'a>(node: &'a str, qty: u32, graph: &DiGraphMap<&'a str, Reaction>, inventory: &mut HashMap<&'a str, u32>) -> u32 {\n\n if node == \"ORE\" {\n\n return qty\n\n }\n\n\n\n let neighbors: Vec<_> = graph.neighbors_directed(node, petgraph::Incoming).collect();\n\n let qty_out = graph[(neighbors[0], node)].qty_out;\n\n\n\n let mut total_produced = match inventory.remove(node) {\n\n Some(n) => n,\n\n None => 0};\n\n let mut total_ore = 0;\n\n\n\n while total_produced < qty {\n\n for n in &neighbors {\n\n let r = graph[(*n, node)].qty_in;\n\n total_ore += produce(n, r, graph, inventory);\n\n }\n\n total_produced += qty_out\n\n }\n\n if total_produced > qty {\n\n inventory.insert(node, total_produced - qty);\n\n }\n\n total_ore\n\n}\n\n\n", "file_path": "aoc14/src/main.rs", "rank": 4, "score": 96574.33538550494 }, { "content": "fn phases(input: &mut Vec<i32>, num: usize) {\n\n for _ in 0..num {\n\n *input = fft(&input);\n\n }\n\n}\n\n\n", "file_path": "aoc16/src/main.rs", "rank": 5, "score": 94074.65304858418 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum Instruction {\n\n Add,\n\n Multiply,\n\n Input,\n\n Output,\n\n Halt,\n\n JumpT,\n\n JumpF,\n\n Less,\n\n Equal,\n\n AdjBase,\n\n}\n\n\n", "file_path": "intcomputer/src/lib.rs", "rank": 6, "score": 81498.46774543694 }, { "content": "#[derive(Debug)]\n\nstruct Pin(usize, usize, usize, usize, usize, usize);\n\n\n\nimpl Pin {\n\n fn new(num: usize) -> Pin {\n\n let d0 = num / 100000;\n\n let d1 = (num % 100000) / 10000;\n\n let d2 = (num % 10000) / 1000;\n\n let d3 = (num % 1000) / 100;\n\n let d4 = (num % 100) / 10;\n\n let d5 = num % 10;\n\n Pin(d0, d1, d2, d3, d4, d5)\n\n }\n\n\n\n fn adjacent(&self) -> bool {\n\n self.0 == self.1\n\n || self.1 == self.2\n\n || self.2 == self.3\n\n || self.3 == self.4\n\n || self.4 == self.5\n\n }\n", "file_path": "aoc04/src/main.rs", "rank": 7, "score": 79525.07719579712 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum ParameterMode {\n\n Position,\n\n Immediate,\n\n Relative,\n\n Invalid,\n\n}\n\n\n\nimpl IntComputer {\n\n pub fn new(program: Vec<i64>) -> IntComputer {\n\n IntComputer {\n\n program: program,\n\n pc: 0,\n\n base: 0,\n\n input: VecDeque::new(),\n\n output: VecDeque::new(),\n\n state: ProgramState::Running,\n\n }\n\n }\n\n\n\n // Run program until it halts\n", "file_path": "intcomputer/src/lib.rs", "rank": 8, "score": 79161.71336976138 }, { "content": "#[derive(Debug)]\n\nstruct OpCode {\n\n instr: Instruction,\n\n param_mode: (ParameterMode, ParameterMode, ParameterMode),\n\n}\n\n\n", "file_path": "intcomputer/src/lib.rs", "rank": 9, "score": 78746.65587212666 }, { "content": "fn update_bodies(bodies: &mut Vec<Body>) {\n\n for i in 1..bodies.len() {\n\n let (left, right) = bodies.split_at_mut(i);\n\n for b2 in left {\n\n 
right[0].interact(&b2);\n\n b2.interact(&right[0]);\n\n }\n\n }\n\n\n\n for b in bodies {\n\n b.update_position();\n\n }\n\n}\n\n\n", "file_path": "aoc12/src/main.rs", "rank": 10, "score": 77247.16402025253 }, { "content": "// Recursively count orbits in tree by calculating the distance from the root\n\nfn count_orbits(map: &HashMap<&str, Vec<&str>>, body: &str, depth: u32) -> u32 {\n\n if let Some(bodies) = map.get(body) {\n\n // Tree Node: Result is the cumulated result of all children plus ours\n\n bodies\n\n .iter()\n\n .fold(depth, |acc, &b| acc + count_orbits(map, b, depth + 1))\n\n } else {\n\n // Leaf: Result is the distance from the root (aka traversal depth)\n\n depth\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const INPUT: &str = \"COM)B\n\n B)C\n\n C)D\n\n D)E\n", "file_path": "aoc06/src/main.rs", "rank": 11, "score": 73962.53893479475 }, { "content": "// Retrieve number of unique angles (aka visible stars) from a given grid location\n\nfn count_visible(stars: &Vec<(f32, f32)>, s: &(f32, f32)) -> u32 {\n\n let mut angles: Vec<_> = get_angles_dists(stars, s).iter().map(|p| p.0).collect();\n\n angles.sort_by(|a, b| a.partial_cmp(b).unwrap());\n\n angles.dedup_by(|a, b| approx_eq!(f32, *a, *b));\n\n angles.len() as u32\n\n}\n\n\n", "file_path": "aoc10/src/main.rs", "rank": 12, "score": 65728.28550649932 }, { "content": "fn generate_pattern(len: usize, pos: usize) -> Vec<i32> {\n\n let zeroes = iter::repeat(0).take(pos);\n\n let pos_ones = iter::repeat(1).take(pos);\n\n let neg_ones = iter::repeat(-1).take(pos);\n\n\n\n zeroes\n\n .clone()\n\n .chain(pos_ones)\n\n .chain(zeroes)\n\n .chain(neg_ones)\n\n .cycle()\n\n .skip(1)\n\n .take(len)\n\n .collect()\n\n}\n\n\n", "file_path": "aoc16/src/main.rs", "rank": 13, "score": 62392.06570466406 }, { "content": "// fn read_reaction(&str line) -> (&str, &str, Reaction) {\n\nfn read_reaction<'a>(line: &'a str, graph: &mut DiGraphMap<&'a str, Reaction>) {\n\n let parts: Vec<&str> = line.split(' ').filter(|x| *x != \"=>\").collect();\n\n\n\n let edges: Vec<u32> = parts\n\n .iter()\n\n .step_by(2)\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n let nodes: Vec<_> = parts.iter().skip(1).step_by(2).collect();\n\n\n\n let output = graph.add_node(nodes[nodes.len() - 1]);\n\n\n\n for i in 0..nodes.len() - 1 {\n\n let input = graph.add_node(nodes[i]);\n\n let r = Reaction {\n\n qty_in: edges[i],\n\n qty_out: edges[edges.len() - 1],\n\n };\n\n graph.add_edge(input, output, r);\n\n }\n\n}\n\n\n", "file_path": "aoc14/src/main.rs", "rank": 14, "score": 60757.20437005949 }, { "content": "#[derive(Clone)]\n\nenum Color {\n\n Black = 0,\n\n White = 1,\n\n}\n\n\n", "file_path": "aoc11/src/main.rs", "rank": 15, "score": 53791.20859836083 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nenum Direction {\n\n West,\n\n East,\n\n North,\n\n South,\n\n}\n\n\n", "file_path": "aoc15/src/main.rs", "rank": 16, "score": 53787.93421813599 }, { "content": "enum Direction {\n\n Up,\n\n Down,\n\n Left,\n\n Right,\n\n}\n\n\n", "file_path": "aoc11/src/main.rs", "rank": 17, "score": 53787.93421813599 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Reaction {\n\n qty_in: u32,\n\n qty_out: u32,\n\n}\n\n\n", "file_path": "aoc14/src/main.rs", "rank": 18, "score": 53373.91518989722 }, { "content": "#[derive(Debug, PartialEq, Clone, Hash)]\n\nstruct Body {\n\n position: Vec3,\n\n velocity: Vec3,\n\n}\n\n\n\nimpl Eq for Body {}\n\n\n\nimpl Body {\n\n pub fn new(position: &str) -> Body {\n\n let position: Vec<_> = position\n\n 
.trim_start_matches('<')\n\n .trim_end_matches('>')\n\n .split(',')\n\n .collect();\n\n\n\n let parse_coord = |coord: &str| coord.split('=').nth(1).unwrap().parse::<i64>().unwrap();\n\n\n\n Body {\n\n position: Vec3 {\n\n x: parse_coord(position[0]),\n", "file_path": "aoc12/src/main.rs", "rank": 19, "score": 53370.619911555455 }, { "content": "#[derive(Debug, PartialEq, Hash, Clone)]\n\nstruct Position {\n\n x: i64,\n\n y: i64,\n\n}\n\n\n\nimpl Eq for Position {}\n\n\n", "file_path": "aoc11/src/main.rs", "rank": 20, "score": 53370.619911555455 }, { "content": "#[derive(Debug, PartialEq, Clone, Hash)]\n\nstruct Vec3 {\n\n x: i64,\n\n y: i64,\n\n z: i64,\n\n}\n\n\n\nimpl Vec3 {\n\n pub fn energy(&self) -> i64 {\n\n self.x.abs() + self.y.abs() + self.z.abs()\n\n }\n\n}\n\n\n\nimpl Eq for Vec3 {}\n\n\n", "file_path": "aoc12/src/main.rs", "rank": 21, "score": 53370.619911555455 }, { "content": "struct Grid {\n\n points: HashSet<(i32, i32)>,\n\n pt_queue: Vec<(i32, i32, i32)>,\n\n head: (i32, i32),\n\n steps: i32,\n\n}\n\n\n\nimpl Grid {\n\n fn new(raw_tokens: &str) -> Grid {\n\n let tokens: Vec<&str> = raw_tokens.split(',').collect();\n\n\n\n let mut capacity: usize = 0;\n\n for token in &tokens {\n\n let amt: usize = token[1..token.len()].parse().unwrap();\n\n capacity += amt;\n\n }\n\n\n\n let mut g = Grid {\n\n points: HashSet::with_capacity(capacity),\n\n pt_queue: Vec::with_capacity(capacity),\n", "file_path": "aoc03/src/main.rs", "rank": 22, "score": 53367.745631595055 }, { "content": "#[derive(Debug)]\n\nstruct Robot {\n\n position: Pos,\n\n computer: IntComputer,\n\n empty: Vec<Pos>,\n\n unknown: Vec<Pos>,\n\n walls: Vec<Pos>,\n\n}\n\n\n\nimpl Robot {\n\n pub fn new(program: Vec<i64>) -> Self {\n\n Self {\n\n position: Pos(0, 0),\n\n computer: IntComputer::new(program),\n\n empty: vec![Pos(0, 0)],\n\n unknown: vec![Pos(-1, 0), Pos(1, 0), Pos(0, 1), Pos(0, -1)],\n\n walls: vec![],\n\n }\n\n }\n\n\n\n pub fn step(&mut self, dir: Direction) -> Tile {\n", "file_path": "aoc15/src/main.rs", "rank": 23, "score": 53367.745631595055 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum AbsDirection {\n\n UP,\n\n DOWN,\n\n LEFT,\n\n RIGHT,\n\n}\n\n\n", "file_path": "aoc17/src/main.rs", "rank": 24, "score": 52611.69906357568 }, { "content": "enum RelDirection {\n\n LEFT,\n\n RIGHT,\n\n STRAIGHT,\n\n}\n\n\n", "file_path": "aoc17/src/main.rs", "rank": 25, "score": 52611.69906357568 }, { "content": "struct PaintingRobot {\n\n computer: IntComputer,\n\n pos: Position,\n\n dir: Direction,\n\n painted: HashMap<Position, Color>,\n\n}\n\n\n\nimpl PaintingRobot {\n\n pub fn new(program: Vec<i64>) -> PaintingRobot {\n\n PaintingRobot {\n\n computer: IntComputer::new(program),\n\n pos: Position { x: 0, y: 0 },\n\n dir: Direction::Up,\n\n painted: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn run(&mut self) {\n\n loop {\n\n self.computer.run();\n", "file_path": "aoc11/src/main.rs", "rank": 26, "score": 52196.64156594097 }, { "content": "struct Robot<'a> {\n\n current_position: (isize, isize),\n\n current_direction: AbsDirection,\n\n scaffolding: &'a HashSet<(usize, usize)>,\n\n}\n\n\n\nimpl Robot<'_> {\n\n pub fn tile_ahead(&self) -> (isize, isize) {\n\n let (x, y) = self.current_position;\n\n match self.current_direction {\n\n AbsDirection::UP => (x, y - 1),\n\n AbsDirection::DOWN => (x, y + 1),\n\n AbsDirection::LEFT => (x - 1, y),\n\n AbsDirection::RIGHT => (x + 1, y),\n\n }\n\n }\n\n\n\n pub fn tile_left(&self) -> (isize, isize) {\n\n let (x, y) = self.current_position;\n\n match self.current_direction {\n", 
"file_path": "aoc17/src/main.rs", "rank": 27, "score": 51092.40399061584 }, { "content": "fn print_solution(remaining: Vec<(f32, f32)>, location: &(f32, f32), idx: usize) {\n\n // Recover cartesian offset from angle and distance representation\n\n let offset = (\n\n ((-remaining[idx].0).cos() * remaining[idx].1).round(),\n\n ((-remaining[idx].0).sin() * remaining[idx].1).round(),\n\n );\n\n println!(\n\n \"Solution Part 2: {}\",\n\n (location.1 + offset.1) * 100.0 + location.0 + offset.0\n\n );\n\n}\n", "file_path": "aoc10/src/main.rs", "rank": 29, "score": 48145.018494322205 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\nstruct Pos(i32, i32);\n\n\n\nimpl Pos {\n\n pub fn all_neighbors(&self) -> Vec<Pos> {\n\n vec![\n\n self.neighbor(Direction::North),\n\n self.neighbor(Direction::South),\n\n self.neighbor(Direction::East),\n\n self.neighbor(Direction::West),\n\n ]\n\n }\n\n\n\n pub fn unknown_neighbors(&self, walls: &Vec<Pos>, empty: &Vec<Pos>) -> Vec<Pos> {\n\n self.all_neighbors()\n\n .into_iter()\n\n .filter(|&t| walls.iter().all(|&s| t != s))\n\n .filter(|&t| empty.iter().all(|&s| t != s))\n\n .collect()\n\n }\n\n\n", "file_path": "aoc15/src/main.rs", "rank": 30, "score": 46948.10441587247 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let lines: Vec<&str> = contents.lines().collect();\n\n let map = read_solar_system(lines);\n\n\n\n let orbits = count_orbits(&map, \"COM\", 0);\n\n\n\n println!(\"Solution Part 1: {}\", orbits);\n\n\n\n // Get paths from root to each element\n\n let path_you = find_in_tree(&map, \"COM\", \"YOU\");\n\n let path_san = find_in_tree(&map, \"COM\", \"SAN\");\n\n\n\n // Count how many leading elements are identical for both paths\n\n let common_elements = path_san\n\n .iter()\n\n .zip(path_you.iter())\n\n .filter(|(x, y)| x == y)\n\n .count();\n\n\n\n let solution2 = path_san.len() + path_you.len() - 2 * common_elements;\n\n println!(\"Solution Part 2: {}\", solution2);\n\n}\n\n\n", "file_path": "aoc06/src/main.rs", "rank": 31, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i32> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n println!(\"Solution Part 1: {}\", run_program(&program, 12, 2));\n\n\n\n for noun in 0..=99 {\n\n for verb in 0..=100 {\n\n let result = run_program(&program, noun, verb);\n\n if result == 19690720 {\n\n println!(\"Solution Part 2: {}{}\", noun, verb);\n\n break;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "aoc02/src/main.rs", "rank": 32, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i64> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut comp = IntComputer::new(program.clone());\n\n comp.input.push_back(1);\n\n comp.run();\n\n println!(\"Solution Part 1: {:?}\", comp.output);\n\n\n\n let mut comp = IntComputer::new(program);\n\n comp.input.push_back(2);\n\n comp.run();\n\n println!(\"Solution Part 2: {:?}\", comp.output);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_p1() {}\n\n}\n", "file_path": 
"aoc09/src/main.rs", "rank": 33, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n let stars = star_coords(contents);\n\n let max_visible = stars\n\n .iter()\n\n .map(|s| (s, count_visible(&stars, s)))\n\n .max_by_key(|s| s.1)\n\n .unwrap();\n\n println!(\"Solution Part 1: {}\", max_visible.1);\n\n\n\n let mut remaining: Vec<_> = get_angles_dists(&stars, &max_visible.0);\n\n\n\n // Flip angles for clockwise rotation\n\n remaining = remaining.iter().map(|x| (-x.0, x.1)).collect();\n\n\n\n // Sort by distance first, then angle to encounter the nearest star at any angle first\n\n remaining.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap());\n\n remaining.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n", "file_path": "aoc10/src/main.rs", "rank": 34, "score": 45510.77081716807 }, { "content": "#[test]\n\nfn test_up() {\n\n let grid = Grid::new(\"U2\");\n\n assert!(grid.points.contains(&(0, 1)));\n\n assert!(grid.points.contains(&(0, 2)));\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 35, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i64> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut computer = IntComputer::new(program);\n\n while computer.state == ProgramState::Running {\n\n computer.run();\n\n }\n\n\n\n // Draw to console\n\n let output: String = computer\n\n .output\n\n .clone()\n", "file_path": "aoc17/src/main.rs", "rank": 36, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n let input = num_to_vec(&contents);\n\n\n\n let mut num = input.clone();\n\n phases(&mut num, 100);\n\n\n\n let to_str = |num: Vec<i32>| -> String {\n\n num[0..8]\n\n .iter()\n\n .map(|&x| char::from_digit(x as u32, 10).unwrap())\n\n .collect()\n\n };\n\n println!(\"Solution Part 1: {}\", to_str(num));\n\n\n\n let offset = input[0..7].iter().fold(0, |acc, &x| acc * 10 + x) as usize;\n\n let size = input.len() * 10000;\n\n let mut num_large: Vec<_> = input.into_iter().cycle().take(size).skip(offset).collect();\n", "file_path": "aoc16/src/main.rs", "rank": 37, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).expect(\"Something went wrong reading the file\");\n\n\n\n let masses: Vec<i32> = contents.lines().map(|x| x.parse().unwrap()).collect();\n\n\n\n let total_fuel_simple: i32 = masses\n\n .iter()\n\n .fold(0, |acc, x| acc + fuel_required_simple(*x));\n\n println!(\"Solution Part 1: {}\", total_fuel_simple);\n\n\n\n let total_fuel_complex: i32 = masses\n\n .iter()\n\n .fold(0, |acc, x| acc + fuel_required_complex(*x));\n\n println!(\"Solution Part 2: {}\", total_fuel_complex);\n\n}\n\n\n", "file_path": "aoc01/src/main.rs", "rank": 38, "score": 45510.77081716807 }, { "content": "#[test]\n\nfn test_down() {\n\n let grid = Grid::new(\"D2\");\n\n assert!(grid.points.contains(&(0, -1)));\n\n assert!(grid.points.contains(&(0, -2)));\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 39, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> 
= env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i32> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let max_thrust = (0..=4)\n\n .into_iter()\n\n .permutations(5)\n\n .map(|perm| run_pipeline(&program, perm))\n\n .max()\n\n .unwrap();\n\n println!(\"Solution Part 1: {:?}\", max_thrust);\n\n\n\n let max_thrust = (5..=9)\n", "file_path": "aoc07/src/main.rs", "rank": 40, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i64> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut robot = PaintingRobot::new(program.clone());\n\n robot.run();\n\n println!(\"Solution Part 1: {:?}\", robot.painted.len());\n\n\n\n let mut robot = PaintingRobot::new(program);\n\n robot.painted.insert(Position { x: 0, y: 0 }, Color::White);\n\n robot.run();\n\n\n\n let x_min = robot.painted.keys().map(|p| p.x).min().unwrap();\n", "file_path": "aoc11/src/main.rs", "rank": 41, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i32> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut comp = IntComputer::new(program.clone());\n\n comp.input.push_back(1);\n\n comp.run();\n\n println!(\"Solution Part 1: {:?}\", comp.output);\n\n\n\n let mut comp = IntComputer::new(program);\n\n comp.input.push_back(5);\n\n comp.run();\n\n println!(\"Solution Part 2: {:?}\", comp.output);\n\n}\n", "file_path": "aoc05/src/main.rs", "rank": 42, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let lines: Vec<&str> = contents.lines().collect();\n\n\n\n let g1 = Grid::new(lines[0]);\n\n let g2 = Grid::new(lines[1]);\n\n\n\n let overlap = g1.get_overlap(&g2);\n\n let dist = Grid::smallest_distance(&overlap);\n\n let delay = Grid::smallest_delay(&overlap, &g1, &g2);\n\n println!(\"Solution Part 1: {}\", dist);\n\n println!(\"Solution Part 2: {}\", delay);\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 43, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let lower = 145852;\n\n let upper = 616942;\n\n\n\n let pins: Vec<Pin> = (lower..=upper).into_iter().map(|p| Pin::new(p)).collect();\n\n\n\n let num_pins: usize = pins.iter().filter(|p| p.adjacent() && p.growing()).count();\n\n println!(\"Solution Part 1: {}\", num_pins);\n\n\n\n let num_pins: usize = pins\n\n .iter()\n\n .filter(|p| p.adjacent_doubles() && p.growing())\n\n .count();\n\n println!(\"Solution Part 2: {}\", num_pins);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "aoc04/src/main.rs", "rank": 44, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let mut bodies1: Vec<_> = contents.lines().map(|l| Body::new(l)).collect();\n\n let mut bodies2 = bodies1.clone();\n\n\n\n for _ in 0..1000 {\n\n update_bodies(&mut bodies1);\n\n }\n\n\n\n let total_energy = 
bodies1.iter().fold(0, |acc, b| acc + b.energy());\n\n println!(\"Solution Part 1: {:?}\", total_energy);\n\n\n\n // I got stuck on the second part, so I looked for some help on the internet.\n\n // Two important bits of insight that I probably wouldn't have come up with myself:\n\n //\n\n // 1. The first reoccuring state will always be identical to the initial state (so the loop of states will always be ABCDABCD, never ABCDCDCDCD.\n\n // This means that we only need to compare to the initial state, not every state encountered this far.\n", "file_path": "aoc12/src/main.rs", "rank": 45, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let program: Vec<i64> = contents\n\n .trim()\n\n .split(',')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut comp = IntComputer::new(program.clone());\n\n comp.input.push_back(1);\n\n comp.run();\n\n println!(\"Solution Part 1: {:?}\", comp.output);\n\n\n\n let mut comp = IntComputer::new(program);\n\n // comp.input.push_back(2);\n\n // comp.run();\n\n println!(\"Solution Part 2: {:?}\", 0);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_p1() {}\n\n}\n", "file_path": "aoc15/src/main.rs", "rank": 46, "score": 45510.77081716807 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap();\n\n\n\n let image: Vec<u32> = contents\n\n .trim()\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap())\n\n .collect();\n\n\n\n if let Some(max_layer) = image.chunks(PIC_SIZE).min_by_key(|x| count_digits(x, 0)) {\n\n let prod = count_digits(max_layer, 2) * count_digits(max_layer, 1);\n\n println!(\"Solution Part 1: {:?}\", prod);\n\n }\n\n\n\n let mut output: [u32; PIC_SIZE] = [2; PIC_SIZE];\n\n image\n\n .chunks(PIC_SIZE)\n\n .map(|x| combine_layers(x, &mut output))\n", "file_path": "aoc08/src/main.rs", "rank": 47, "score": 45510.77081716807 }, { "content": "#[test]\n\nfn test_right_up() {\n\n let grid = Grid::new(\"R2,U2\");\n\n println!(\"{:?}\", grid.points);\n\n assert!(grid.points.contains(&(1, 0)));\n\n assert!(grid.points.contains(&(2, 0)));\n\n assert!(grid.points.contains(&(2, 1)));\n\n assert!(grid.points.contains(&(2, 2)));\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 48, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_left() {\n\n let grid = Grid::new(\"L2\");\n\n assert!(grid.points.contains(&(-1, 0)));\n\n assert!(grid.points.contains(&(-2, 0)));\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 49, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_ex1() {\n\n let grid1 = Grid::new(\"R75,D30,R83,U83,L12,D49,R71,U7,L72\");\n\n let grid2 = Grid::new(\"U62,R66,U55,R34,D71,R55,D58,R83\");\n\n let overlap = grid1.get_overlap(&grid2);\n\n assert_eq!(Grid::smallest_distance(&overlap), 159);\n\n assert_eq!(Grid::smallest_delay(&overlap, &grid1, &grid2), 610);\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 50, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_ex2() {\n\n let grid1 = Grid::new(\"R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51\");\n\n let grid2 = Grid::new(\"U98,R91,D20,R16,D67,R40,U7,R15,U6,R7\");\n\n let overlap = grid1.get_overlap(&grid2);\n\n assert_eq!(Grid::smallest_distance(&overlap), 135);\n\n assert_eq!(Grid::smallest_delay(&overlap, &grid1, &grid2), 410);\n\n}\n", "file_path": 
"aoc03/src/main.rs", "rank": 51, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_overlap() {\n\n let grid1 = Grid::new(\"R2,U2\");\n\n let grid2 = Grid::new(\"U2,R2\");\n\n assert_eq!(grid1.get_overlap(&grid2), vec![(2, 2)]);\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 52, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_right() {\n\n let grid = Grid::new(\"R2\");\n\n assert!(grid.points.contains(&(1, 0)));\n\n assert!(grid.points.contains(&(2, 0)));\n\n}\n\n\n", "file_path": "aoc03/src/main.rs", "rank": 53, "score": 44416.26003812879 }, { "content": "#[test]\n\nfn test_fuel_simple() {\n\n assert_eq!(fuel_required_simple(12), 2);\n\n assert_eq!(fuel_required_simple(14), 2);\n\n assert_eq!(fuel_required_simple(1969), 654);\n\n assert_eq!(fuel_required_simple(100756), 33583);\n\n}\n\n\n", "file_path": "aoc01/src/main.rs", "rank": 54, "score": 43409.74056358826 }, { "content": "#[test]\n\nfn test_fuel_complex() {\n\n assert_eq!(fuel_required_complex(12), 2);\n\n assert_eq!(fuel_required_complex(14), 2);\n\n assert_eq!(fuel_required_complex(1969), 966);\n\n assert_eq!(fuel_required_complex(100756), 50346);\n\n}\n", "file_path": "aoc01/src/main.rs", "rank": 55, "score": 43409.74056358826 }, { "content": "fn main() -> Result<(), String> {\n\n let args: Vec<String> = env::args().collect();\n\n let filename = &args[1];\n\n let contents = fs::read_to_string(filename).unwrap().replace(\",\", \"\");\n\n let mut graph = DiGraphMap::<&str, Reaction>::new();\n\n let mut inventory = HashMap::<&str, u32>::new();\n\n\n\n contents.lines().for_each(|l| read_reaction(l, &mut graph));\n\n\n\n let result1 = produce(\"FUEL\", 1, &graph, &mut inventory);\n\n println!(\"Solution Part 1: {:?}\", result1);\n\n\n\n Ok(())\n\n}\n", "file_path": "aoc14/src/main.rs", "rank": 56, "score": 40224.180244745876 }, { "content": "fn print_bodies(bodies: &Vec<Body>) {\n\n for b in bodies {\n\n println!(\"{}\", b.to_string());\n\n }\n\n println!(\"====\");\n\n}\n\n\n", "file_path": "aoc12/src/main.rs", "rank": 58, "score": 37206.004336037426 }, { "content": "fn fuel_required_complex(mass: i32) -> i32 {\n\n let fuel = fuel_required_simple(mass);\n\n if fuel <= 0 {\n\n 0\n\n } else {\n\n fuel + fuel_required_complex(fuel)\n\n }\n\n}\n\n\n", "file_path": "aoc01/src/main.rs", "rank": 59, "score": 36408.05398499317 }, { "content": "fn fuel_required_simple(mass: i32) -> i32 {\n\n 0.max((mass / 3) - 2)\n\n}\n\n\n", "file_path": "aoc01/src/main.rs", "rank": 60, "score": 36408.05398499317 }, { "content": "fn num_to_vec(numstr: &str) -> Vec<i32> {\n\n numstr\n\n .trim()\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap() as i32)\n\n .collect()\n\n}\n\n\n", "file_path": "aoc16/src/main.rs", "rank": 61, "score": 35351.869665690116 }, { "content": "fn fft(input: &Vec<i32>) -> Vec<i32> {\n\n let fft_single = |pos| {\n\n generate_pattern(input.len(), pos)\n\n .iter()\n\n .zip(input.iter())\n\n .filter(|(&p, _)| p != 0)\n\n .fold(0, |acc, (&p, &i)| if p < 0 { acc - i } else { acc + i })\n\n .abs()\n\n % 10\n\n };\n\n (1..=input.len())\n\n .into_iter()\n\n .map(|pos| fft_single(pos))\n\n .collect()\n\n}\n\n\n", "file_path": "aoc16/src/main.rs", "rank": 62, "score": 34434.97657237839 }, { "content": "fn part_two(input: &Vec<i32>) -> Vec<i32> {\n\n let cumsum_rev: Vec<i32> = input\n\n .iter()\n\n .rev()\n\n .scan(0, |state, x| {\n\n *state = (*state + x) % 10;\n\n Some(*state)\n\n })\n\n .collect();\n\n cumsum_rev.into_iter().rev().collect()\n\n}\n\n\n", "file_path": "aoc16/src/main.rs", "rank": 63, "score": 
33692.29033602751 }, { "content": "// Parse a star field into a list of coordinates\n\nfn star_coords(field: String) -> Vec<(f32, f32)> {\n\n let mut coords: Vec<(f32, f32)> = vec![];\n\n for (row_num, line) in field.lines().enumerate() {\n\n for (col_num, val) in line.chars().enumerate() {\n\n if val == '#' {\n\n coords.push((row_num as f32, col_num as f32));\n\n }\n\n }\n\n }\n\n coords\n\n}\n\n\n", "file_path": "aoc10/src/main.rs", "rank": 64, "score": 33692.29033602751 }, { "content": "fn run_pipeline(program: &Vec<i32>, sequence: Vec<i32>) -> i32 {\n\n let mut io = 0;\n\n for i in 0..=4 {\n\n let mut comp = IntComputer::new(program.clone());\n\n comp.input.push_back(sequence[i]);\n\n comp.input.push_back(io);\n\n comp.run();\n\n io = comp.output[0]\n\n }\n\n io\n\n}\n\n\n", "file_path": "aoc07/src/main.rs", "rank": 65, "score": 30837.94303127609 }, { "content": "fn run_pipeline_feedback(program: &Vec<i32>, sequence: Vec<i32>) -> i32 {\n\n let mut computers: Vec<IntComputer> = vec![];\n\n for i in 0..=4 {\n\n let mut comp = IntComputer::new(program.clone());\n\n comp.input.push_back(sequence[i]);\n\n computers.push(comp);\n\n }\n\n\n\n computers[0].input.push_back(0);\n\n\n\n let mut idx = 0;\n\n loop {\n\n let c = &mut computers[idx];\n\n c.run();\n\n let io = c.output.pop_front().unwrap();\n\n\n\n if computers.iter().all(|c| c.state == ProgramState::Finished) {\n\n return io;\n\n }\n\n\n\n idx += 1;\n\n idx %= 5;\n\n\n\n let c = &mut computers[idx];\n\n c.input.push_back(io);\n\n }\n\n}\n\n\n", "file_path": "aoc07/src/main.rs", "rank": 66, "score": 30230.517554954306 }, { "content": "fn read_solar_system(lines: Vec<&str>) -> HashMap<&str, Vec<&str>> {\n\n let mut map: HashMap<&str, Vec<&str>> = HashMap::with_capacity(lines.len());\n\n for line in lines {\n\n let substr: Vec<&str> = line.trim().split(')').collect();\n\n if map.contains_key(substr[0]) {\n\n map.get_mut(substr[0]).unwrap().push(substr[1]);\n\n } else {\n\n map.insert(substr[0], vec![substr[1]]);\n\n }\n\n }\n\n map\n\n}\n\n\n", "file_path": "aoc06/src/main.rs", "rank": 67, "score": 29660.042681447183 }, { "content": "fn run_program(program: &Vec<i32>, noun: i32, verb: i32) -> i32 {\n\n let mut prog = program.clone();\n\n prog[1] = noun;\n\n prog[2] = verb;\n\n let mut comp = intcomputer::IntComputer::new(prog);\n\n comp.run();\n\n comp.program[0]\n\n}\n\n\n", "file_path": "aoc02/src/main.rs", "rank": 68, "score": 29598.83376792798 }, { "content": "use sdl2::event::Event;\n\nuse sdl2::keyboard::Keycode;\n\nuse sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\n\n\nuse std::time::Duration;\n\n\n\npub struct Display {\n\n width: usize,\n\n height: usize,\n\n scale: usize,\n\n\n\n canvas: sdl2::render::Canvas<sdl2::video::Window>,\n\n frame: Vec<u32>,\n\n context: sdl2::Sdl,\n\n}\n\n\n\nimpl Display {\n\n pub fn new(width: usize, height: usize, scale: usize, title: &str) -> Self {\n\n let sdl_context = sdl2::init().unwrap();\n", "file_path": "display/src/lib.rs", "rank": 69, "score": 29595.538135790102 }, { "content": " let video_subsystem = sdl_context.video().unwrap();\n\n let window = video_subsystem\n\n .window(title, (width * scale) as u32, (height * scale) as u32)\n\n .position_centered()\n\n .opengl()\n\n .build()\n\n .map_err(|e| e.to_string())\n\n .unwrap();\n\n Self {\n\n context: sdl_context,\n\n width: width,\n\n height: height,\n\n scale: scale,\n\n canvas: window\n\n .into_canvas()\n\n .build()\n\n .map_err(|e| e.to_string())\n\n .unwrap(),\n\n frame: vec![0; width * height],\n\n }\n", "file_path": 
"display/src/lib.rs", "rank": 70, "score": 29590.425692145112 }, { "content": " }\n\n\n\n pub fn set_pixel(&mut self, x: usize, y: usize, val: u32) {\n\n self.frame[y * self.width + x] = val;\n\n }\n\n\n\n pub fn update(&mut self) -> bool {\n\n let mut event_pump = self.context.event_pump().unwrap();\n\n for event in event_pump.poll_iter() {\n\n match event {\n\n Event::Quit { .. }\n\n | Event::KeyDown {\n\n keycode: Some(Keycode::Escape),\n\n ..\n\n } => return false,\n\n _ => {}\n\n }\n\n }\n\n\n\n self.canvas.set_draw_color(Color::RGB(10, 10, 10));\n", "file_path": "display/src/lib.rs", "rank": 71, "score": 29587.91119709204 }, { "content": " self.canvas.clear();\n\n for (xy, &pixel) in self.frame.iter().enumerate() {\n\n if pixel == 0 {\n\n continue;\n\n }\n\n\n\n self.canvas.set_draw_color(Color::RGB(20, 220, 20));\n\n\n\n let x = ((xy % self.width) * self.scale) as i32;\n\n let y = ((xy / self.width) * self.scale) as i32;\n\n let _ = self\n\n .canvas\n\n .fill_rect(Rect::new(x, y, self.scale as u32, self.scale as u32));\n\n }\n\n self.canvas.present();\n\n ::std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 30));\n\n true\n\n }\n\n}\n", "file_path": "display/src/lib.rs", "rank": 72, "score": 29586.378422651454 }, { "content": "use std::collections::VecDeque;\n\n\n\nconst LEN_IO_INSTR: usize = 2;\n\nconst LEN_BASE_INSTR: usize = 2;\n\nconst LEN_JUMP_INSTR: usize = 3;\n\nconst LEN_ARITH_INSTR: usize = 4;\n\n\n\npub struct IntComputer {\n\n pub program: Vec<i64>,\n\n pub input: VecDeque<i64>,\n\n pub output: VecDeque<i64>,\n\n\n\n base: i64,\n\n pc: usize,\n\n pub state: ProgramState,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum ProgramState {\n\n Finished,\n\n Running,\n\n WaitingForInput,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "intcomputer/src/lib.rs", "rank": 73, "score": 28985.064662298162 }, { "content": " instr: instr,\n\n param_mode: (mode1, mode2, mode3),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_add() {\n\n let mut comp = IntComputer::new(vec![1, 0, 0, 0, 99]);\n\n comp.run();\n\n assert_eq!(comp.program, vec![2, 0, 0, 0, 99])\n\n }\n\n\n\n #[test]\n\n fn test_mul() {\n\n let mut comp = IntComputer::new(vec![2, 3, 0, 3, 99]);\n\n comp.run();\n", "file_path": "intcomputer/src/lib.rs", "rank": 74, "score": 28982.638035595646 }, { "content": " let mut comp = IntComputer::new(vec![109, 2019, 109, -19, 99]);\n\n comp.run();\n\n assert_eq!(comp.base, 2000);\n\n\n\n let mut comp = IntComputer::new(vec![204, 2, 99]);\n\n comp.run();\n\n assert_eq!(comp.output[0], 99);\n\n\n\n let mut comp = IntComputer::new(vec![109, 3, 204, -2, 99]);\n\n comp.input.push_back(42);\n\n comp.run();\n\n assert_eq!(comp.output[0], 3);\n\n }\n\n\n\n #[test]\n\n fn test_p1() {\n\n let mut comp = IntComputer::new(vec![2, 4, 4, 5, 99, 0]);\n\n comp.run();\n\n assert_eq!(comp.program, vec![2, 4, 4, 5, 99, 9801])\n\n }\n", "file_path": "intcomputer/src/lib.rs", "rank": 75, "score": 28982.130441083744 }, { "content": "\n\n #[test]\n\n fn test_p2() {\n\n let mut comp = IntComputer::new(vec![1, 1, 1, 4, 99, 5, 6, 0, 99]);\n\n comp.run();\n\n assert_eq!(comp.program, vec![30, 1, 1, 4, 2, 5, 6, 0, 99])\n\n }\n\n\n\n #[test]\n\n fn test_p3() {\n\n let mut comp = IntComputer::new(vec![1, 1, 1, 4, 99, 5, 6, 0, 99]);\n\n comp.run();\n\n assert_eq!(comp.program, vec![30, 1, 1, 4, 2, 5, 6, 0, 99])\n\n }\n\n\n\n #[test]\n\n fn test_p4() {\n\n let mut comp = IntComputer::new(vec![\n\n 3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 
1106, 0, 36, 98, 0,\n\n 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,\n", "file_path": "intcomputer/src/lib.rs", "rank": 76, "score": 28982.105597077894 }, { "content": " assert_eq!(comp.program, vec![2, 3, 0, 6, 99])\n\n }\n\n\n\n #[test]\n\n fn test_input() {\n\n let mut comp = IntComputer::new(vec![3, 0, 99]);\n\n comp.input.push_back(42);\n\n comp.run();\n\n assert_eq!(comp.program, vec![42, 0, 99])\n\n }\n\n\n\n #[test]\n\n fn test_output() {\n\n let mut comp = IntComputer::new(vec![4, 0, 99]);\n\n comp.run();\n\n assert_eq!(comp.output.front(), Some(&4))\n\n }\n\n\n\n #[test]\n\n fn test_relative() {\n", "file_path": "intcomputer/src/lib.rs", "rank": 77, "score": 28981.08817066114 }, { "content": " fn test_p6() {\n\n let mut comp = IntComputer::new(vec![\n\n 3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,\n\n 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,\n\n 20, 1105, 1, 46, 98, 99,\n\n ]);\n\n comp.input.push_back(7);\n\n comp.run();\n\n assert_eq!(comp.output.front(), Some(&999))\n\n }\n\n}\n", "file_path": "intcomputer/src/lib.rs", "rank": 78, "score": 28981.08085150162 }, { "content": " }\n\n self.program[idx as usize]\n\n }\n\n\n\n // Create additional program space for reads/writes beyond the current end of the program\n\n fn extend_capacity(&mut self, offset: usize) {\n\n let mut extra_capacity: Vec<i64> = vec![0; 1 + offset - self.program.len()];\n\n self.program.append(&mut extra_capacity);\n\n }\n\n}\n\n\n\nimpl OpCode {\n\n // Parse Instruction and mode flags\n\n fn new(opcode: i64) -> OpCode {\n\n let read_mode = |flag| match flag % 10 {\n\n 0 => ParameterMode::Position,\n\n 1 => ParameterMode::Immediate,\n\n 2 => ParameterMode::Relative,\n\n _ => ParameterMode::Invalid,\n\n };\n", "file_path": "intcomputer/src/lib.rs", "rank": 79, "score": 28980.96166036795 }, { "content": " _ => panic!(\"Write with unsupported Paramter Mode\"),\n\n };\n\n if idx >= self.program.len() {\n\n self.extend_capacity(idx);\n\n }\n\n self.program[idx] = value;\n\n }\n\n\n\n // Read from offset relative to current instruction pointer\n\n // will consider the supplied parameter mode for direct/indirect read\n\n fn read(&mut self, offset: usize, param_mode: ParameterMode) -> i64 {\n\n let val = self.program[self.pc + offset];\n\n let idx: usize = match param_mode {\n\n ParameterMode::Immediate => return val,\n\n ParameterMode::Position => val as usize,\n\n ParameterMode::Relative => (self.base + val) as usize,\n\n ParameterMode::Invalid => panic!(\"Invalid read mode!\"),\n\n };\n\n if idx >= self.program.len() {\n\n self.extend_capacity(idx as usize);\n", "file_path": "intcomputer/src/lib.rs", "rank": 80, "score": 28980.56566010132 }, { "content": " 20, 1105, 1, 46, 98, 99,\n\n ]);\n\n comp.input.push_back(9);\n\n comp.run();\n\n assert_eq!(comp.output.front(), Some(&1001))\n\n }\n\n\n\n #[test]\n\n fn test_p5() {\n\n let mut comp = IntComputer::new(vec![\n\n 3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36, 98, 0,\n\n 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000, 1, 20, 4,\n\n 20, 1105, 1, 46, 98, 99,\n\n ]);\n\n comp.input.push_back(8);\n\n comp.run();\n\n assert_eq!(comp.output.front(), Some(&1000))\n\n }\n\n\n\n #[test]\n", "file_path": "intcomputer/src/lib.rs", "rank": 81, "score": 28980.209560699805 }, { "content": "\n\n let result = match opcode.instr {\n\n Instruction::Add => op1 + op2,\n\n 
Instruction::Multiply => op1 * op2,\n\n Instruction::Less => (op1 < op2) as i64,\n\n Instruction::Equal => (op1 == op2) as i64,\n\n _ => 0,\n\n };\n\n\n\n self.write(3, mode3, result);\n\n self.pc += LEN_ARITH_INSTR;\n\n }\n\n\n\n // Write to memory given by the value at offset\n\n // Will interpret the value according to the supplied paramter mode\n\n fn write(&mut self, offset: i64, mode: ParameterMode, value: i64) {\n\n let val = self.program[self.pc + offset as usize];\n\n let idx = match mode {\n\n ParameterMode::Position => val as usize,\n\n ParameterMode::Relative => (self.base + val) as usize,\n", "file_path": "intcomputer/src/lib.rs", "rank": 82, "score": 28980.12539305119 }, { "content": " pub fn run(&mut self) {\n\n self.state = ProgramState::Running;\n\n while self.state == ProgramState::Running {\n\n self.exec_instr();\n\n }\n\n }\n\n\n\n // Execute instruction at current PC\n\n fn exec_instr(&mut self) {\n\n let opcode = OpCode::new(self.program[self.pc]);\n\n\n\n match opcode.instr {\n\n Instruction::Add | Instruction::Multiply | Instruction::Less | Instruction::Equal => {\n\n self.arith(opcode);\n\n }\n\n Instruction::Input | Instruction::Output => {\n\n self.io(opcode);\n\n }\n\n Instruction::JumpF | Instruction::JumpT => {\n\n self.jump(opcode);\n", "file_path": "intcomputer/src/lib.rs", "rank": 83, "score": 28980.072761491545 }, { "content": " }\n\n Instruction::AdjBase => {\n\n self.adjust_base(opcode);\n\n }\n\n Instruction::Halt => {\n\n self.state = ProgramState::Finished;\n\n }\n\n }\n\n }\n\n\n\n // Handle input/output instructions\n\n fn io(&mut self, opcode: OpCode) {\n\n let (mode1, _, _) = opcode.param_mode;\n\n match opcode.instr {\n\n Instruction::Input => {\n\n if self.input.is_empty() {\n\n self.state = ProgramState::WaitingForInput;\n\n return;\n\n } else {\n\n let input = self.input.pop_front().unwrap();\n", "file_path": "intcomputer/src/lib.rs", "rank": 84, "score": 28979.81961040356 }, { "content": " let (mode1, mode2, _) = opcode.param_mode;\n\n let val = self.read(1, mode1);\n\n let dst = self.read(2, mode2) as usize;\n\n\n\n if (opcode.instr == Instruction::JumpT && val != 0)\n\n || (opcode.instr == Instruction::JumpF && val == 0)\n\n {\n\n if dst < self.program.len() {\n\n self.pc = dst\n\n }\n\n } else {\n\n self.pc += LEN_JUMP_INSTR;\n\n }\n\n }\n\n\n\n // Handle addition, multiplication and comparisons\n\n fn arith(&mut self, opcode: OpCode) {\n\n let (mode1, mode2, mode3) = opcode.param_mode;\n\n let op1 = self.read(1, mode1);\n\n let op2 = self.read(2, mode2);\n", "file_path": "intcomputer/src/lib.rs", "rank": 85, "score": 28979.39912325602 }, { "content": " self.write(1, mode1, input);\n\n }\n\n }\n\n Instruction::Output => {\n\n let output = self.read(1, mode1);\n\n self.output.push_back(output);\n\n }\n\n _ => {}\n\n }\n\n self.pc += LEN_IO_INSTR;\n\n }\n\n\n\n fn adjust_base(&mut self, opcode: OpCode) {\n\n let (mode1, _, _) = opcode.param_mode;\n\n let val = self.read(1, mode1);\n\n self.base += val;\n\n self.pc += LEN_BASE_INSTR;\n\n }\n\n\n\n fn jump(&mut self, opcode: OpCode) {\n", "file_path": "intcomputer/src/lib.rs", "rank": 86, "score": 28979.159272199788 }, { "content": " let mode1 = read_mode(opcode / 100);\n\n let mode2 = read_mode(opcode / 1000);\n\n let mode3 = read_mode(opcode / 10000);\n\n\n\n let operation = opcode % 100;\n\n let instr = match operation {\n\n 1 => Instruction::Add,\n\n 2 => Instruction::Multiply,\n\n 3 => Instruction::Input,\n\n 4 => Instruction::Output,\n\n 5 => Instruction::JumpT,\n\n 6 => 
Instruction::JumpF,\n\n 7 => Instruction::Less,\n\n 8 => Instruction::Equal,\n\n 9 => Instruction::AdjBase,\n\n 99 => Instruction::Halt,\n\n _ => panic!(\"Unknown Opcode\"),\n\n };\n\n\n\n OpCode {\n", "file_path": "intcomputer/src/lib.rs", "rank": 87, "score": 28977.14425264387 }, { "content": "// Convert all stars into angle and distance representation from a given grid location 's'\n\nfn get_angles_dists(stars: &Vec<(f32, f32)>, s: &(f32, f32)) -> Vec<(f32, f32)> {\n\n let is_self = |p: &(f32, f32)| approx_eq!(f32, p.0, 0.0) && approx_eq!(f32, p.1, 0.0);\n\n stars\n\n .iter()\n\n .map(|s1| (s1.0 - s.0, s1.1 - s.1))\n\n .filter(|p| !is_self(p))\n\n .map(|p| (p.1.atan2(p.0), (p.0.powi(2) + p.1.powi(2)).sqrt()))\n\n .collect()\n\n}\n\n\n", "file_path": "aoc10/src/main.rs", "rank": 88, "score": 27416.148293356237 }, { "content": "fn find_in_tree<'a>(map: &'a HashMap<&str, Vec<&str>>, cur: &'a str, dst: &str) -> Vec<&'a str> {\n\n // Reached leaf node -> recursion end\n\n if !map.contains_key(cur) {\n\n return vec![];\n\n }\n\n\n\n let children = map.get(cur).unwrap();\n\n\n\n // Found destination node in children, return ourselves as part of the path\n\n if children.contains(&dst) {\n\n return vec![cur];\n\n // Recursively search through children\n\n } else {\n\n let mut out: Vec<&str> = vec![];\n\n for c in children {\n\n let mut traversal = find_in_tree(map, c, dst);\n\n if !traversal.is_empty() {\n\n out.push(cur);\n\n }\n\n out.append(&mut traversal);\n\n }\n\n out\n\n }\n\n}\n\n\n", "file_path": "aoc06/src/main.rs", "rank": 89, "score": 24491.641456640038 }, { "content": "use std::env;\n\nuse std::fs;\n\n\n\nconst WIDTH: usize = 25;\n\nconst HEIGHT: usize = 6;\n\nconst PIC_SIZE: usize = WIDTH * HEIGHT;\n\n\n", "file_path": "aoc08/src/main.rs", "rank": 93, "score": 12.453517206481806 }, { "content": " match dir {\n\n Direction::North => self.computer.input.push_back(1),\n\n Direction::South => self.computer.input.push_back(2),\n\n Direction::West => self.computer.input.push_back(3),\n\n Direction::East => self.computer.input.push_back(4),\n\n }\n\n self.computer.run();\n\n let ret = match self.computer.output.pop_front().unwrap() {\n\n 0 => Tile::Wall,\n\n 1 => Tile::Empty,\n\n 2 => Tile::Oxygen,\n\n _ => panic! {\"Unknown Tile type\"},\n\n };\n\n\n\n let dst = self.position.neighbor(dir);\n\n\n\n match ret {\n\n Tile::Wall => self.walls.push(dst),\n\n Tile::Oxygen => self.position = dst,\n\n Tile::Empty => {\n", "file_path": "aoc15/src/main.rs", "rank": 94, "score": 10.05632666976091 }, { "content": "use intcomputer::IntComputer;\n\nuse std::env;\n\nuse std::fs;\n\n\n", "file_path": "aoc09/src/main.rs", "rank": 96, "score": 8.611032564881308 }, { "content": "use intcomputer::IntComputer;\n\nuse std::env;\n\nuse std::fs;\n\n\n", "file_path": "aoc05/src/main.rs", "rank": 97, "score": 8.611032564881308 }, { "content": "use intcomputer::{IntComputer, ProgramState};\n\nuse std::env;\n\nuse std::fs;\n\n\n\nuse itertools::Itertools;\n\n\n", "file_path": "aoc07/src/main.rs", "rank": 98, "score": 8.014423036084374 } ]
Rust
textures/src/imagemap.rs
hackmad/pbr_rust
b7ae75564bf71c4dfea8b20f49d05ac1b89e6734
use super::*; use core::geometry::*; use core::interaction::*; use core::mipmap::*; use core::pbrt::*; use core::spectrum::*; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign}; #[derive(Clone)] pub struct ImageTexture<Tmemory> where Tmemory: Copy + Default + Mul<Float, Output = Tmemory> + MulAssign<Float> + Div<Float, Output = Tmemory> + DivAssign<Float> + Add<Tmemory, Output = Tmemory> + AddAssign + Clamp<Float>, Spectrum: ConvertIn<Tmemory>, { mapping: ArcTextureMapping2D, mipmap: ArcMIPMap<Tmemory>, } macro_rules! new_image_texture { ($t: ty) => { impl ImageTexture<$t> { pub fn new( mapping: ArcTextureMapping2D, path: &str, filtering_method: FilteringMethod, wrap_mode: ImageWrap, scale: Float, gamma: bool, max_anisotropy: Float, ) -> Self { let tex_info = TexInfo::new( path, filtering_method, wrap_mode, scale, gamma, max_anisotropy, ); let mipmap = match MIPMapCache::get(tex_info) { Ok(mipmap) => mipmap, Err(err) => panic!("Unable to load MIPMap: {}", err), }; Self { mapping, mipmap } } } }; } new_image_texture!(RGBSpectrum); new_image_texture!(Float); impl Texture<Spectrum> for ImageTexture<RGBSpectrum> { fn evaluate(&self, hit: &Hit, uv: &Point2f, der: &Derivatives) -> Spectrum { let TextureMap2DResult { p: st, dstdx, dstdy, } = self.mapping.map(hit, uv, der); let mem = self.mipmap.lookup(&st, &dstdx, &dstdy); let rgb = mem.to_rgb(); Spectrum::from_rgb(&rgb, None) } } impl Texture<Float> for ImageTexture<Float> { fn evaluate(&self, hit: &Hit, uv: &Point2f, der: &Derivatives) -> Float { let TextureMap2DResult { p: st, dstdx, dstdy, } = self.mapping.map(hit, uv, der); self.mipmap.lookup(&st, &dstdx, &dstdy) } } macro_rules! from_params { ($t: ty) => { impl From<(&TextureParams, ArcTransform, &str)> for ImageTexture<$t> { fn from(p: (&TextureParams, ArcTransform, &str)) -> Self { let (tp, tex2world, cwd) = p; let map = get_texture_mapping(tp, tex2world); let max_anisotropy = tp.find_float("maxanisotropy", 8.0); let filtering_method = if tp.find_bool("trilinear", false) { FilteringMethod::Trilinear } else { FilteringMethod::Ewa }; let wrap = tp.find_string("wrap", String::from("repeat")); let wrap_mode = match &wrap[..] { "black" => ImageWrap::Black, "clamp" => ImageWrap::Clamp, _ => ImageWrap::Repeat, }; let scale = tp.find_float("scale", 1.0); let path = tp.find_filename("filename", String::from(""), cwd); let gamma = tp.find_bool("gamma", path.ends_with(".tga") || path.ends_with(".png")); Self::new( map, &path, filtering_method, wrap_mode, scale, gamma, max_anisotropy, ) } } }; } from_params!(RGBSpectrum); from_params!(Float);
use super::*; use core::geometry::*; use core::interaction::*; use core::mipmap::*; use core::pbrt::*; use core::spectrum::*; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign}; #[derive(Clone)] pub struct ImageTexture<Tmemory> where Tmemory: Copy + Default + Mul<Float, Output = Tmemory> + MulAssign<Float> + Div<Float, Output = Tmemory> + DivAssign<Float> + Add<Tmemory, Output = Tmemory> + AddAssign + Clamp<Float>, Spectrum: ConvertIn<Tmemory>, { mapping: ArcTextureMapping2D, mipmap: ArcMIPMap<Tmemory>, } macro_rules! new_image_texture { ($t: ty) => { impl ImageTexture<$t> { pub fn new( mapping: ArcTextureMapping2D, path: &str, filtering_method: FilteringMethod, wrap_mode: ImageWrap, scale: Float, gamma: bool, max_anisotropy: Float, ) -> Self { let tex_info = TexInfo::new( path, filtering_method, wrap_mode, scale, gamma, max_anisotropy, ); let mipmap = match MIPMapCache::get(tex_info) { Ok(mipmap) => mipmap, Err(err) => panic!("Unable to load MIPMap: {}", err), }; Self { mapping, mipmap } } } }; } new_image_texture!(RGBSpectrum); new_image_texture!(Float); impl Texture<Spectrum> for ImageTexture<RGBSpectrum> { fn evaluate(&self, hit: &Hit, uv: &Point2f, der: &Derivatives) -> Spectrum {
let mem = self.mipmap.lookup(&st, &dstdx, &dstdy); let rgb = mem.to_rgb(); Spectrum::from_rgb(&rgb, None) } } impl Texture<Float> for ImageTexture<Float> { fn evaluate(&self, hit: &Hit, uv: &Point2f, der: &Derivatives) -> Float { let TextureMap2DResult { p: st, dstdx, dstdy, } = self.mapping.map(hit, uv, der); self.mipmap.lookup(&st, &dstdx, &dstdy) } } macro_rules! from_params { ($t: ty) => { impl From<(&TextureParams, ArcTransform, &str)> for ImageTexture<$t> { fn from(p: (&TextureParams, ArcTransform, &str)) -> Self { let (tp, tex2world, cwd) = p; let map = get_texture_mapping(tp, tex2world); let max_anisotropy = tp.find_float("maxanisotropy", 8.0); let filtering_method = if tp.find_bool("trilinear", false) { FilteringMethod::Trilinear } else { FilteringMethod::Ewa }; let wrap = tp.find_string("wrap", String::from("repeat")); let wrap_mode = match &wrap[..] { "black" => ImageWrap::Black, "clamp" => ImageWrap::Clamp, _ => ImageWrap::Repeat, }; let scale = tp.find_float("scale", 1.0); let path = tp.find_filename("filename", String::from(""), cwd); let gamma = tp.find_bool("gamma", path.ends_with(".tga") || path.ends_with(".png")); Self::new( map, &path, filtering_method, wrap_mode, scale, gamma, max_anisotropy, ) } } }; } from_params!(RGBSpectrum); from_params!(Float);
let TextureMap2DResult { p: st, dstdx, dstdy, } = self.mapping.map(hit, uv, der);
assignment_statement
[]
Rust
imxrt1062-pac/imxrt1062-ocotp/src/crc_addr.rs
Shock-1/teensy4-rs
effc3b290f1be3c7aef62a78e82dbfbc27aa6370
#[doc = "Reader of register CRC_ADDR"] pub type R = crate::R<u32, super::CRC_ADDR>; #[doc = "Writer for register CRC_ADDR"] pub type W = crate::W<u32, super::CRC_ADDR>; #[doc = "Register CRC_ADDR `reset()`'s with value 0"] impl crate::ResetValue for super::CRC_ADDR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `DATA_START_ADDR`"] pub type DATA_START_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_START_ADDR`"] pub struct DATA_START_ADDR_W<'a> { w: &'a mut W, } impl<'a> DATA_START_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff); self.w } } #[doc = "Reader of field `DATA_END_ADDR`"] pub type DATA_END_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_END_ADDR`"] pub struct DATA_END_ADDR_W<'a> { w: &'a mut W, } impl<'a> DATA_END_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8); self.w } } #[doc = "Reader of field `CRC_ADDR`"] pub type CRC_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CRC_ADDR`"] pub struct CRC_ADDR_W<'a> { w: &'a mut W, } impl<'a> CRC_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16); self.w } } #[doc = "Reader of field `OTPMK_CRC`"] pub type OTPMK_CRC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OTPMK_CRC`"] pub struct OTPMK_CRC_W<'a> { w: &'a mut W, } impl<'a> OTPMK_CRC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `RSVD0`"] pub type RSVD0_R = crate::R<u8, u8>; impl R { #[doc = "Bits 0:7 - DATA_START_ADDR"] #[inline(always)] pub fn data_start_addr(&self) -> DATA_START_ADDR_R { DATA_START_ADDR_R::new((self.bits & 0xff) as u8) } #[doc = "Bits 8:15 - DATA_END_ADDR"] #[inline(always)] pub fn data_end_addr(&self) -> DATA_END_ADDR_R { DATA_END_ADDR_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 16:23 - CRC_ADDR"] #[inline(always)] pub fn crc_addr(&self) -> CRC_ADDR_R { CRC_ADDR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bit 24 - OTPMK_CRC"] #[inline(always)] pub fn otpmk_crc(&self) -> OTPMK_CRC_R { OTPMK_CRC_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bits 25:31 - RSVD0"] #[inline(always)] pub fn rsvd0(&self) -> RSVD0_R { RSVD0_R::new(((self.bits >> 25) & 0x7f) as u8) } } impl W { #[doc = "Bits 0:7 - DATA_START_ADDR"] #[inline(always)] pub fn data_start_addr(&mut self) -> DATA_START_ADDR_W { DATA_START_ADDR_W { w: self } } #[doc = "Bits 8:15 - DATA_END_ADDR"] #[inline(always)] pub fn data_end_addr(&mut self) -> DATA_END_ADDR_W { DATA_END_ADDR_W { w: self } } #[doc = "Bits 16:23 - CRC_ADDR"] #[inline(always)] pub fn crc_addr(&mut self) -> CRC_ADDR_W { CRC_ADDR_W { w: self } } #[doc = "Bit 24 - OTPMK_CRC"] #[inline(always)] pub fn otpmk_crc(&mut self) -> OTPMK_CRC_W { OTPMK_CRC_W { w: self } } }
#[doc = "Reader of register CRC_ADDR"] pub type R = crate::R<u32, super::CRC_ADDR>; #[doc = "Writer for register CRC_ADDR"] pub type W = crate::W<u32, super::CRC_ADDR>; #[doc = "Register CRC_ADDR `reset()`'s with value 0"] impl crate::ResetValue for super::CRC_ADDR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `DATA_START_ADDR`"] pub type DATA_START_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_START_ADDR`"] pub struct DATA_START_ADDR_W<'a> { w: &'a mut W, } impl<'a> DATA_START_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff); self.w } } #[doc = "Reader of field `DATA_END_ADDR`"] pub type DATA_END_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_END_ADDR`"] pub struct DATA_END_ADDR_W<'a> { w: &'a mut W, } impl<'a> DATA_END_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8); self.w } } #[doc = "Reader of
{ OTPMK_CRC_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bits 25:31 - RSVD0"] #[inline(always)] pub fn rsvd0(&self) -> RSVD0_R { RSVD0_R::new(((self.bits >> 25) & 0x7f) as u8) } } impl W { #[doc = "Bits 0:7 - DATA_START_ADDR"] #[inline(always)] pub fn data_start_addr(&mut self) -> DATA_START_ADDR_W { DATA_START_ADDR_W { w: self } } #[doc = "Bits 8:15 - DATA_END_ADDR"] #[inline(always)] pub fn data_end_addr(&mut self) -> DATA_END_ADDR_W { DATA_END_ADDR_W { w: self } } #[doc = "Bits 16:23 - CRC_ADDR"] #[inline(always)] pub fn crc_addr(&mut self) -> CRC_ADDR_W { CRC_ADDR_W { w: self } } #[doc = "Bit 24 - OTPMK_CRC"] #[inline(always)] pub fn otpmk_crc(&mut self) -> OTPMK_CRC_W { OTPMK_CRC_W { w: self } } }
field `CRC_ADDR`"] pub type CRC_ADDR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CRC_ADDR`"] pub struct CRC_ADDR_W<'a> { w: &'a mut W, } impl<'a> CRC_ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16); self.w } } #[doc = "Reader of field `OTPMK_CRC`"] pub type OTPMK_CRC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OTPMK_CRC`"] pub struct OTPMK_CRC_W<'a> { w: &'a mut W, } impl<'a> OTPMK_CRC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `RSVD0`"] pub type RSVD0_R = crate::R<u8, u8>; impl R { #[doc = "Bits 0:7 - DATA_START_ADDR"] #[inline(always)] pub fn data_start_addr(&self) -> DATA_START_ADDR_R { DATA_START_ADDR_R::new((self.bits & 0xff) as u8) } #[doc = "Bits 8:15 - DATA_END_ADDR"] #[inline(always)] pub fn data_end_addr(&self) -> DATA_END_ADDR_R { DATA_END_ADDR_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 16:23 - CRC_ADDR"] #[inline(always)] pub fn crc_addr(&self) -> CRC_ADDR_R { CRC_ADDR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bit 24 - OTPMK_CRC"] #[inline(always)] pub fn otpmk_crc(&self) -> OTPMK_CRC_R
random
[]
Rust
src/transaction.rs
tiagolobocastro/heath
4faedf8f37acba0ac183273cfc0cd00286526ded
use crate::{ client::ClientId, csv::transaction::{TransactionId, TransactionLogCsv, TransactionType}, transactions::TransactionInfo, }; use serde::{Deserialize, Serialize}; impl TransactionInfo for TransactionLog { fn transaction_type(&self) -> TransactionType { match self { Self::Deposit { .. } => TransactionType::Deposit, Self::Withdrawal { .. } => TransactionType::Withdrawal, Self::Dispute { .. } => TransactionType::Dispute, Self::Resolve { .. } => TransactionType::Resolve, Self::Chargeback { .. } => TransactionType::Chargeback, } } fn client_id(&self) -> ClientId { match self { Self::Deposit { common, .. } => common.client_id, Self::Withdrawal { common, .. } => common.client_id, Self::Dispute { common } => common.client_id, Self::Resolve { common } => common.client_id, Self::Chargeback { common } => common.client_id, } } fn transaction_id(&self) -> TransactionId { match self { Self::Deposit { common, .. } => common.tx_id, Self::Withdrawal { common, .. } => common.tx_id, Self::Dispute { common } => common.tx_id, Self::Resolve { common } => common.tx_id, Self::Chargeback { common } => common.tx_id, } } fn amount(&self) -> Option<rust_decimal::Decimal> { match self { Self::Deposit { amount, .. } => Some(*amount), Self::Withdrawal { amount, .. } => Some(*amount), Self::Dispute { .. } => None, Self::Resolve { .. } => None, Self::Chargeback { .. } => None, } } } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(tag = "type")] pub(crate) enum TransactionLog { Deposit { #[serde(flatten)] common: TransactionLogCommon, #[serde(rename = "amount")] amount: rust_decimal::Decimal, }, Withdrawal { common: TransactionLogCommon, #[serde(rename = "amount")] amount: rust_decimal::Decimal, }, Dispute { #[serde(flatten)] common: TransactionLogCommon, }, Resolve { #[serde(flatten)] common: TransactionLogCommon, }, Chargeback { #[serde(flatten)] common: TransactionLogCommon, }, } #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) enum DisputeSate { Undisputed, Disputed(rust_decimal::Decimal), Chargeback, } impl Default for DisputeSate { fn default() -> Self { Self::Undisputed } } #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct TransactionLogCommon { #[serde(rename = "client")] client_id: ClientId, #[serde(rename = "tx")] tx_id: TransactionId, } impl From<TransactionLogCsv> for TransactionLog { fn from(tx: TransactionLogCsv) -> Self { let common = TransactionLogCommon { client_id: tx.client_id(), tx_id: tx.transaction_id(), }; match tx.transaction_type() { TransactionType::Deposit => Self::Deposit { common, amount: tx.amount().expect("Deposit should contain the amount"), }, TransactionType::Withdrawal => Self::Withdrawal { common, amount: tx.amount().expect("Withdrawal should contain the amount"), }, TransactionType::Dispute => Self::Dispute { common }, TransactionType::Resolve => Self::Resolve { common }, TransactionType::Chargeback => Self::Chargeback { common }, } } } impl TransactionLog { #[allow(dead_code)] pub(crate) fn log_info(&self) { tracing::info!(type_=?self.transaction_type(), client=self.client_id(), tx=%self.transaction_id(), amount=?self.amount()); } }
use crate::{ client::ClientId, csv::transaction::{TransactionId, TransactionLogCsv, TransactionType}, transactions::TransactionInfo, }; use serde::{Deserialize, Serialize}; impl TransactionInfo for TransactionLog { fn transaction_type(&self) -> TransactionType { match self { Self::Deposit { .. } => TransactionType::Deposit, Self::Withdrawal { .. } => TransactionType::Withdrawal, Self::Dispute { .. } => TransactionType::Dispute, Self::Resolve { .. } => TransactionType::Resolve, Self::Chargeback { .. } => TransactionType::Chargeback, } } fn client_id(&self) -> ClientId { match self { Self::Deposit { common, .. } => common.client_id, Self::Withdrawal { common, .. } => common.client_id, Self::Dispute { common } => common.client_id, Self::Resolve { common } => common.client_id, Self::Chargeback { common } => common.client_id, } } fn transaction_id(&self) -> TransactionId { match self { Self::Deposit { common, .. } => common.tx_id, Self::Withdrawal { common, .. } => common.tx_id, Self::Dispute { common } => common.tx_id, Self::Resolve { common } => common.tx_id, Self::Chargeback { common } => common.tx_id, } } fn amount(&self) -> Option<rust_decimal::Decimal> { match self { Self::Deposit { amount, .. } => Some(*amount), Self::Withdrawal { amount, .. } => Some(*amount), Self::Dispute { .. } => None, Self::Resolve { .. } => None, Self::Chargeback { .. } => None, } } } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(tag = "type")] pub(crate) enum TransactionLog { Deposit { #[serde(flatten)] common: TransactionLogCommon, #[serde(rename = "amount")] amount: rust_decimal::Decimal, }, Withdrawal { common: TransactionLogCommon, #[serde(rename = "amount")] amount: rust_decimal::Decimal, }, Dispute { #[serde(flatten)] common: TransactionLogCommon, }, Resolve { #[serde(flatten)] common: TransactionLogCommon, }, Chargeback { #[serde(flatten)] common: TransactionLogCommon, }, } #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) enum DisputeSate { Undisputed, Disputed(rust_decimal::Decimal), Chargeback, } impl Default for DisputeSate { fn default() -> Self { Self::Undisputed } } #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct TransactionLogCommon { #[serde(rename = "client")] client_id: ClientId, #[serde(rename = "tx")] tx_id: TransactionId, } impl From<TransactionLogCsv> for TransactionLog { fn from(tx: TransactionLogCsv) -> Self {
match tx.transaction_type() { TransactionType::Deposit => Self::Deposit { common, amount: tx.amount().expect("Deposit should contain the amount"), }, TransactionType::Withdrawal => Self::Withdrawal { common, amount: tx.amount().expect("Withdrawal should contain the amount"), }, TransactionType::Dispute => Self::Dispute { common }, TransactionType::Resolve => Self::Resolve { common }, TransactionType::Chargeback => Self::Chargeback { common }, } } } impl TransactionLog { #[allow(dead_code)] pub(crate) fn log_info(&self) { tracing::info!(type_=?self.transaction_type(), client=self.client_id(), tx=%self.transaction_id(), amount=?self.amount()); } }
let common = TransactionLogCommon { client_id: tx.client_id(), tx_id: tx.transaction_id(), };
assignment_statement
[ { "content": " account: BankAccount,\n\n disputed_tx: Option<TransactionLog>,\n\n}\n\nimpl Dispute {\n\n pub(crate) fn new(account: BankAccount, disputed_tx: Option<TransactionLog>) -> Self {\n\n Self {\n\n account,\n\n disputed_tx,\n\n }\n\n }\n\n}\n\nimpl Transaction for Dispute {\n\n #[tracing::instrument(err)]\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n // disputes for locked accounts are currently allowed\n\n match &self.disputed_tx {\n\n None => {\n\n tracing::debug!(account=?self.account, \"Disputed Transaction not found.\");\n\n Ok(())\n\n }\n", "file_path": "src/transactions/dispute.rs", "rank": 0, "score": 47963.246070783585 }, { "content": " Some(disputed_tx) => {\n\n // Check that we don't dispute the same account twice for the same transaction\n\n let disputed_id = disputed_tx.transaction_id();\n\n match self.account.find_dispute(disputed_id) {\n\n DisputeSate::Undisputed => {\n\n if let Some(amount) = disputed_tx.amount() {\n\n let available = self.account.available_funds();\n\n if available >= amount {\n\n let new_available = available - amount;\n\n self.account.set_available_funds(new_available);\n\n self.account\n\n .add_held_funds(amount, disputed_tx.transaction_id());\n\n } else {\n\n // I did not find the correct procedure in the document so I'm\n\n // assuming that here we take the\n\n // same approach as a withdrawal? Or would we\n\n // allow the account funds to go negative?\n\n tracing::debug!(account=?self.account, disputed_tx=?disputed_tx, \"Disputed account does not have the funds!\");\n\n }\n\n }\n", "file_path": "src/transactions/dispute.rs", "rank": 1, "score": 47960.90951108163 }, { "content": "use crate::{\n\n account::{AccountInfo, SetAccountInfo},\n\n bank::BankAccount,\n\n transaction::{DisputeSate, TransactionLog},\n\n transactions::{Transaction, TransactionInfo},\n\n};\n\n\n\n/// A dispute represents a client's claim that a transaction was erroneous and should be reversed.\n\n/// The transaction shouldn't be reversed yet but the associated funds should be held. This means\n\n/// that the clients available funds should decrease by the amount disputed, their held funds should\n\n/// increase by the amount disputed, while their total funds should remain the same.\n\n/// A dispute looks like\n\n/// type client tx amount\n\n/// dispute 1 1\n\n/// # Non-Fatal Error:\n\n/// Notice that a dispute does not state the amount disputed. Instead a dispute references the\n\n/// transaction that is disputed by ID. 
If the tx specified by the dispute doesn't exist you can\n\n/// ignore it and assume this is an error on our partners side\n\n#[derive(Debug)]\n\npub(super) struct Dispute {\n", "file_path": "src/transactions/dispute.rs", "rank": 2, "score": 47959.64607313361 }, { "content": " }\n\n DisputeSate::Disputed(_) => {\n\n tracing::debug!(account=?self.account, disputed_tx=?disputed_tx, \"Transaction is already disputed\");\n\n }\n\n DisputeSate::Chargeback => {\n\n tracing::debug!(account=?self.account, disputed_tx=?disputed_tx, \"Transaction has already been charged back\");\n\n }\n\n }\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{bank::tests::test, init_tracing};\n\n\n\n #[test]\n\n fn ok() -> anyhow::Result<()> {\n", "file_path": "src/transactions/dispute.rs", "rank": 3, "score": 47958.91315912656 }, { "content": " init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/unknown\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn invalid_cid_tx() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/invalid_cid_tx\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/dispute.rs", "rank": 4, "score": 47950.985356176694 }, { "content": " init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/ok\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn repeated_unresolved() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/repeated_unresolved\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn repeated_resolved() -> anyhow::Result<()> {\n", "file_path": "src/transactions/dispute.rs", "rank": 5, "score": 47950.24048099483 }, { "content": " init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/repeated_resolved\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn repeated_charged() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/dispute/repeated_charged\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn unknown() -> anyhow::Result<()> {\n", "file_path": "src/transactions/dispute.rs", "rank": 6, "score": 47950.24048099483 }, { "content": "#[derive(structopt::StructOpt, Debug)]\n\nstruct CliArgs {\n\n /// Transactions file in a csv format.\n\n #[structopt(name = \"transactions\")]\n\n transactions: PathBuf,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 30760.846164701277 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let args = CliArgs::from_args();\n\n init_tracing()?;\n\n\n\n let ledger = Ledger::from_path(args.transactions)?;\n\n // ledger.print_transactions()?;\n\n\n\n let mut bank = Bank::new(ledger);\n\n\n\n // todo: this is probably not great for large datasets with around 2MB of account data\n\n println!(\"{}\", bank.ordered_accounts_balance_buffer()?);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 8, "score": 26533.17270790469 }, { "content": "/// Type identifier for a 
client\n\npub(crate) type ClientId = u16;\n", "file_path": "src/client.rs", "rank": 9, "score": 25831.601056031468 }, { "content": "fn init_tracing() -> anyhow::Result<()> {\n\n let filter = tracing_subscriber::EnvFilter::from_default_env();\n\n tracing_subscriber::fmt()\n\n .with_env_filter(filter)\n\n .pretty()\n\n .try_init()\n\n .map_err(|_| anyhow::anyhow!(\"Failed to init tracing (already inited?)\"))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 25534.868989173687 }, { "content": "use crate::{\n\n account::{AccountInfo, SetAccountInfo},\n\n bank::BankAccount,\n\n transactions::{Transaction, TransactionError},\n\n};\n\n\n\n/// A deposit is a credit to the client's asset account, meaning it should increase the available\n\n/// and total funds of the client account\n\n/// A deposit looks like\n\n/// type client tx amount\n\n/// deposit 1 1 1.0\n\n/// Withdrawal\n\n/// A withdraw is a debit to the client's asset account, meaning it should decrease the available\n\n/// and total funds of the client account\n\n#[derive(Debug)]\n\npub(super) struct Deposit {\n\n account: BankAccount,\n\n amount: rust_decimal::Decimal,\n\n}\n\n\n", "file_path": "src/transactions/deposit.rs", "rank": 11, "score": 24317.32579775856 }, { "content": "impl Deposit {\n\n pub(crate) fn new(account: BankAccount, amount: rust_decimal::Decimal) -> Self {\n\n Self { account, amount }\n\n }\n\n}\n\nimpl Transaction for Deposit {\n\n #[tracing::instrument(err)]\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n if !self.account.locked() {\n\n let new_available = self.account.available_funds() + self.amount;\n\n self.account.set_available_funds(new_available);\n\n } else {\n\n let error = TransactionError::AccountFrozen {\n\n account: self.account.client_id(),\n\n };\n\n tracing::debug!(error=%error, \"non-fatal error occurred\");\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/deposit.rs", "rank": 12, "score": 24314.843978247933 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{bank::tests::test, init_tracing};\n\n\n\n #[test]\n\n fn ok() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/deposit/ok\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/deposit.rs", "rank": 13, "score": 24308.049382177127 }, { "content": "use crate::{\n\n account::{AccountInfo, SetAccountInfo},\n\n bank::BankAccount,\n\n transactions::{Transaction, TransactionError},\n\n};\n\n\n\n/// A withdraw is a debit to the client's asset account, meaning it should decrease the available\n\n/// and total funds of the client account\n\n/// A withdrawal looks like\n\n/// type client tx amount\n\n/// withdrawal 2 2 1.0\n\n/// # Non-Fatal Error\n\n/// If a client does not have sufficient available funds the withdrawal should fail and the\n\n/// total amount of funds should not change\n\n#[derive(Debug)]\n\npub(super) struct Withdrawal {\n\n account: BankAccount,\n\n amount: rust_decimal::Decimal,\n\n}\n\nimpl Withdrawal {\n", "file_path": "src/transactions/withdrawal.rs", "rank": 14, "score": 24234.982322256983 }, { "content": " pub(crate) fn new(account: BankAccount, amount: rust_decimal::Decimal) -> Self {\n\n Self { account, amount }\n\n }\n\n}\n\nimpl Transaction for Withdrawal {\n\n #[tracing::instrument(err)]\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n if self.account.locked() {\n\n let error = TransactionError::AccountFrozen 
{\n\n account: self.account.client_id(),\n\n };\n\n tracing::debug!(error=%error, \"non-fatal error occurred\");\n\n return Ok(());\n\n }\n\n let available = self.account.available_funds();\n\n if available >= self.amount {\n\n let new_available = available - self.amount;\n\n self.account.set_available_funds(new_available);\n\n } else {\n\n let error = TransactionError::InsufficientFunds {\n", "file_path": "src/transactions/withdrawal.rs", "rank": 15, "score": 24231.3771296225 }, { "content": " required: self.amount,\n\n available,\n\n };\n\n tracing::debug!(error=%error, \"non-fatal error occurred\");\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{bank::tests::test, init_tracing};\n\n\n\n #[test]\n\n fn no_funds() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/withdrawal/no_funds\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n", "file_path": "src/transactions/withdrawal.rs", "rank": 16, "score": 24227.347702885458 }, { "content": " Ok(())\n\n }\n\n\n\n #[test]\n\n fn ok() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/withdrawal/ok\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/withdrawal.rs", "rank": 17, "score": 24222.826501420455 }, { "content": " account: BankAccount,\n\n disputed_tx: Option<TransactionLog>,\n\n}\n\nimpl Resolve {\n\n pub(crate) fn new(account: BankAccount, disputed_tx: Option<TransactionLog>) -> Self {\n\n Self {\n\n account,\n\n disputed_tx,\n\n }\n\n }\n\n}\n\nimpl Transaction for Resolve {\n\n #[tracing::instrument(err)]\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n if let Some(dispute) = &self.disputed_tx {\n\n match self.account.find_dispute(dispute.transaction_id()) {\n\n DisputeSate::Disputed(amount) => {\n\n assert!(\n\n amount <= self.account.held_funds(),\n\n \"Amount held and disputes got out of sync - BUG\"\n", "file_path": "src/transactions/resolve.rs", "rank": 18, "score": 24160.20698710243 }, { "content": "use crate::{\n\n account::{AccountInfo, SetAccountInfo},\n\n bank::BankAccount,\n\n transaction::{DisputeSate, TransactionLog},\n\n transactions::{Transaction, TransactionInfo},\n\n};\n\n\n\n/// A resolve represents a resolution to a dispute, releasing the associated held funds. Funds that\n\n/// were previously disputed are no longer disputed. This means that the clients held funds should\n\n/// decrease by the amount no longer disputed, their available funds should increase by the\n\n/// amount no longer disputed, and their total funds should remain the same.\n\n/// A resolve looks like\n\n/// type client tx amount\n\n/// resolve 1 1\n\n/// # Non-fatal Error:\n\n/// Like disputes, resolves do not specify an amount. Instead they refer to a transaction that was\n\n/// under dispute by ID. 
If the tx specified doesn't exist, or the tx isn't under dispute, you can\n\n/// ignore the resolve and assume this is an error on our partner's side.\n\n#[derive(Debug)]\n\npub(super) struct Resolve {\n", "file_path": "src/transactions/resolve.rs", "rank": 19, "score": 24156.621604062526 }, { "content": " );\n\n let available = self.account.available_funds();\n\n let new_available = available + amount;\n\n self.account.remove_held_funds(dispute.transaction_id());\n\n self.account.set_available_funds(new_available);\n\n // I'm guessing that we allow resolved disputes to be re-disputed?\n\n self.account\n\n .complete_dispute(dispute.transaction_id(), DisputeSate::Undisputed);\n\n }\n\n DisputeSate::Undisputed => {}\n\n DisputeSate::Chargeback => {}\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{bank::tests::test, init_tracing};\n", "file_path": "src/transactions/resolve.rs", "rank": 20, "score": 24155.493985290777 }, { "content": "\n\n #[test]\n\n fn unknown() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/resolve/unknown\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn invalid_cid_tx() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/resolve/invalid_cid_tx\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/resolve.rs", "rank": 21, "score": 24143.216949957885 }, { "content": "\n\n #[test]\n\n fn ok() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/resolve/ok\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn repeated() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/resolve/repeated\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n", "file_path": "src/transactions/resolve.rs", "rank": 22, "score": 24140.988062423545 }, { "content": "use crate::{\n\n account::{AccountInfo, SetAccountInfo},\n\n bank::BankAccount,\n\n transaction::{DisputeSate, TransactionLog},\n\n transactions::{Transaction, TransactionInfo},\n\n};\n\n\n\n/// A chargeback is the final state of a dispute and represents the client reversing a transaction.\n\n/// Funds that were held have now been withdrawn. This means that the clients held funds and\n\n/// total funds should decrease by the amount previously disputed. If a chargeback occurs the\n\n/// client's account should be immediately frozen.\n\n/// A chargeback looks like\n\n/// type client tx amount\n\n/// chargeback 1 1\n\n/// # Non-fatal Error:\n\n/// Like a dispute and a resolve a chargeback refers to the transaction by ID (tx) and does not\n\n/// specify an amount. 
Like a resolve, if the tx specified doesn't exist, or the tx isn't under\n\n/// dispute, you can ignore chargeback and assume this is an error on our partner's side.\n\n#[derive(Debug)]\n\npub(super) struct ChargeBack {\n", "file_path": "src/transactions/chargeback.rs", "rank": 23, "score": 24075.520288064898 }, { "content": " account: BankAccount,\n\n disputed_tx: Option<TransactionLog>,\n\n}\n\nimpl ChargeBack {\n\n pub(crate) fn new(account: BankAccount, disputed_tx: Option<TransactionLog>) -> Self {\n\n Self {\n\n account,\n\n disputed_tx,\n\n }\n\n }\n\n}\n\nimpl Transaction for ChargeBack {\n\n #[tracing::instrument(err)]\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n if let Some(dispute) = &self.disputed_tx {\n\n match self.account.find_dispute(dispute.transaction_id()) {\n\n DisputeSate::Disputed(amount) => {\n\n assert!(\n\n amount <= self.account.held_funds(),\n\n \"Amount held and disputes got out of sync - BUG\"\n", "file_path": "src/transactions/chargeback.rs", "rank": 24, "score": 24074.96670088131 }, { "content": " );\n\n self.account.remove_held_funds(dispute.transaction_id());\n\n self.account\n\n .complete_dispute(dispute.transaction_id(), DisputeSate::Chargeback);\n\n\n\n // we're now frozen so we cannot issue any deposit/withdrawals?\n\n self.account.set_locked(true);\n\n }\n\n DisputeSate::Undisputed => {\n\n tracing::debug!(account=?self.account, disputed_tx=?dispute, \"Transaction undisputed\");\n\n }\n\n DisputeSate::Chargeback => {\n\n tracing::debug!(account=?self.account, disputed_tx=?dispute, \"Transaction has already been charged back\");\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/transactions/chargeback.rs", "rank": 25, "score": 24073.021310532098 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use crate::{bank::tests::test, init_tracing};\n\n\n\n #[test]\n\n fn ok() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/chargeback/ok\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn repeated() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/chargeback/repeated\");\n\n let (expected, actual) = test(test_folder)?;\n", "file_path": "src/transactions/chargeback.rs", "rank": 26, "score": 24061.079713471194 }, { "content": " assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn undisputed() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/chargeback/undisputed\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/transactions/chargeback.rs", "rank": 27, "score": 24060.748302529635 }, { "content": " assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn unknown() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/chargeback/unknown\");\n\n let (expected, actual) = test(test_folder)?;\n\n assert_eq!(expected, actual);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn invalid_cid_tx() -> anyhow::Result<()> {\n\n init_tracing().ok();\n\n\n\n let test_folder = std::path::Path::new(\"./test_data/chargeback/invalid_cid_tx\");\n\n let (expected, actual) = test(test_folder)?;\n", "file_path": "src/transactions/chargeback.rs", "rank": 28, "score": 24060.436306766704 }, { "content": "impl TransactionLogCsv {\n\n #[allow(dead_code)]\n\n 
pub(crate) fn log_info(&self) {\n\n tracing::info!(type_=?self.transaction_type(), client=self.client_id(), tx=%self.transaction_id(), amount=?self.amount());\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub(crate) enum TransactionType {\n\n Deposit,\n\n Withdrawal,\n\n Dispute,\n\n Resolve,\n\n Chargeback,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::TransactionLogCsv;\n", "file_path": "src/csv/transaction.rs", "rank": 33, "score": 30.193119318647813 }, { "content": "use crate::{\n\n account::AccountId,\n\n client::ClientId,\n\n csv::transaction::{TransactionId, TransactionType},\n\n transaction::TransactionLog,\n\n transactions::{\n\n chargeback::ChargeBack, deposit::Deposit, dispute::Dispute, resolve::Resolve,\n\n withdrawal::Withdrawal,\n\n },\n\n Bank,\n\n};\n\n\n\nmod chargeback;\n\nmod deposit;\n\nmod dispute;\n\nmod resolve;\n\nmod withdrawal;\n\n\n\n/// A transaction, that can be executed\n\npub(crate) trait Transaction {\n", "file_path": "src/transactions/mod.rs", "rank": 35, "score": 24.30647343822723 }, { "content": "use crate::{client::ClientId, transactions::TransactionInfo};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type identifier for a transaction\n\npub(crate) type TransactionId = u32;\n\n\n\n/// The input will be a CSV file with the columns type, client, tx, and amount. You can assume the\n\n/// type is a string, the client column is a valid u16 client ID, the tx is a valid u32 transaction\n\n/// ID, and the amount is a rust_decimal::Decimal value with a precision of up to four places past\n\n/// the rust_decimal::Decimal.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub(crate) struct TransactionLogCsv {\n\n /// Transaction Type.\n\n #[serde(rename = \"type\")]\n\n type_: TransactionType,\n\n /// Client ID.\n\n #[serde(rename = \"client\")]\n\n client_id: ClientId,\n\n /// Transaction ID.\n\n #[serde(rename = \"tx\")]\n", "file_path": "src/csv/transaction.rs", "rank": 37, "score": 19.386900581675736 }, { "content": "use crate::{account::AccountInfo, client::ClientId};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize)]\n\npub(crate) struct AccountLog {\n\n /// Client identifier.\n\n #[serde(rename = \"client\")]\n\n client_id: ClientId,\n\n /// The total funds that are available for trading, staking, withdrawal, etc.\n\n /// This should be equal to the total - held amounts.\n\n #[serde(rename = \"available\")]\n\n available_funds: rust_decimal::Decimal,\n\n /// The total funds that are held for dispute.\n\n /// This should be equal to total - available amounts.\n\n #[serde(rename = \"held\")]\n\n held_funds: rust_decimal::Decimal,\n\n /// The total funds that are available or held. This should be equal to available + held.\n\n #[serde(rename = \"total\")]\n\n total_funds: rust_decimal::Decimal,\n\n /// Whether the account is locked. 
An account is locked if a charge back occur.\n", "file_path": "src/csv/account.rs", "rank": 38, "score": 17.425412108594596 }, { "content": "use crate::{\n\n client::ClientId,\n\n csv::{account::AccountLog, transaction::TransactionId},\n\n transaction::DisputeSate,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub(crate) struct Account {\n\n /// Client identifier.\n\n client_id: ClientId,\n\n /// The total funds that are available for trading, staking, withdrawal, etc.\n\n /// This should be equal to the total - held amounts.\n\n available_funds: rust_decimal::Decimal,\n\n /// The total funds that are held for dispute.\n\n /// This should be equal to total - available amounts.\n\n held_funds: HashMap<TransactionId, rust_decimal::Decimal>,\n\n completed_disputes: HashMap<TransactionId, DisputeSate>,\n\n held_funds_cache: rust_decimal::Decimal,\n\n /// Whether the account is locked. An account is locked if a charge back occur.\n", "file_path": "src/account.rs", "rank": 39, "score": 17.042269586496015 }, { "content": " available: rust_decimal::Decimal,\n\n },\n\n #[error(\"Account({account:?}) is frozen\")]\n\n AccountFrozen { account: AccountId },\n\n}\n\n\n\nimpl<'a> Transaction for BankTransaction<'a> {\n\n fn execute(&mut self) -> anyhow::Result<()> {\n\n let account = self.bank.account(self.transaction_log.client_id());\n\n match self.transaction_log {\n\n TransactionLog::Deposit { amount, .. } => Deposit::new(account, *amount).execute(),\n\n TransactionLog::Withdrawal { amount, .. } => {\n\n Withdrawal::new(account, *amount).execute()\n\n }\n\n\n\n TransactionLog::Dispute { .. } => {\n\n let dispute = self.bank.transaction(\n\n self.chronological_index,\n\n self.transaction_log.client_id(),\n\n self.transaction_log.transaction_id(),\n", "file_path": "src/transactions/mod.rs", "rank": 40, "score": 16.470800038114284 }, { "content": " tx_id: TransactionId,\n\n /// Transaction amount with a precision of up to four places past the rust_decimal::Decimal.\n\n #[serde(rename = \"amount\")]\n\n amount: Option<rust_decimal::Decimal>,\n\n}\n\n\n\nimpl TransactionInfo for TransactionLogCsv {\n\n fn transaction_type(&self) -> TransactionType {\n\n self.type_.clone()\n\n }\n\n fn client_id(&self) -> ClientId {\n\n self.client_id\n\n }\n\n fn transaction_id(&self) -> TransactionId {\n\n self.tx_id\n\n }\n\n fn amount(&self) -> Option<rust_decimal::Decimal> {\n\n self.amount\n\n }\n\n}\n", "file_path": "src/csv/transaction.rs", "rank": 42, "score": 13.976357874161927 }, { "content": " fn total_funds(&self) -> rust_decimal::Decimal {\n\n self.held_funds() + self.available_funds()\n\n }\n\n fn locked(&self) -> bool {\n\n self.locked\n\n }\n\n fn find_dispute(&self, transaction: TransactionId) -> DisputeSate {\n\n if let Some(amount) = self.held_funds.get(&transaction) {\n\n DisputeSate::Disputed(*amount)\n\n } else {\n\n self.completed_disputes\n\n .get(&transaction)\n\n .cloned()\n\n .unwrap_or(DisputeSate::Undisputed)\n\n }\n\n }\n\n}\n\n\n\npub(crate) trait SetAccountInfo {\n\n fn set_available_funds(&mut self, amount: rust_decimal::Decimal);\n", "file_path": "src/account.rs", "rank": 43, "score": 13.803644911208922 }, { "content": " locked: bool,\n\n}\n\n\n\n// Assumed from the provided doc that there's only one account per client\n\npub(crate) type AccountId = crate::client::ClientId;\n\n\n\nimpl Account {\n\n pub(crate) fn new(account_id: AccountId) -> Self {\n\n Self {\n\n client_id: account_id,\n\n available_funds: rust_decimal::Decimal::new(0, 0),\n\n 
held_funds: Default::default(),\n\n completed_disputes: Default::default(),\n\n held_funds_cache: rust_decimal::Decimal::new(0, 0),\n\n locked: false,\n\n }\n\n }\n\n #[allow(dead_code)]\n\n pub(crate) fn log_info(&self) {\n\n tracing::info!(client=%self.client_id(), available=?self.available_funds(), held=?self.held_funds(), total=?self.total_funds(), locked=self.locked());\n", "file_path": "src/account.rs", "rank": 44, "score": 13.38228952986776 }, { "content": " fn set_locked(&mut self, locked: bool) {\n\n self.locked = locked;\n\n }\n\n fn complete_dispute(&mut self, disputer_id: TransactionId, state: DisputeSate) {\n\n match state {\n\n DisputeSate::Undisputed => {}\n\n DisputeSate::Disputed(_) => {}\n\n DisputeSate::Chargeback => {\n\n self.completed_disputes.insert(disputer_id, state);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/account.rs", "rank": 45, "score": 12.926090718681714 }, { "content": " w.serialize(transaction?)?;\n\n }\n\n let output = String::from_utf8(w.into_inner()?)?;\n\n assert_eq!(test_input, output);\n\n Ok(())\n\n }\n\n\n\n /// Same test as csv_sanity but with spaces\n\n #[test]\n\n fn csv_sanity_with_spaces() -> anyhow::Result<()> {\n\n let test_input = \"\\\n\ntype, client, tx, amount\n\ndeposit, 1,1, 1\n\ndeposit, 2,2, 2\n\ndeposit, 1,3, 2\n\nwithdrawal, 1,4, 1.5\n\nwithdrawal, 2,5, 3\n\n\";\n\n let mut test_reader = csv::ReaderBuilder::new()\n\n .trim(csv::Trim::All)\n", "file_path": "src/csv/transaction.rs", "rank": 46, "score": 12.787179287823378 }, { "content": " )?;\n\n Dispute::new(account, dispute).execute()\n\n }\n\n TransactionLog::Resolve { .. } => {\n\n let dispute = self.bank.transaction(\n\n self.chronological_index,\n\n self.transaction_log.client_id(),\n\n self.transaction_log.transaction_id(),\n\n )?;\n\n Resolve::new(account, dispute).execute()\n\n }\n\n TransactionLog::Chargeback { .. 
} => {\n\n let dispute = self.bank.transaction(\n\n self.chronological_index,\n\n self.transaction_log.client_id(),\n\n self.transaction_log.transaction_id(),\n\n )?;\n\n ChargeBack::new(account, dispute).execute()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/transactions/mod.rs", "rank": 47, "score": 12.600947569266895 }, { "content": " #[test]\n\n /// Basic CSV test, read some test input and write it back, it should be the same\n\n fn csv_sanity() -> anyhow::Result<()> {\n\n let test_input = \"\\\n\ntype,client,tx,amount\n\ndeposit,1,1,1\n\ndeposit,2,2,2\n\ndeposit,1,3,2\n\nwithdrawal,1,4,1.5\n\nwithdrawal,2,5,3\n\n\";\n\n let mut test_reader = csv::Reader::from_reader(test_input.as_bytes());\n\n let transactions = test_reader\n\n .deserialize::<TransactionLogCsv>()\n\n .enumerate()\n\n .map(|(_, t)| t)\n\n .collect::<Vec<_>>();\n\n\n\n let mut w = csv::Writer::from_writer(vec![]);\n\n for transaction in transactions {\n", "file_path": "src/csv/transaction.rs", "rank": 48, "score": 12.046982745590313 }, { "content": " fn execute(&mut self) -> anyhow::Result<()>;\n\n}\n\n\n\n/// Information about a transaction\n\npub(crate) trait TransactionInfo {\n\n fn transaction_type(&self) -> TransactionType;\n\n fn client_id(&self) -> ClientId;\n\n fn transaction_id(&self) -> TransactionId;\n\n fn amount(&self) -> Option<rust_decimal::Decimal>;\n\n}\n\n\n\n/// A bank transaction helper that implements `Transaction`\n\npub(crate) struct BankTransaction<'a> {\n\n bank: &'a mut Bank,\n\n chronological_index: usize,\n\n transaction_log: &'a TransactionLog,\n\n}\n\n\n\nimpl<'a> BankTransaction<'a> {\n\n /// Return a new `Self`\n", "file_path": "src/transactions/mod.rs", "rank": 49, "score": 11.640170594942031 }, { "content": "\n\n/// Ledger iterator\n\n#[derive(Debug)]\n\npub(crate) struct LedgerIter {\n\n reader: csv::Reader<File>,\n\n}\n\n\n\nimpl Iterator for LedgerIter {\n\n type Item = TransactionLog;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.reader.deserialize::<TransactionLogCsv>().next() {\n\n None => None,\n\n Some(Ok(transaction)) => Some(transaction.into()),\n\n Some(Err(error)) => {\n\n let error = anyhow::anyhow!(\"Error in the csv file!!!: {}\", error);\n\n panic!(\"{}\", error);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/ledger.rs", "rank": 50, "score": 11.391553704372487 }, { "content": " fn available_funds(&self) -> rust_decimal::Decimal;\n\n fn held_funds(&self) -> rust_decimal::Decimal;\n\n fn total_funds(&self) -> rust_decimal::Decimal;\n\n fn locked(&self) -> bool;\n\n fn find_dispute(&self, transaction: TransactionId) -> DisputeSate {\n\n let _ = transaction;\n\n DisputeSate::Undisputed\n\n }\n\n}\n\n\n\nimpl AccountInfo for Account {\n\n fn client_id(&self) -> ClientId {\n\n self.client_id\n\n }\n\n fn available_funds(&self) -> rust_decimal::Decimal {\n\n self.available_funds.round_dp(4)\n\n }\n\n fn held_funds(&self) -> rust_decimal::Decimal {\n\n self.held_funds_cache.round_dp(4)\n\n }\n", "file_path": "src/account.rs", "rank": 51, "score": 10.75440721062036 }, { "content": " fn add_held_funds(&mut self, amount: rust_decimal::Decimal, disputer_id: TransactionId);\n\n fn remove_held_funds(&mut self, disputer_id: TransactionId);\n\n fn set_locked(&mut self, locked: bool);\n\n fn complete_dispute(&mut self, disputer_id: TransactionId, state: DisputeSate);\n\n}\n\n\n\nimpl SetAccountInfo for Account {\n\n fn set_available_funds(&mut self, amount: rust_decimal::Decimal) {\n\n self.available_funds = amount.round_dp(4);\n\n }\n\n fn add_held_funds(&mut self, 
amount: rust_decimal::Decimal, disputer_id: TransactionId) {\n\n let amount = amount.round_dp(4);\n\n self.held_funds.insert(disputer_id, amount);\n\n self.held_funds_cache += amount;\n\n }\n\n fn remove_held_funds(&mut self, disputer_id: TransactionId) {\n\n if let Some(d) = self.held_funds.remove(&disputer_id) {\n\n self.held_funds_cache -= d.round_dp(4);\n\n };\n\n }\n", "file_path": "src/account.rs", "rank": 52, "score": 10.110271112866863 }, { "content": " self.lock().unwrap().complete_dispute(disputer_id, state)\n\n }\n\n}\n\n// todo: use Deref with an OwnedMutexGuard target?\n\nimpl AccountInfo for BankAccount {\n\n fn client_id(&self) -> ClientId {\n\n self.lock().unwrap().client_id()\n\n }\n\n fn available_funds(&self) -> rust_decimal::Decimal {\n\n self.lock().unwrap().available_funds()\n\n }\n\n fn held_funds(&self) -> rust_decimal::Decimal {\n\n self.lock().unwrap().held_funds()\n\n }\n\n fn total_funds(&self) -> rust_decimal::Decimal {\n\n self.lock().unwrap().total_funds()\n\n }\n\n fn locked(&self) -> bool {\n\n self.lock().unwrap().locked()\n\n }\n", "file_path": "src/bank.rs", "rank": 53, "score": 9.838455968779758 }, { "content": "use crate::{\n\n account::{Account, AccountId, AccountInfo, SetAccountInfo},\n\n client::ClientId,\n\n csv::transaction::TransactionId,\n\n transaction::{DisputeSate, TransactionLog},\n\n transactions::{BankTransaction, Transaction, TransactionInfo},\n\n Ledger,\n\n};\n\nuse itertools::Itertools;\n\nuse std::{\n\n collections::HashMap,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\n/// A bank Account\n\n/// todo: The way things are this could probably use a Cell instead of a Mutex\n\npub(crate) type BankAccount = Arc<Mutex<Account>>;\n\n\n\n/// A Bank\n\n/// It has a ledger of transactions and bank accounts.\n", "file_path": "src/bank.rs", "rank": 54, "score": 9.685200966158954 }, { "content": "use crate::{csv::transaction::TransactionLogCsv, transaction::TransactionLog};\n\nuse std::{fs::File, io::Seek, path::PathBuf};\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Ledger {\n\n csv_file: File,\n\n}\n\n\n\nimpl Ledger {\n\n /// New `Self` from a given csv file\n\n pub(crate) fn from_path(path: PathBuf) -> anyhow::Result<Self> {\n\n let csv_file = File::open(path)?;\n\n Ok(Self { csv_file })\n\n }\n\n fn reader(&self) -> anyhow::Result<csv::Reader<File>> {\n\n let mut file = self.csv_file.try_clone()?;\n\n file.rewind()?;\n\n let reader = csv::ReaderBuilder::new()\n\n .flexible(true)\n\n .trim(csv::Trim::All)\n", "file_path": "src/ledger.rs", "rank": 55, "score": 9.611959926743904 }, { "content": " let _ = std::mem::take(&mut self.accounts);\n\n\n\n Ok(String::from_utf8(w.into_inner()?)?)\n\n }\n\n}\n\n\n\nimpl SetAccountInfo for BankAccount {\n\n fn set_available_funds(&mut self, amount: rust_decimal::Decimal) {\n\n self.lock().unwrap().set_available_funds(amount.round_dp(4))\n\n }\n\n fn add_held_funds(&mut self, amount: rust_decimal::Decimal, disputer_id: TransactionId) {\n\n self.lock().unwrap().add_held_funds(amount, disputer_id)\n\n }\n\n fn remove_held_funds(&mut self, disputer_id: TransactionId) {\n\n self.lock().unwrap().remove_held_funds(disputer_id)\n\n }\n\n fn set_locked(&mut self, locked: bool) {\n\n self.lock().unwrap().set_locked(locked)\n\n }\n\n fn complete_dispute(&mut self, disputer_id: TransactionId, state: DisputeSate) {\n", "file_path": "src/bank.rs", "rank": 56, "score": 9.574263264474164 }, { "content": "mod account;\n\nmod bank;\n\nmod client;\n\nmod csv;\n\nmod ledger;\n\nmod transaction;\n\nmod transactions;\n\n\n\nuse 
crate::{bank::Bank, ledger::Ledger};\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(structopt::StructOpt, Debug)]\n", "file_path": "src/main.rs", "rank": 57, "score": 9.388818331053386 }, { "content": " #[serde(rename = \"locked\")]\n\n locked: bool,\n\n}\n\nimpl AccountLog {\n\n pub(crate) fn new(\n\n client_id: ClientId,\n\n available_funds: rust_decimal::Decimal,\n\n held_funds: rust_decimal::Decimal,\n\n total_funds: rust_decimal::Decimal,\n\n locked: bool,\n\n ) -> Self {\n\n Self {\n\n client_id,\n\n available_funds,\n\n held_funds,\n\n total_funds,\n\n locked,\n\n }\n\n }\n\n}\n", "file_path": "src/csv/account.rs", "rank": 58, "score": 8.513533017620135 }, { "content": " }\n\n pub(crate) fn to_csv(&self) -> AccountLog {\n\n AccountLog::from(self)\n\n }\n\n}\n\n\n\nimpl From<&Account> for AccountLog {\n\n fn from(acc: &Account) -> Self {\n\n AccountLog::new(\n\n acc.client_id,\n\n acc.available_funds().normalize(),\n\n acc.held_funds_cache.round_dp(4).normalize(),\n\n acc.total_funds().normalize(),\n\n acc.locked,\n\n )\n\n }\n\n}\n\n\n\npub(crate) trait AccountInfo {\n\n fn client_id(&self) -> ClientId;\n", "file_path": "src/account.rs", "rank": 59, "score": 8.239334788345367 }, { "content": "#[derive(Debug)]\n\npub(crate) struct Bank {\n\n accounts: HashMap<AccountId, BankAccount>,\n\n ledger: Ledger,\n\n}\n\n\n\nimpl Bank {\n\n /// Return a new `Self` with the provided `Ledger`\n\n pub(crate) fn new(ledger: Ledger) -> Self {\n\n Self {\n\n accounts: Default::default(),\n\n ledger,\n\n }\n\n }\n\n /// Get the BankAccount for the given account_id\n\n /// If the account does not exist a new default account will be created\n\n pub(crate) fn account(&mut self, account_id: AccountId) -> BankAccount {\n\n self.accounts\n\n .entry(account_id)\n\n .or_insert_with(|| Arc::new(Mutex::new(Account::new(account_id))))\n", "file_path": "src/bank.rs", "rank": 60, "score": 7.799875856947967 }, { "content": " fn find_dispute(&self, transaction: TransactionId) -> DisputeSate {\n\n self.lock().unwrap().find_dispute(transaction)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod tests {\n\n use crate::{Bank, Ledger};\n\n\n\n /// Get a Bank usable for testing\n\n pub(crate) fn bank(test_file: std::path::PathBuf) -> anyhow::Result<Bank> {\n\n let ledger = Ledger::from_path(test_file)?;\n\n let bank = Bank::new(ledger);\n\n Ok(bank)\n\n }\n\n /// Test that the folder's test input and output succeed:\n\n /// The input is read into the bank which then returns the actual output.\n\n /// Returns a tuple with the expected output and the actual output.\n\n pub(crate) fn test(tests_folder: &std::path::Path) -> anyhow::Result<(String, String)> {\n\n let expected_output = std::fs::read_to_string(tests_folder.join(\"output.csv\"))?;\n\n let mut bank = bank(tests_folder.join(\"input.csv\"))?;\n\n\n\n let output = bank.ordered_accounts_balance_buffer()?;\n\n Ok((expected_output.trim().into(), output.trim().into()))\n\n }\n\n}\n", "file_path": "src/bank.rs", "rank": 61, "score": 7.312498844398766 }, { "content": "pub(crate) mod tests {\n\n use super::AccountLog;\n\n\n\n #[test]\n\n /// Basic CSV test, read some test input and write it back, it should be the same\n\n fn csv_sanity() -> anyhow::Result<()> {\n\n let test_input = \"\\\n\nclient,available,held,total,locked\n\n1,1.5,0,1.5,false\n\n2,2,0,2,false\n\n\";\n\n let mut test_reader = csv::Reader::from_reader(test_input.as_bytes());\n\n let accounts = test_reader\n\n .deserialize::<AccountLog>()\n\n .enumerate()\n\n .map(|(_, t)| t)\n\n 
.collect::<Vec<_>>();\n\n\n\n let mut w = csv::Writer::from_writer(vec![]);\n\n for account in accounts {\n", "file_path": "src/csv/account.rs", "rank": 62, "score": 6.251810166395187 }, { "content": " w.serialize(account?)?;\n\n }\n\n let output = String::from_utf8(w.into_inner()?)?;\n\n assert_eq!(test_input, output);\n\n Ok(())\n\n }\n\n\n\n /// Same test as csv_sanity but with spaces\n\n #[test]\n\n fn csv_sanity_with_spaces() -> anyhow::Result<()> {\n\n let test_input = \"\\\n\nclient, available, held, total, locked\n\n1, 1.5, 0, 1.5, false\n\n2, 2, 0, 2, false\n\n\";\n\n let mut test_reader = csv::ReaderBuilder::new()\n\n .trim(csv::Trim::All)\n\n .from_reader(test_input.as_bytes());\n\n let accounts = test_reader\n\n .deserialize::<AccountLog>()\n", "file_path": "src/csv/account.rs", "rank": 63, "score": 5.128470419588606 }, { "content": " .clone()\n\n }\n\n /// Try to get the TransactionLog for the given transaction_id\n\n /// Searches the ledger only up to the chronologically ordered index max_ledger_search\n\n pub(crate) fn transaction(\n\n &mut self,\n\n max_ledger_search: usize,\n\n account_id: AccountId,\n\n transaction_id: TransactionId,\n\n ) -> anyhow::Result<Option<TransactionLog>> {\n\n Ok(self\n\n .ledger\n\n .iter()?\n\n .take(max_ledger_search)\n\n .find(|transaction| {\n\n transaction.transaction_id() == transaction_id\n\n && account_id == transaction.client_id()\n\n }))\n\n }\n\n\n", "file_path": "src/bank.rs", "rank": 64, "score": 4.636124170281681 }, { "content": "\n\nimpl AccountInfo for AccountLog {\n\n fn client_id(&self) -> ClientId {\n\n self.client_id\n\n }\n\n fn available_funds(&self) -> rust_decimal::Decimal {\n\n self.available_funds\n\n }\n\n fn held_funds(&self) -> rust_decimal::Decimal {\n\n self.held_funds\n\n }\n\n fn total_funds(&self) -> rust_decimal::Decimal {\n\n self.total_funds\n\n }\n\n fn locked(&self) -> bool {\n\n self.locked\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/csv/account.rs", "rank": 65, "score": 4.576035200510462 }, { "content": " /// Get the ordered accounts balance as a String\n\n pub(crate) fn ordered_accounts_balance_buffer(&mut self) -> anyhow::Result<String> {\n\n // Note: if we ever wanted to \"commit\" the ledger into the accounts we'd have to either\n\n // trim the ledger or make sure the iterator can not be reset\n\n let _ = std::mem::take(&mut self.accounts);\n\n\n\n self.ledger.iter()?.enumerate().for_each(|(index, f)| {\n\n // as things stand most \"errors\"/invalid ops are simply ignored, but they're ignored\n\n // in the specific transaction as it's the one that knows what it should ignore\n\n BankTransaction::new(self, index, &f).execute().unwrap();\n\n });\n\n let mut w = csv::Writer::from_writer(vec![]);\n\n for account in self\n\n .accounts\n\n .iter()\n\n .map(|a| a.1.lock().unwrap().to_csv())\n\n .sorted_by(|a, b| a.client_id().cmp(&b.client_id()))\n\n {\n\n w.serialize(account)?;\n\n }\n", "file_path": "src/bank.rs", "rank": 66, "score": 4.549294212193647 }, { "content": " pub(crate) fn new(\n\n bank: &'a mut Bank,\n\n chronological_index: usize,\n\n transaction_log: &'a TransactionLog,\n\n ) -> Self {\n\n Self {\n\n bank,\n\n chronological_index,\n\n transaction_log,\n\n }\n\n }\n\n}\n\n\n\n/// A transaction error - was intended to return an error but it turns out we have to ignore\n\n/// a certain number of \"errors\" so ended up doing away with it\n\n#[derive(thiserror::Error, Debug)]\n\npub(crate) enum TransactionError {\n\n #[error(\"Insufficient Funds (required {required:?}, available 
{available:?})\")]\n\n InsufficientFunds {\n\n required: rust_decimal::Decimal,\n", "file_path": "src/transactions/mod.rs", "rank": 67, "score": 4.419363483313046 }, { "content": " .from_reader(test_input.as_bytes());\n\n let transactions = test_reader\n\n .deserialize::<TransactionLogCsv>()\n\n .enumerate()\n\n .map(|(_, t)| t.ok())\n\n .flatten()\n\n .collect::<Vec<_>>();\n\n\n\n let mut w = csv::Writer::from_writer(vec![]);\n\n for transaction in &transactions {\n\n w.serialize(transaction)?;\n\n }\n\n let output = String::from_utf8(w.into_inner()?)?;\n\n assert_eq!(test_input.replace(' ', \"\"), output);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/csv/transaction.rs", "rank": 68, "score": 4.059962531650054 }, { "content": " .from_reader(file);\n\n Ok(reader)\n\n }\n\n /// Print ledger transactions to stdout\n\n #[allow(dead_code)]\n\n pub(crate) fn print_transactions(&self) -> anyhow::Result<()> {\n\n let mut reader = self.reader()?;\n\n for record in reader.deserialize::<TransactionLogCsv>() {\n\n println!(\"{:?}\", record?);\n\n let _ = record;\n\n }\n\n Ok(())\n\n }\n\n /// Get a Ledger iterator\n\n pub(crate) fn iter(&self) -> anyhow::Result<LedgerIter> {\n\n Ok(LedgerIter {\n\n reader: self.reader()?,\n\n })\n\n }\n\n}\n", "file_path": "src/ledger.rs", "rank": 69, "score": 4.014445307856317 }, { "content": "pub(crate) mod account;\n\npub(crate) mod transaction;\n", "file_path": "src/csv/mod.rs", "rank": 70, "score": 2.7680868822916187 }, { "content": " .enumerate()\n\n .map(|(_, t)| t)\n\n .collect::<Vec<_>>();\n\n\n\n let mut w = csv::Writer::from_writer(vec![]);\n\n for account in accounts {\n\n w.serialize(account.unwrap())?;\n\n }\n\n let output = String::from_utf8(w.into_inner()?)?;\n\n assert_eq!(test_input.replace(' ', \"\"), output);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/csv/account.rs", "rank": 71, "score": 2.302386683559968 }, { "content": "# Usage\n\n```shell\n\n[nix-shell:~/git/heath]$ cargo run -q --bin ledger -- transactions.csv\n\nclient,available,held,total,locked\n\n1,1.4988,0,1.4988,false\n\n2,2,0,2,false\n\n```\n\n\n\n# Missing Tests:\n\n## 4 decimal point precision (and accuracy)\n\n## large datasets\n\n\n", "file_path": "README.md", "rank": 72, "score": 1.9559738875790718 } ]
Rust
src/terminal/winapi_terminal.rs
Rukenshia/crossterm
caa7579ef6102f966a7436f55c2dfda2e30efb5a
use {Construct}; use cursor::cursor; use super::{ClearType, ITerminal}; use winapi::um::wincon::{SMALL_RECT, COORD, CONSOLE_SCREEN_BUFFER_INFO,}; use kernel::windows_kernel::{kernel, terminal}; pub struct WinApiTerminal; impl Construct for WinApiTerminal { fn new() -> Box<WinApiTerminal> { Box::from(WinApiTerminal {}) } } impl ITerminal for WinApiTerminal { fn clear(&self, clear_type: ClearType) { let csbi = kernel::get_console_screen_buffer_info(); let pos = cursor().pos(); match clear_type { ClearType::All => clear_entire_screen(csbi), ClearType::FromCursorDown => clear_after_cursor(pos,csbi), ClearType::FromCursorUp => clear_before_cursor(pos, csbi), ClearType::CurrentLine => clear_current_line(pos, csbi), ClearType::UntilNewLine => clear_until_line(pos, csbi), }; } fn terminal_size(&self) -> (u16, u16) { terminal::terminal_size() } fn scroll_up(&self, count: i16) { } fn scroll_down(&self, count: i16) { let csbi = kernel::get_console_screen_buffer_info(); let mut srct_window; srct_window = csbi.srWindow; if srct_window.Bottom < csbi.dwSize.Y - count { srct_window.Top += count; srct_window.Bottom += count; let success = kernel::set_console_info(true, &mut srct_window); if success { panic!("Something went wrong when scrolling down"); } } } fn set_size(&self, width: i16, height: i16) { if width <= 0 { panic!("Cannot set the terminal width lower than 1"); } if height <= 0 { panic!("Cannot set the terminal height lower then 1") } let csbi = kernel::get_console_screen_buffer_info(); let mut success = false; let mut resize_buffer = false; let mut size = COORD { X: csbi.dwSize.X, Y: csbi.dwSize.Y }; if csbi.dwSize.X < csbi.srWindow.Left + width { if csbi.srWindow.Left >= i16::max_value() - width { panic!("Argument out of range when setting terminal width."); } size.X = csbi.srWindow.Left + width; resize_buffer = true; } if csbi.dwSize.Y < csbi.srWindow.Top + height { if csbi.srWindow.Top >= i16::max_value() - height { panic!("Argument out of range when setting terminal height"); } size.Y = csbi.srWindow.Top + height; resize_buffer = true; } if resize_buffer { success = kernel::set_console_screen_buffer_size(size); if !success { panic!("Something went wrong when setting screen buffer size."); } } let mut fsr_window: SMALL_RECT = csbi.srWindow; fsr_window.Bottom = fsr_window.Top + height; fsr_window.Right = fsr_window.Left + width; let success = kernel::set_console_info(true, &fsr_window); if success { if resize_buffer { kernel::set_console_screen_buffer_size(csbi.dwSize); } let bounds = kernel::get_largest_console_window_size(); if width > bounds.X { panic!("Argument width: {} out of range when setting terminal width.", width); } if height > bounds.Y { panic!("Argument height: {} out of range when setting terminal height", height); } } } } pub fn clear_after_cursor(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (mut x,mut y) = pos; if x as i16 > csbi.dwSize.X { y += 1; x = 0; } let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32; clear(start_location,cells_to_write); } pub fn clear_before_cursor(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (xpos,ypos) = pos; let x = 0; let y = 0; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = (csbi.dwSize.X as u32 * ypos as u32) + (xpos as u32 + 1); clear(start_location, cells_to_write); } pub fn clear_entire_screen(csbi: CONSOLE_SCREEN_BUFFER_INFO) { let x = 0; let y = 0; let start_location = COORD { X: x as i16, Y: y as i16}; let 
cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32; clear( start_location, cells_to_write); cursor().goto(0, 0); } pub fn clear_current_line(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let x = 0; let y = pos.1; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = csbi.dwSize.X as u32; clear(start_location, cells_to_write); cursor().goto(0, y); } pub fn clear_until_line(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (x,y) = pos; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = (csbi.dwSize.X - x as i16) as u32; clear(start_location, cells_to_write); cursor().goto(x,y); } fn clear( start_loaction: COORD, cells_to_write: u32 ) { let mut cells_written = 0; let mut success = false; success = kernel::fill_console_output_character(&mut cells_written, start_loaction, cells_to_write); if !success { panic!("Could not clear screen after cursor"); } cells_written = 0; success = kernel::fill_console_output_attribute(&mut cells_written, start_loaction, cells_to_write); if !success { panic!("Couldnot reset attributes after cursor"); } }
use {Construct}; use cursor::cursor; use super::{ClearType, ITerminal}; use winapi::um::wincon::{SMALL_RECT, COORD, CONSOLE_SCREEN_BUFFER_INFO,}; use kernel::windows_kernel::{kernel, terminal}; pub struct WinApiTerminal; impl Construct for WinApiTerminal { fn new() -> Box<WinApiTerminal> { Box::from(WinApiTerminal {}) } } impl ITerminal for WinApiTerminal { fn clear(&self, clear_type: ClearType) { let csbi = kernel::get_console_screen_buffer_info(); let pos = cursor().pos(); match clear_type { ClearType::All => clear_entire_screen(csbi), ClearType::FromCursorDown => clear_after_cursor(pos,csbi), ClearType::FromCursorUp => clear_before_cursor(pos, csbi), ClearType::CurrentLine => clear_current_line(pos, csbi), ClearType::UntilNewLine => clear_until_line(pos, csbi), }; } fn terminal_size(&self) -> (u16, u16) { terminal::terminal_size() } fn scroll_up(&self, count: i16) { } fn scroll_down(&self, count: i16) { let csbi = kernel::get_console_screen_buffer_info(); let mut srct_window; srct_window = csbi.srWindow; if srct_window.Bottom < csbi.dwSize.Y - count { srct_window.Top += count; srct_window.Bottom += count; let success = kernel::set_console_info(true, &mut srct_window); if success { panic!("Something went wrong when scrolling down"); } } } fn set_size(&self, width: i16, height: i16) { if width <= 0 { panic!("Cannot set the terminal width lower than 1"); } if height <= 0 { panic!("Cannot set the terminal height lower then 1") } let csbi = kernel::get_console_screen_buffer_info(); let mut success = false; let mut resize_buffer = false; let mut size = COORD { X: csbi.dwSize.X, Y: csbi.dwSize.Y }; if csbi.dwSize.X < csbi.srWindow.Left + width { if csbi.srWindow.Left >= i16::max_value() - width { panic!("Argument out of range when setting terminal width."); } size.X = csbi.srWindow.Left + width; resize_buffer = true; } if csbi.dwSize.Y < csbi.srWindow.Top + height { if csbi.srWindow.Top >= i16::max_value() - height { panic!("Argument out of range when setting terminal height"); } size.Y = csbi.srWindow.Top + height; resize_buffer = true; } if resize_buffer { success = kernel::set_console_screen_buffer_size(size); if !success { panic!("Something went wrong when setting screen buffer size."); } } let mut fsr_window: SMALL_RECT = csbi.srWindow; fsr_window.Bottom = fsr_window.Top + height; fsr_window.Right = fsr_window.Left + width; let success = kernel::set_console_info(true, &fsr_window); if success { if resize_buffer { kernel::set_console_screen_buffer_size(csbi.dwSize); } let bounds = kernel::get_largest_console_window_size(); if width > bounds.X { panic!("Argument width: {} out of range when setting terminal width.", width); } if height > bounds.Y { panic!("Argument height: {} out of range when setting terminal height", height); } } } } pub fn clear_after_cursor(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (mut x,mut y) = pos; if x as i16 > csbi.dwSize.X { y += 1; x = 0; } let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32; clear(start_location,cells_to_write); }
pub fn clear_entire_screen(csbi: CONSOLE_SCREEN_BUFFER_INFO) { let x = 0; let y = 0; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32; clear( start_location, cells_to_write); cursor().goto(0, 0); } pub fn clear_current_line(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let x = 0; let y = pos.1; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = csbi.dwSize.X as u32; clear(start_location, cells_to_write); cursor().goto(0, y); } pub fn clear_until_line(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (x,y) = pos; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = (csbi.dwSize.X - x as i16) as u32; clear(start_location, cells_to_write); cursor().goto(x,y); } fn clear( start_loaction: COORD, cells_to_write: u32 ) { let mut cells_written = 0; let mut success = false; success = kernel::fill_console_output_character(&mut cells_written, start_loaction, cells_to_write); if !success { panic!("Could not clear screen after cursor"); } cells_written = 0; success = kernel::fill_console_output_attribute(&mut cells_written, start_loaction, cells_to_write); if !success { panic!("Couldnot reset attributes after cursor"); } }
pub fn clear_before_cursor(pos: (u16,u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) { let (xpos,ypos) = pos; let x = 0; let y = 0; let start_location = COORD { X: x as i16, Y: y as i16}; let cells_to_write = (csbi.dwSize.X as u32 * ypos as u32) + (xpos as u32 + 1); clear(start_location, cells_to_write); }
function_block-full_function
[ { "content": "pub fn set_console_screen_buffer_size( size: COORD) -> bool\n\n{\n\n let output_handle = get_output_handle();\n\n\n\n unsafe\n\n {\n\n let success = SetConsoleScreenBufferSize(output_handle, size);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 4, "score": 225620.67480280943 }, { "content": "/// Get the terminal size\n\npub fn terminal_size() -> (u16, u16) {\n\n let csbi = super::kernel::get_console_screen_buffer_info();\n\n (\n\n (csbi.srWindow.Right - csbi.srWindow.Left) as u16,\n\n (csbi.srWindow.Bottom - csbi.srWindow.Top) as u16,\n\n )\n\n}", "file_path": "src/kernel/windows_kernel/terminal.rs", "rank": 6, "score": 190995.8030594545 }, { "content": "/// Get the current terminal size.\n\npub fn terminal_size() -> (u16,u16) {\n\n // http://rosettacode.org/wiki/Terminal_control/Dimensions#Library:_BSD_libc\n\n let us = UnixSize {\n\n rows: 0,\n\n cols: 0,\n\n x: 0,\n\n y: 0,\n\n };\n\n let r = unsafe { ioctl(STDOUT_FILENO, TIOCGWINSZ, &us) };\n\n if r == 0 {\n\n // because crossterm works starts counting at 0 and unix terminal starts at cell 1 you have subtract one to get 0-based results.\n\n (us.cols -1, us.rows -1)\n\n } else {\n\n (0,0)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 7, "score": 190995.67254246149 }, { "content": "/// Get the terminal size based on the current platform.\n\npub fn get_terminal_size() -> (u16, u16)\n\n{\n\n terminal_size()\n\n}\n\n\n", "file_path": "src/shared/functions.rs", "rank": 8, "score": 186449.9023506248 }, { "content": "pub fn read_console_output(read_buffer: &HANDLE, copy_buffer: &mut [CHAR_INFO;160], buffer_size: COORD, buffer_coord: COORD, source_buffer: PSMALL_RECT)\n\n{\n\n use self::wincon::ReadConsoleOutputA;\n\n\n\n unsafe\n\n {\n\n if !is_true(ReadConsoleOutputA(\n\n *read_buffer, // screen buffer to read from\n\n copy_buffer.as_mut_ptr(), // buffer to copy into\n\n buffer_size, // col-row size of chiBuffer\n\n buffer_coord, // top left dest. cell in chiBuffer\n\n source_buffer) // screen buffer source rectangle\n\n ){\n\n\n\n panic!(\"Cannot read console output\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 9, "score": 177160.4179455071 }, { "content": "pub fn write_console_output(write_buffer: &HANDLE, copy_buffer: &mut [CHAR_INFO;160], buffer_size: COORD, buffer_coord: COORD, source_buffer: PSMALL_RECT)\n\n{\n\n use self::wincon::WriteConsoleOutputA;\n\n\n\n unsafe\n\n {\n\n if !is_true(WriteConsoleOutputA(\n\n *write_buffer, // screen buffer to write to\n\n copy_buffer.as_mut_ptr(), // buffer to copy into\n\n buffer_size, // col-row size of chiBuffer\n\n buffer_coord, // top left dest. 
cell in chiBuffer\n\n source_buffer)// screen buffer source rectangle\n\n ){\n\n\n\n panic!(\"Cannot write to console output\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 10, "score": 177160.4179455071 }, { "content": "/// Set the terminal size to width 10, height: 10 | demonstration.\n\npub fn set_terminal_size()\n\n{\n\n let mut terminal = terminal();\n\n\n\n terminal.set_size(10,10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 11, "score": 173405.51309499034 }, { "content": "pub fn fill_console_output_character(cells_written: &mut u32, start_location: COORD, cells_to_write: u32) -> bool\n\n{\n\n let output_handle = get_output_handle();\n\n\n\n unsafe {\n\n // fill the cells in console with blanks\n\n let success = FillConsoleOutputCharacterA (\n\n output_handle,\n\n ' ' as i8,\n\n cells_to_write,\n\n start_location,\n\n cells_written,\n\n );\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 12, "score": 171516.54098696753 }, { "content": "pub fn fill_console_output_attribute(cells_written: &mut u32, start_location: COORD, cells_to_write: u32) -> bool\n\n{\n\n // Get the position of the current console window\n\n let csbi = get_console_screen_buffer_info();\n\n let output_handle = get_output_handle();\n\n\n\n let success;\n\n\n\n unsafe {\n\n success = FillConsoleOutputAttribute (\n\n output_handle,\n\n csbi.wAttributes,\n\n cells_to_write,\n\n start_location,\n\n cells_written,\n\n );\n\n }\n\n\n\n is_true(success)\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 13, "score": 171516.54098696753 }, { "content": "pub fn set_console_cursor_position(x: i16, y: i16)\n\n{\n\n if x < 0 || x >= <i16>::max_value() {\n\n panic!(\"X: {}, Argument Out of Range Exception\", x);\n\n }\n\n\n\n if y < 0 || y >= <i16>::max_value() {\n\n panic!(\"Y: {}, Argument Out of Range Exception\", y);\n\n }\n\n\n\n let output_handle = get_output_handle();\n\n let position = COORD { X: x, Y: y };\n\n\n\n unsafe {\n\n let success = SetConsoleCursorPosition(output_handle, position);\n\n\n\n if success == 0 {\n\n panic!(\"Argument out of range.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 14, "score": 169162.18864094635 }, { "content": "pub fn set_active_screen_buffer(new_buffer: HANDLE)\n\n{\n\n unsafe\n\n {\n\n if !is_true(SetConsoleActiveScreenBuffer(new_buffer))\n\n {\n\n panic!(\"Cannot set active screen buffer\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 15, "score": 166408.22481233394 }, { "content": "/// Get the current cursor position.\n\npub fn pos() -> (u16,u16)\n\n{\n\n use std::io::Error;\n\n use std::io::{ Write,Read };\n\n\n\n let mut context = Context::new();\n\n {\n\n let mut command = NoncanonicalModeCommand::new(&mut context);\n\n command.0.execute();\n\n\n\n // This code is original written by term_cursor credits to them.\n\n let mut stdout = io::stdout();\n\n\n\n // Write command\n\n stdout.write(b\"\\x1B[6n\");\n\n stdout.flush();\n\n\n\n // Read back result\n\n let mut buf = [0u8; 2];\n\n // Expect `ESC[`\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 16, "score": 160644.14949990629 }, { "content": "pub fn get_console_screen_buffer_info() -> CONSOLE_SCREEN_BUFFER_INFO {\n\n let output_handle = get_output_handle();\n\n let mut csbi = CONSOLE_SCREEN_BUFFER_INFO::empty();\n\n let success;\n\n\n\n unsafe { success = GetConsoleScreenBufferInfo(output_handle, 
&mut csbi) }\n\n\n\n if success == 0 {\n\n panic!(\"Cannot get console screen buffer info\");\n\n }\n\n\n\n csbi\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 17, "score": 150969.19259186712 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down()\n\n{\n\n print_test_data();\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 18, "score": 149856.10367853026 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up()\n\n{\n\n print_test_data();\n\n\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 19, "score": 149856.10367853026 }, { "content": "/// Print the the current terminal size | demonstration.\n\npub fn print_terminal_size()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Get terminal size\n\n let terminal_size = terminal.terminal_size();\n\n // Print results\n\n print!(\"X: {}, y: {}\", terminal_size.0, terminal_size.1);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 20, "score": 145693.70955383175 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4,20);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 21, "score": 141764.45248217983 }, { "content": "pub fn get_largest_console_window_size() -> COORD\n\n{\n\n let output_handle = get_output_handle();\n\n\n\n unsafe {\n\n GetLargestConsoleWindowSize(output_handle)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 22, "score": 140565.66248625814 }, { "content": "pub fn set_console_text_attribute(value: u16)\n\n{\n\n let output_handle = get_output_handle();\n\n\n\n unsafe {\n\n SetConsoleTextAttribute(output_handle, value);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 23, "score": 136724.41274943954 }, { "content": "/// Get the cursor position based on the current platform.\n\npub fn get_cursor_position() -> (u16,u16)\n\n{\n\n pos()\n\n}\n\n\n\n#[cfg(windows)]\n", "file_path": "src/shared/functions.rs", "rank": 24, "score": 136439.27219425887 }, { "content": "pub fn pos() -> (u16,u16)\n\n{\n\n let csbi = kernel::get_console_screen_buffer_info();\n\n ( csbi.dwCursorPosition.X as u16, csbi.dwCursorPosition.Y as u16 )\n\n}\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 25, "score": 136439.27219425887 }, { "content": "/// Transform the given mode into an raw mode (non-canonical) mode.\n\npub fn make_raw(termios: &mut Termios) {\n\n extern \"C\" {\n\n pub fn cfmakeraw(termptr: *mut Termios);\n\n }\n\n unsafe { cfmakeraw(termios) }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 26, "score": 135746.2999793693 }, { "content": "pub fn get_console_mode(handle: &HANDLE, current_mode: &mut u32) -> bool\n\n{\n\n unsafe {\n\n let success = GetConsoleMode(*handle, &mut *current_mode);\n\n return is_true(success);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 27, "score": 135463.1855563863 }, { 
"content": "pub fn set_console_mode(handle: &HANDLE, console_mode: u32) -> bool\n\n{\n\n unsafe {\n\n let success = SetConsoleMode(*handle, console_mode);\n\n return is_true(success);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 28, "score": 121889.43731340198 }, { "content": "/// Resize the terminal to X: 10, Y: 10 | demonstration.\n\npub fn resize_terminal()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Get terminal size\n\n terminal.set_size(10,10);\n\n}\n", "file_path": "examples/terminal/terminal.rs", "rank": 29, "score": 120682.56884304632 }, { "content": "pub fn create_console_screen_buffer() -> HANDLE\n\n{\n\n use winapi::shared::ntdef::NULL;\n\n use winapi::um::wincon::CONSOLE_TEXTMODE_BUFFER;\n\n use winapi::um::winnt::{GENERIC_READ, GENERIC_WRITE, FILE_SHARE_READ, FILE_SHARE_WRITE};\n\n use winapi::um::minwinbase::SECURITY_ATTRIBUTES;\n\n use std::mem::size_of;\n\n\n\n unsafe\n\n {\n\n let mut security_attr: SECURITY_ATTRIBUTES = SECURITY_ATTRIBUTES\n\n {\n\n nLength: size_of::<SECURITY_ATTRIBUTES>() as u32,\n\n lpSecurityDescriptor: NULL,\n\n bInheritHandle: TRUE\n\n };\n\n\n\n let new_screen_buffer = CreateConsoleScreenBuffer(\n\n GENERIC_READ | // read/write access\n\n GENERIC_WRITE,\n\n FILE_SHARE_READ |\n\n FILE_SHARE_WRITE, // shared\n\n &mut security_attr, // default security attributes\n\n CONSOLE_TEXTMODE_BUFFER, // must be TEXTMODE\n\n NULL\n\n );\n\n new_screen_buffer\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 30, "score": 118990.21847131461 }, { "content": "pub fn get_original_console_color() -> u16 {\n\n let console_buffer_info = get_console_screen_buffer_info();\n\n console_buffer_info.wAttributes as u16\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 31, "score": 118783.11616591162 }, { "content": "/// Get an Terminal implementation whereon terminal related actions can be performed.\n\n///\n\n/// Check `/examples/terminal` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n/// use crossterm::terminal;\n\n///\n\n/// let mut term = terminal::terminal();\n\n///\n\n/// // scroll down by 5 lines\n\n/// let size = term.scroll_down(5);\n\n///\n\n/// ```\n\n///\n\npub fn terminal() -> Box<Terminal>\n\n{\n\n Box::from(Terminal::new())\n\n}\n", "file_path": "src/terminal/terminal.rs", "rank": 32, "score": 116753.4530032239 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 33, "score": 116710.4633294706 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4,4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 34, "score": 116710.4633294706 }, { "content": "/// Clear all lines in terminal | 
demonstration\n\npub fn clear_all_lines()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 35, "score": 116710.28197367916 }, { "content": "/// Set the terminal mode to the given mode.\n\npub fn set_terminal_mode(termios: &Termios) -> io::Result<()>\n\n{\n\n extern \"C\" {\n\n pub fn tcsetattr(fd: c_int, opt: c_int, termptr: *const Termios) -> c_int;\n\n }\n\n is_true(unsafe { tcsetattr(0, 0, termios) }).and(Ok(()))\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 36, "score": 116293.12926554844 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_current_line()\n\n{\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4,4);\n\n\n\n // Clear current line cells.\n\n terminal.clear(ClearType::CurrentLine);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 37, "score": 113875.44295114504 }, { "content": "pub fn set_console_info(absolute: bool, rect: &SMALL_RECT) -> bool\n\n{\n\n\n\n let output_handle = get_output_handle();\n\n\n\n let absolute = match absolute { true => 1, false => 0, };\n\n unsafe\n\n {\n\n let success = SetConsoleWindowInfo(output_handle,absolute ,rect);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 38, "score": 100551.98241491642 }, { "content": "/// Get an TerminalCursor implementation whereon cursor related actions can be performed.\n\n///\n\n/// Check `/examples/cursor` in the libary for more spesific examples.\n\n/// \n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::cursor;\n\n/// \n\n/// // Get cursor and goto pos X: 5, Y: 10\n\n/// let mut cursor = cursor::cursor();\n\n/// cursor.goto(5,10);\n\n/// \n\n/// //Or you can do it in one line.\n\n/// cursor::cursor().goto(5,10);\n\n///\n\n/// ```\n\npub fn cursor() -> Box<TerminalCursor> {\n\n Box::from(TerminalCursor::new())\n\n}\n", "file_path": "src/cursor/cursor.rs", "rank": 39, "score": 99078.27699443095 }, { "content": "/// This trait is used for creating an instance of an concrete implementation from an base trait.\n\n/// This trait allows the output to be different in size.\n\npub trait Construct {\n\n fn new() -> Box<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "src/shared/traits.rs", "rank": 40, "score": 98686.63183050392 }, { "content": "/// Get the current terminal mode.\n\npub fn get_terminal_mode() -> io::Result<Termios>\n\n{\n\n extern \"C\" {\n\n pub fn tcgetattr(fd: c_int, termptr: *mut Termios) -> c_int;\n\n }\n\n unsafe {\n\n let mut termios = mem::zeroed();\n\n is_true(tcgetattr(0, &mut termios))?;\n\n Ok(termios)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 41, "score": 96901.52639543879 }, { "content": "/// Get an TerminalColor implementation whereon color related actions can be performed.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{color, Color};\n\n/// \n\n/// // Get colored terminal instance\n\n/// let mut colored_terminal = color();\n\n///\n\n/// // preform some actions on the colored terminal\n\n/// colored_terminal.set_fg(Color::Red);\n\n/// 
colored_terminal.set_bg(Color::Blue);\n\n/// colored_terminal.reset();\n\n/// ```\n\npub fn color() -> Box<TerminalColor> {\n\n Box::from(TerminalColor::new())\n\n}\n\n\n", "file_path": "src/style/color/color.rs", "rank": 42, "score": 96508.63373679217 }, { "content": "/// Get the alternate screen command to enable and disable alternate screen based on the current platform\n\nfn get_to_alternate_screen_command() -> Box<ICommand>\n\n{\n\n #[cfg(target_os = \"windows\")]\n\n let command = functions::get_module::<Box<ICommand>>(win_commands::ToAlternateScreenBufferCommand::new(), shared_commands::ToAlternateScreenBufferCommand::new()).unwrap();\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n let command = shared_commands::ToAlternateScreenBufferCommand::new();\n\n \n\n command\n\n}", "file_path": "src/terminal/screen.rs", "rank": 43, "score": 95557.39106712509 }, { "content": "/// This generates an random key for the `ContextCommand`.\n\n/// So that we can identify the `ContextCommand` in an list of commands.\n\nfn generate_key() -> i16 {\n\n rand::random::<i16>()\n\n}", "file_path": "src/state/commands/mod.rs", "rank": 44, "score": 93698.25413562084 }, { "content": "/// Set the is ansi escape property enabled or disabled. So whe can determine if the ansi escape codes are enabled.\n\npub fn set_ansi_enabled(is_enabled :bool)\n\n{\n\n unsafe\n\n {\n\n IS_ANSI_ON_WINDOWS_ENABLED = Some(is_enabled);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 45, "score": 91189.37475916851 }, { "content": "/// Trait withs contains a method for switching into raw mode.\n\npub trait IntoRawMode: Write + Sized\n\n{\n\n fn into_raw_mode<'a>(self, context: &'a mut Context) -> io::Result<RawTerminal<Self>>;\n\n}\n\n\n\nimpl<W: Write> IntoRawMode for W\n\n{\n\n /// Switch to raw mode.\n\n ///\n\n /// Raw mode means that input (stdin) won't be printed it will instead have to be written manually by\n\n /// the program. 
The input isn't canonicalised or line buffered (that is, you can\n\n /// read from input(stdin) one byte of a time).\n\n fn into_raw_mode<'a>(self, context: &'a mut Context) -> io::Result<RawTerminal<Self>>\n\n {\n\n let (mut command, _) = EnableRawModeCommand::new(context);\n\n let success = command.execute();\n\n\n\n if success\n\n {\n\n Ok(RawTerminal { output: self, context: context})\n", "file_path": "src/terminal/raw.rs", "rank": 46, "score": 89044.03536091003 }, { "content": "/// Get an module specific implementation based on the current platform.\n\npub fn get_module<T>(winapi_impl: T, unix_impl: T) -> Option<T>\n\n{\n\n let mut term: Option<T> = None;\n\n let mut does_support = true;\n\n\n\n if cfg!(target_os = \"windows\") {\n\n #[cfg(windows)]\n\n use kernel::windows_kernel::ansi_support::try_enable_ansi_support;\n\n\n\n // Try to enable ansi on windows if not than use WINAPI.\n\n does_support = try_enable_ansi_support();\n\n\n\n// println!(\"does support = {}\", does_support);\n\n if !does_support\n\n {\n\n term = Some(winapi_impl);\n\n }\n\n }\n\n\n\n if does_support\n\n {\n\n term = Some(unix_impl);\n\n }\n\n\n\n term\n\n}", "file_path": "src/shared/functions.rs", "rank": 47, "score": 88910.92915878195 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5); \n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 48, "score": 83663.8046592322 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print()\n\n{\n\n // To print an some displayable content on an certain position. \n\n \n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first. 
\n\n use std;\n\n use std::io::Write;\n\n std::io::stdout().flush();\n\n \n\n /* Because the above method is a little to much code,\n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n\n\n\n cursor.goto(10,5).print(\"@\");\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 49, "score": 83657.1648585915 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 50, "score": 83653.50489014179 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 51, "score": 83653.50489014179 }, { "content": "/// Is the return value true?\n\nfn is_true(value: i32) -> Result<(), Error>\n\n{\n\n match value\n\n {\n\n -1 => Err(io::Error::last_os_error()),\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error()),\n\n }\n\n}", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 52, "score": 82866.83821804936 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the left in the terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 53, "score": 81517.87404837889 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Red);\n\n // Print the object to the console and check see the result \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 54, "score": 81517.87404837889 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 55, "score": 81517.87404837889 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result. 
\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 56, "score": 81517.87404837889 }, { "content": "/// Save and reset cursor position | demonstration..\n\npub fn safe_and_reset_position()\n\n{\n\n let mut cursor = cursor();\n\n \n\n // Goto X: 5 Y: 5\n\n cursor.goto(5,5);\n\n // Safe cursor position: X: 5 Y: 5\n\n cursor.save_position();\n\n // Goto X: 5 Y: 20\n\n cursor.goto(5,20);\n\n // Print at X: 5 Y: 20.\n\n println!(\"Yea!\");\n\n // Reset back to X: 5 Y: 5.\n\n cursor.reset_position();\n\n // Print Back at X: 5 Y: 5.\n\n println!(\"Back\");\n\n\n\n println!()\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 57, "score": 79539.70766062495 }, { "content": "#[cfg(unix)]\n\npub fn print_font_with_attributes()\n\n{\n\n println!(\"{}\", paint(\"Normal text\"));\n\n println!(\"{}\", paint(\"Bold text\").bold());\n\n println!(\"{}\", paint(\"Italic text\").italic());\n\n println!(\"{}\", paint(\"Slow blinking text\").slow_blink());\n\n println!(\"{}\", paint(\"Rapid blinking text\").rapid_blink());\n\n println!(\"{}\", paint(\"Hidden text\").hidden());\n\n println!(\"{}\", paint(\"Underlined text\").underlined());\n\n println!(\"{}\", paint(\"Reversed color\").reverse());\n\n println!(\"{}\", paint(\"Dim text color\").dim());\n\n println!(\"{}\", paint(\"Crossed out font\").crossed_out());\n\n}\n\n\n\n/// Print all supported rgb colors | demonstration.\n", "file_path": "examples/color/mod.rs", "rank": 58, "score": 79539.70766062495 }, { "content": "#[cfg(unix)]\n\npub fn print_supported_colors()\n\n{ \n\n let count = crossterm::style::color().get_available_color_count().unwrap();\n\n\n\n for i in 0..count\n\n {\n\n println!(\"{}\", paint(format!(\"Color: {}\",i)).with(Color::AnsiValue(i as u8)));\n\n }\n\n}", "file_path": "examples/color/mod.rs", "rank": 59, "score": 79539.70766062495 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\" \").on(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\" \").on(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\" \").on(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\" \").on(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\" \").on(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\" \").on(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\" \").on(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\" \").on(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\" \").on(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\" \").on(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\" \").on(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\" \").on(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\" \").on(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\" \").on(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\" \").on(Color::White));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::Rgb {r: 10, g: 10, b: 10}));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::AnsiValue(50)));\n\n}\n\n\n\n/// Print font with all available attributes. 
Note that this can only be used at unix systems and that some are not supported widely | demonstration..\n", "file_path": "examples/color/mod.rs", "rank": 60, "score": 79539.70766062495 }, { "content": "/// print font with fore- background color | demonstration.\n\npub fn paint_foreground_and_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font on blue background color\");\n\n /* Foreground color: \n\n Call the method `with()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.with(Color::Red);\n\n /* Background color: \n\n Call the method `on()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font on blue background color\").with(Color::Red).on(Color::Blue));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 61, "score": 79539.70766062495 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_foreground_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\"■\").with(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\"■\").with(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\"■\").with(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\"■\").with(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\"■\").with(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\"■\").with(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\"■\").with(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\"■\").with(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\"■\").with(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\"■\").with(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\"■\").with(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\"■\").with(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\"■\").with(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\"■\").with(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\"■\").with(Color::White));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 62, "score": 79539.70766062495 }, { "content": "///! This trait defines the actions that can be preformed with the terminal.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminal can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! 
so that cursor related actions can be preformed on both unix and windows systems.\n\npub trait ITerminal {\n\n /// Clear the current cursor by specifying the clear type\n\n fn clear(&self, clear_type: ClearType);\n\n /// Get the terminal size (x,y)\n\n fn terminal_size(&self) -> (u16, u16);\n\n /// Scroll `n` lines up in the current terminal.\n\n fn scroll_up(&self, count: i16);\n\n /// Scroll `n` lines down in the current terminal.\n\n fn scroll_down(&self, count: i16);\n\n /// Resize terminal to the given width and height.\n\n fn set_size(&self,width: i16, height: i16);\n\n}\n", "file_path": "src/terminal/mod.rs", "rank": 64, "score": 78219.88985548262 }, { "content": "fn print_test_data()\n\n{\n\n for i in 0..100 {\n\n println!(\"Test data to test terminal: {}\",i);\n\n }\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 65, "score": 76779.84230676664 }, { "content": "/// Save current cursor position to recall later.\n\npub fn save_cursor_pos()\n\n{\n\n let position = pos();\n\n\n\n unsafe {\n\n SAVED_CURSOR_POS = (position.0, position.1);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 66, "score": 75990.89073161634 }, { "content": "/// Reset to saved cursor position\n\npub fn reset_to_saved_position()\n\n{\n\n unsafe {\n\n kernel::set_console_cursor_position(SAVED_CURSOR_POS.0 as i16, SAVED_CURSOR_POS.1 as i16);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 67, "score": 75990.89073161634 }, { "content": "/// Get whether ansi has been enabled.\n\npub fn ansi_enabled() -> bool\n\n{\n\n unsafe { IS_ANSI_ON_WINDOWS_ENABLED.unwrap_or_else(| | false) }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 68, "score": 72411.80595470814 }, { "content": "/// Get the std_output_handle of the console\n\npub fn get_output_handle() -> HANDLE {\n\n unsafe {\n\n if let Some(handle) = CONSOLE_OUTPUT_HANDLE {\n\n handle\n\n } else {\n\n let handle = GetStdHandle(STD_OUTPUT_HANDLE);\n\n\n\n if !is_valid_handle(&handle)\n\n {\n\n panic!(\"Cannot get output handle\")\n\n }\n\n\n\n CONSOLE_OUTPUT_HANDLE = Some(handle);\n\n handle\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 69, "score": 72411.80595470814 }, { "content": "/// Get the std_input_handle of the console\n\npub fn get_input_handle() -> HANDLE {\n\n unsafe {\n\n if let Some(handle) = CONSOLE_INPUT_HANDLE {\n\n handle\n\n } else {\n\n let handle = GetStdHandle(STD_INPUT_HANDLE);\n\n\n\n if !is_valid_handle(&handle)\n\n {\n\n panic!(\"Cannot get input handle\")\n\n }\n\n\n\n CONSOLE_INPUT_HANDLE = Some(handle);\n\n handle\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 70, "score": 72411.80595470814 }, { "content": "/// Get whether windows supports ansi\n\npub fn windows_supportable() -> bool\n\n{\n\n unsafe { DOES_WINDOWS_SUPPORT_ANSI.unwrap_or_else(| | false)}\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 71, "score": 72411.80595470814 }, { "content": "/// Get whether ansi has been tried to enable before.\n\npub fn has_been_tried_to_enable_ansi() -> bool\n\n{\n\n unsafe\n\n {\n\n return HAS_BEEN_TRYED_TO_ENABLE;\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 72, "score": 70916.7636300505 }, { "content": "/// Try enable `ANSI escape codes` and return the result.\n\npub fn try_enable_ansi_support() -> bool\n\n{\n\n use state::commands::win_commands::EnableAnsiCommand;\n\n let mut command = 
EnableAnsiCommand::new();\n\n let success = command.execute();\n\n\n\n set_is_windows_ansi_supportable(success);\n\n set_ansi_enabled(success);\n\n has_been_tried_to_enable(true);\n\n\n\n success\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 73, "score": 69514.79716438155 }, { "content": "///! This trait defines the actions that can be preformed with the terminal cursor.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminalCursor can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! so that cursor related actions can be preformed on both unix and windows systems.\n\npub trait ITerminalCursor {\n\n /// Goto some location (x,y) in the terminal.\n\n fn goto(&self, x: u16, y: u16);\n\n /// Get the location (x,y) of the current curor in the terminal\n\n fn pos(&self) -> (u16, u16);\n\n /// Move cursor n times up\n\n fn move_up(&self, count: u16);\n\n /// Move the cursor `n` times to the right.\n\n fn move_right(&self, count: u16);\n\n /// Move the cursor `n` times down.\n\n fn move_down(&self, count: u16);\n\n /// Move the cursor `n` times left.\n\n fn move_left(&self, count: u16);\n\n /// Save cursor position for recall later. Note that this position is stored program based not per instance of the cursor struct.\n\n fn save_position(&mut self);\n\n /// Return to saved cursor position\n\n fn reset_position(&self);\n\n}", "file_path": "src/cursor/mod.rs", "rank": 74, "score": 68045.91555233453 }, { "content": "/// Checks if the console handle is an invalid handle value.\n\npub fn is_valid_handle(handle: &HANDLE) -> bool {\n\n if *handle == INVALID_HANDLE_VALUE {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 75, "score": 67694.2127545841 }, { "content": "///! This trait defines the actions that can be preformed with the terminal color.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminalColor can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! 
so that color related actions can be preformed on both unix and windows systems.\n\npub trait ITerminalColor {\n\n /// Set the foreground color to the given color.\n\n fn set_fg(&self, fg_color: Color);\n\n /// Set the background color to the given color.\n\n fn set_bg(&self, fg_color: Color);\n\n /// Reset the terminal color to default.\n\n fn reset(&self);\n\n /// Gets an value that represents an color from the given `Color` and `ColorType`.\n\n fn color_value(&self, color: Color, color_type: ColorType) -> String;\n\n}", "file_path": "src/style/color/mod.rs", "rank": 76, "score": 66061.3811240201 }, { "content": "/// Wraps an displayable object so it can be formatted with colors and attributes.\n\n///\n\n/// Check `/examples/color` in the libary for more spesific examples.\n\n/// \n\n/// #Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{paint,Color};\n\n///\n\n/// fn main()\n\n/// {\n\n/// // Create an styledobject object from the text 'Unstyled font' \n\n/// // Currently it has the default foregroundcolor and backgroundcolor.\n\n/// println!(\"{}\",paint(\"Unstyled font\"));\n\n///\n\n/// // Create an displayable object from the text 'Colored font', \n\n/// // Paint this with the `Red` foreground color and `Blue` backgroundcolor.\n\n/// // Print the result.\n\n/// let styledobject = paint(\"Colored font\").with(Color::Red).on(Color::Blue);\n\n/// println!(\"{}\", styledobject);\n\n/// \n\n/// // Or all in one line\n\n/// println!(\"{}\", paint(\"Colored font\").with(Color::Red).on(Color::Blue));\n\n/// }\n\n/// ```\n\npub fn paint<D>(val: D) -> StyledObject<D>\n\nwhere\n\n D: fmt::Display,\n\n{\n\n ObjectStyle::new().apply_to(val)\n\n}\n\n\n\n\n", "file_path": "src/style/color/color.rs", "rank": 77, "score": 63788.181101035065 }, { "content": "pub struct ToAlternateScreen;\n\n\n\nimpl fmt::Display for ToAlternateScreen\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n get_to_alternate_screen_command().execute();\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Struct that can be used for writing to an alternate screen.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n/// use self::crossterm::terminal::screen;\n\n/// use std::{time, thread};\n\n/// ...\n\n///\n", "file_path": "src/terminal/screen.rs", "rank": 78, "score": 62742.34351507732 }, { "content": "//! 
This module contains all the logic for switching between alternate screen and main screen.\n\n\n\nuse shared::functions;\n\nuse Context;\n\nuse state::commands::*;\n\n\n\nuse std::{ fmt, ops };\n\nuse std::io::{self, Write};\n\n\n\npub struct ToMainScreen;\n\n\n\nimpl fmt::Display for ToMainScreen\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n get_to_alternate_screen_command().undo();\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Struct that switches to alternate screen buffer on display.\n", "file_path": "src/terminal/screen.rs", "rank": 79, "score": 62741.76706087661 }, { "content": " pub fn from(mut output: W) -> Self {\n\n write!(output, \"{}\", ToAlternateScreen);\n\n AlternateScreen { output: output, context: Context::new()}\n\n }\n\n}\n\n\n\nimpl<W: Write> ops::Deref for AlternateScreen<W> {\n\n type Target = W;\n\n\n\n fn deref(&self) -> &W {\n\n &self.output\n\n }\n\n}\n\n\n\nimpl<W: Write> ops::DerefMut for AlternateScreen<W> {\n\n fn deref_mut(&mut self) -> &mut W {\n\n &mut self.output\n\n }\n\n}\n\n\n", "file_path": "src/terminal/screen.rs", "rank": 80, "score": 62738.854423688994 }, { "content": "impl<W: Write> Write for AlternateScreen<W> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.output.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.output.flush()\n\n }\n\n}\n\n\n\nimpl<W: Write> Drop for AlternateScreen<W>\n\n{\n\n fn drop(&mut self)\n\n {\n\n write!(self, \"{}\", ToMainScreen).expect(\"switch to main screen\");\n\n }\n\n}\n\n\n\n/// Get the alternate screen command to enable and disable alternate screen based on the current platform\n", "file_path": "src/terminal/screen.rs", "rank": 81, "score": 62734.226768235014 }, { "content": "/// // Initialize and switch to the alternate screen from an std output handle.\n\n/// // Now you can write to this screen.\n\n/// let mut screen = screen::AlternateScreen::from(stdout());\n\n/// // Write some text to the alternate screen.\n\n/// write!(screen, \"Welcome to the alternate screen. 
Wait 4 seconds to switch back\").unwrap();\n\n/// thread::sleep(time::Duration::from_secs(4));\n\n/// // switch back to main screen.\n\n/// write!(screen, \"{}\", screen::ToMainScreen);\n\n/// write!(screen, \"{}\", \"We are back again at the main screen\");\n\n///\n\n/// ...\n\n///\n\n/// ```\n\npub struct AlternateScreen<W: Write> {\n\n /// The output target.\n\n output: W,\n\n context: Context\n\n}\n\n\n\nimpl<W: Write> AlternateScreen<W> {\n", "file_path": "src/terminal/screen.rs", "rank": 82, "score": 62733.628947267505 }, { "content": "/// Parse integer to an bool\n\nfn is_true(value: i32) -> bool\n\n{\n\n if value == 0{\n\n return false;\n\n }\n\n else{\n\n return true;\n\n }\n\n}\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 83, "score": 61859.08282168742 }, { "content": "// alternate screen is not working correctly currently\n\n\n\n\n\n//extern crate crossterm;\n\n//\n\n//use crossterm::terminal::screen::{AlternateScreen, ToAlternateScreen, ToMainScreen};\n\n//use crossterm::cursor::cursor;\n\n//use crossterm::terminal::{self, ClearType};\n\n//\n\n//use std::io::{Write, stdout};\n\n//use std::{time, thread};\n\n//\n\n//fn print_wait_screen(screen: &mut Write)\n\n//{\n\n// terminal::terminal().clear(ClearType::All);\n\n// write!(screen,\n\n// \"Welcome to the wait screen.\\n\\\n\n// Please wait a few seconds until we arrive back at the main screen.\\n\\\n\n// Seconds to Go: \"\n\n// );\n", "file_path": "examples/terminal/alternate_screen.rs", "rank": 84, "score": 60136.876199116974 }, { "content": "// }\n\n//}\n\n//\n\n//pub fn with_alternate_screen_instance()\n\n//{\n\n// // create scope. If this scope ends the screen will be switched back to mainscreen.\n\n// // becouse `AlternateScreen` switches back to main screen when switching back.\n\n// {\n\n// // create new alternate screen instance and switch to the alternate screen.\n\n// let mut screen = AlternateScreen::from(stdout());\n\n//\n\n// // Print the wait screen.\n\n// print_wait_screen(&mut screen);\n\n// }\n\n//\n\n// println!(\"Whe are back at the main screen\");\n\n//}\n\n//\n\n//pub fn manually_switch_to_alternate_screen()\n\n//{\n", "file_path": "examples/terminal/alternate_screen.rs", "rank": 85, "score": 60136.064842146174 }, { "content": "// // You can switch to alternate screen manually but if you forget to switch back your terminal may cause some undefined behavior.\n\n//\n\n// let mut screen = stdout();\n\n//\n\n// // switch to alternate screeen\n\n// write!(screen, \"{}\", ToAlternateScreen);\n\n// // load wait screen\n\n// print_wait_screen(&mut screen);\n\n// // switch back\n\n// write!(screen,\"{}\", ToMainScreen);\n\n// println!(\"Whe are back at the main screen\");\n\n//\n\n//}", "file_path": "examples/terminal/alternate_screen.rs", "rank": 86, "score": 60133.54035565864 }, { "content": "//\n\n// let mut counter = 5;\n\n// // get cursor instance\n\n// let mut cursor = cursor();\n\n//\n\n// // loop until the counter hits 0\n\n// loop\n\n// {\n\n// // 1 second delay\n\n// thread::sleep(time::Duration::from_secs(1));\n\n// // decrement counter\n\n// counter -= 1;\n\n//\n\n// // print the current counter at the line of `Seconds to Go: {counter}`\n\n// cursor.goto(15,2).print(counter);\n\n//\n\n// if counter <= 0\n\n// {\n\n// break;\n\n// }\n", "file_path": "examples/terminal/alternate_screen.rs", "rank": 87, "score": 60127.184460128 }, { "content": "/// Set the is_windows_ansi_supportable property. 
So whe can determine whether windows supports ansi.\n\nfn set_is_windows_ansi_supportable(is_enabled :bool)\n\n{\n\n unsafe\n\n {\n\n DOES_WINDOWS_SUPPORT_ANSI = Some(is_enabled);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 88, "score": 57873.902493045316 }, { "content": "fn main() {\n\n}", "file_path": "examples/bin.rs", "rank": 89, "score": 45069.01454741309 }, { "content": "/// This trait can be used to create an empty instance of an struct.\n\npub trait Empty {\n\n fn empty() -> Self;\n\n}\n\n\n", "file_path": "src/shared/traits.rs", "rank": 90, "score": 42133.95204710071 }, { "content": "/// This command can be used for simple commands witch just have an `undo()` and an `execute()`\n\npub trait ICommand\n\n{\n\n fn new() -> Box<Self> where Self: Sized;\n\n fn execute(&mut self) -> bool;\n\n fn undo(&mut self) -> bool;\n\n}\n\n\n", "file_path": "src/state/commands/mod.rs", "rank": 91, "score": 41061.76730603128 }, { "content": "/// This command is used for complex commands whits change the terminal state.\n\n/// By passing an `Context` instance this command will register it self to notify the terminal state change.\n\npub trait IContextCommand\n\n{\n\n fn new(context: &mut Context) -> (Box<Self>, i16) where Self: Sized;\n\n fn execute(&mut self) -> bool;\n\n fn undo(&mut self) -> bool;\n\n}\n\n\n", "file_path": "src/state/commands/mod.rs", "rank": 92, "score": 40074.96877624686 }, { "content": " ///\n\n /// #Example\n\n ///\n\n /// ```rust\n\n ///\n\n /// extern crate crossterm;\n\n /// use crossterm::terminal;\n\n ///\n\n /// let mut term = terminal::terminal();\n\n /// \n\n /// // Set of the size to X: 10 and Y: 10\n\n /// let size = term.set_size(10,10);\n\n /// \n\n /// ```\n\n pub fn set_size(&mut self, width: i16, height: i16)\n\n {\n\n if let Some (ref terminal) = self.terminal {\n\n terminal.set_size(width,height);\n\n }\n\n }\n", "file_path": "src/terminal/terminal.rs", "rank": 93, "score": 38068.438887744465 }, { "content": " /// #Example\n\n ///\n\n /// ```rust\n\n ///\n\n /// extern crate crossterm;\n\n /// use crossterm::terminal;\n\n ///\n\n /// let mut term = terminal::terminal();\n\n /// \n\n /// // scroll down by 5 lines\n\n /// let size = term.scroll_down(5);\n\n /// \n\n /// ```\n\n pub fn scroll_down(&mut self, count: i16) {\n\n if let Some(ref terminal) = self.terminal {\n\n terminal.scroll_down(count);\n\n }\n\n }\n\n\n\n /// Set the terminal size. 
Note that not all terminals can be set to a very small scale.\n", "file_path": "src/terminal/terminal.rs", "rank": 94, "score": 38063.994722207084 }, { "content": " ///\n\n /// ```rust\n\n ///\n\n /// extern crate crossterm;\n\n /// use crossterm::terminal;\n\n ///\n\n /// let mut term = terminal::terminal();\n\n /// \n\n /// // scroll up by 5 lines\n\n /// let size = term.scroll_up(5);\n\n /// \n\n /// ```\n\n pub fn scroll_up(&mut self, count: i16) {\n\n if let Some(ref terminal) = self.terminal {\n\n terminal.scroll_up(count);\n\n }\n\n }\n\n\n\n /// Scroll `n` lines up in the current terminal.\n\n /// \n", "file_path": "src/terminal/terminal.rs", "rank": 95, "score": 38061.47616661666 }, { "content": " ///\n\n /// extern crate crossterm;\n\n /// use crossterm::terminal;\n\n ///\n\n /// let mut term = terminal::terminal();\n\n ///\n\n /// let size = term.terminal_size();\n\n /// println!(\"{:?}\", size);\n\n /// \n\n /// ```\n\n pub fn terminal_size(&mut self) -> (u16, u16) {\n\n if let Some(ref terminal) = self.terminal {\n\n return terminal.terminal_size()\n\n }\n\n (0,0)\n\n }\n\n\n\n /// Scroll `n` lines up in the current terminal.\n\n /// \n\n /// #Example\n", "file_path": "src/terminal/terminal.rs", "rank": 96, "score": 38058.76025202956 }, { "content": "//! With this module you can perform actions that are terminal related.\n\n//! Like clearing and scrolling in the terminal or getting the size of the terminal.\n\n\n\nuse super::*;\n\nuse shared::functions;\n\nuse {Construct, Context};\n\n\n\nuse std::ops::Drop;\n\n\n\n/// Struct that stores an specific platform implementation for terminal related actions.\n\npub struct Terminal {\n\n terminal: Option<Box<ITerminal>>,\n\n}\n\n\n\nimpl Terminal {\n\n /// Create new terminal instance whereon terminal related actions can be performed.\n\n pub fn new() -> Terminal {\n\n #[cfg(target_os = \"windows\")]\n\n let terminal = functions::get_module::<Box<ITerminal>>(WinApiTerminal::new(), AnsiTerminal::new());\n\n\n", "file_path": "src/terminal/terminal.rs", "rank": 97, "score": 38058.47119593957 }, { "content": "}\n\n\n\n/// Get an Terminal implementation whereon terminal related actions can be performed.\n\n///\n\n/// Check `/examples/terminal` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n/// use crossterm::terminal;\n\n///\n\n/// let mut term = terminal::terminal();\n\n///\n\n/// // scroll down by 5 lines\n\n/// let size = term.scroll_down(5);\n\n///\n\n/// ```\n\n///\n", "file_path": "src/terminal/terminal.rs", "rank": 98, "score": 38051.16763100565 }, { "content": " /// term.clear(terminal::ClearType::FromCursorDown);\n\n /// // clear all cells from the cursor position upwards in terminal.\n\n /// term.clear(terminal::ClearType::FromCursorUp);\n\n /// // clear current line cells in terminal.\n\n /// term.clear(terminal::ClearType::CurrentLine);\n\n /// // clear all cells from cursor position until new line in terminal.\n\n /// term.clear(terminal::ClearType::UntilNewLine);\n\n /// \n\n /// ```\n\n pub fn clear(&mut self, clear_type: ClearType) {\n\n if let Some(ref terminal) = self.terminal {\n\n terminal.clear(clear_type);\n\n }\n\n }\n\n\n\n /// Get the terminal size (x,y).\n\n /// \n\n /// #Example\n\n ///\n\n /// ```rust\n", "file_path": "src/terminal/terminal.rs", "rank": 99, "score": 38048.747206812404 } ]
Rust
src/skelly.rs
lain-dono/skelly
f161e4d3641c22fb764936bf441dac8434c3f9d1
use na::{Isometry3, Point3, RealField, Scalar, Translation3, UnitQuaternion};

pub struct Skelly<T: Scalar, D = ()> {
    bones: Vec<Bone<T, D>>,
}

struct Bone<T: Scalar, D> {
    isometry: Isometry3<T>,
    parent: Option<usize>,
    userdata: D,
}

impl<T> Skelly<T>
where
    T: Scalar,
{
    pub fn add_root(&mut self, position: Point3<T>) -> usize
    where
        T: RealField,
    {
        self.add_root_with(position, ())
    }

    pub fn attach(&mut self, relative_position: Point3<T>, parent: usize) -> usize
    where
        T: RealField,
    {
        self.attach_with(relative_position, parent, ())
    }
}

impl<T, D> Skelly<T, D>
where
    T: Scalar,
{
    pub fn new() -> Self {
        Skelly { bones: Vec::new() }
    }

    pub fn add_root_with(&mut self, position: Point3<T>, userdata: D) -> usize
    where
        T: RealField,
    {
        self.bones.push(Bone {
            isometry: Isometry3 {
                rotation: UnitQuaternion::identity(),
                translation: position.coords.into(),
            },
            parent: None,
            userdata,
        });
        self.bones.len() - 1
    }

    pub fn attach_with(&mut self, relative_position: Point3<T>, parent: usize, userdata: D) -> usize
    where
        T: RealField,
    {
        assert!(parent < self.bones.len(), "Parent index is ouf of bounds");
        self.bones.push(Bone {
            isometry: Isometry3 {
                rotation: UnitQuaternion::identity(),
                translation: relative_position.coords.into(),
            },
            parent: Some(parent),
            userdata,
        });
        self.bones.len() - 1
    }

    pub fn rotate(&mut self, bone: usize, rotation: UnitQuaternion<T>)
    where
        T: RealField,
    {
        let bone = &mut self.bones[bone];
        bone.isometry.rotation = bone.isometry.rotation * rotation;
    }

    pub fn translate_bone(&mut self, bone: usize, translation: Translation3<T>)
    where
        T: RealField,
    {
        let bone = &mut self.bones[bone];
        bone.isometry.translation = bone.isometry.translation * translation;
    }

    pub fn set_bone_position(&mut self, bone: usize, position: Point3<T>) {
        self.bones[bone].isometry.translation = position.coords.into();
    }

    pub fn get_userdata(&self, bone: usize) -> &D {
        &self.bones[bone].userdata
    }

    pub fn get_userdata_mut(&mut self, bone: usize) -> &mut D {
        &mut self.bones[bone].userdata
    }

    pub fn get_parent(&self, bone: usize) -> Option<usize> {
        self.bones[bone].parent
    }

    pub fn len(&self) -> usize {
        self.bones.len()
    }

    pub fn write_globals(&self, globals: &mut [Isometry3<T>])
    where
        T: RealField,
    {
        self.bones
            .iter()
            .take(globals.len())
            .enumerate()
            .for_each(|(index, bone)| match bone.parent {
                Some(parent) => {
                    debug_assert!(parent < index);
                    globals[index] = globals[parent] * bone.isometry;
                }
                None => {
                    globals[index] = bone.isometry;
                }
            })
    }

    pub fn write_globals_for_posture(&self, posture: &Posture<T>, globals: &mut [Isometry3<T>])
    where
        T: RealField,
    {
        self.bones
            .iter()
            .zip(&posture.joints)
            .take(globals.len())
            .enumerate()
            .for_each(|(index, (bone, isometry))| match bone.parent {
                Some(parent) => {
                    debug_assert!(parent < index);
                    globals[index] = globals[parent] * *isometry;
                }
                None => {
                    globals[index] = *isometry;
                }
            })
    }

    pub fn assume_posture(&mut self, posture: &Posture<T>)
    where
        T: Copy,
    {
        assert_eq!(self.bones.len(), posture.joints.len());
        self.bones
            .iter_mut()
            .zip(&posture.joints)
            .for_each(|(bone, isometry)| bone.isometry = *isometry);
    }

    pub fn make_posture(&self) -> Posture<T>
    where
        T: Copy,
    {
        Posture {
            joints: self.bones.iter().map(|bone| bone.isometry).collect(),
        }
    }

    pub fn make_chain(&self, mut bone: usize, chain: &mut Vec<usize>) {
        while let Some(parent) = self.bones[bone].parent {
            chain.push(parent);
            bone = parent;
        }
    }
}

pub struct Posture<T: Scalar> {
    joints: Vec<Isometry3<T>>,
}

impl<T> Posture<T>
where
    T: Scalar,
{
    pub fn new(len: usize) -> Self
    where
        T: RealField,
    {
        Posture {
            joints: vec![Isometry3::identity(); len],
        }
    }

    pub fn len(&self) -> usize {
        self.joints.len()
    }

    pub fn get_joint(&self, bone: usize) -> &Isometry3<T> {
        &self.joints[bone]
    }

    pub fn get_joint_mut(&mut self, bone: usize) -> &mut Isometry3<T> {
        &mut self.joints[bone]
    }
}
use na::{Isometry3, Point3, RealField, Scalar, Translation3, UnitQuaternion};

pub struct Skelly<T: Scalar, D = ()> {
    bones: Vec<Bone<T, D>>,
}

struct Bone<T: Scalar, D> {
    isometry: Isometry3<T>,
    parent: Option<usize>,
    userdata: D,
}

impl<T> Skelly<T>
where
    T: Scalar,
{
    pub fn add_root(&mut self, position: Point3<T>) -> usize
    where
        T: RealField,
    {
        self.add_root_with(position, ())
    }

    pub fn a
mut self, bone: usize) -> &mut D {
        &mut self.bones[bone].userdata
    }

    pub fn get_parent(&self, bone: usize) -> Option<usize> {
        self.bones[bone].parent
    }

    pub fn len(&self) -> usize {
        self.bones.len()
    }

    pub fn write_globals(&self, globals: &mut [Isometry3<T>])
    where
        T: RealField,
    {
        self.bones
            .iter()
            .take(globals.len())
            .enumerate()
            .for_each(|(index, bone)| match bone.parent {
                Some(parent) => {
                    debug_assert!(parent < index);
                    globals[index] = globals[parent] * bone.isometry;
                }
                None => {
                    globals[index] = bone.isometry;
                }
            })
    }

    pub fn write_globals_for_posture(&self, posture: &Posture<T>, globals: &mut [Isometry3<T>])
    where
        T: RealField,
    {
        self.bones
            .iter()
            .zip(&posture.joints)
            .take(globals.len())
            .enumerate()
            .for_each(|(index, (bone, isometry))| match bone.parent {
                Some(parent) => {
                    debug_assert!(parent < index);
                    globals[index] = globals[parent] * *isometry;
                }
                None => {
                    globals[index] = *isometry;
                }
            })
    }

    pub fn assume_posture(&mut self, posture: &Posture<T>)
    where
        T: Copy,
    {
        assert_eq!(self.bones.len(), posture.joints.len());
        self.bones
            .iter_mut()
            .zip(&posture.joints)
            .for_each(|(bone, isometry)| bone.isometry = *isometry);
    }

    pub fn make_posture(&self) -> Posture<T>
    where
        T: Copy,
    {
        Posture {
            joints: self.bones.iter().map(|bone| bone.isometry).collect(),
        }
    }

    pub fn make_chain(&self, mut bone: usize, chain: &mut Vec<usize>) {
        while let Some(parent) = self.bones[bone].parent {
            chain.push(parent);
            bone = parent;
        }
    }
}

pub struct Posture<T: Scalar> {
    joints: Vec<Isometry3<T>>,
}

impl<T> Posture<T>
where
    T: Scalar,
{
    pub fn new(len: usize) -> Self
    where
        T: RealField,
    {
        Posture {
            joints: vec![Isometry3::identity(); len],
        }
    }

    pub fn len(&self) -> usize {
        self.joints.len()
    }

    pub fn get_joint(&self, bone: usize) -> &Isometry3<T> {
        &self.joints[bone]
    }

    pub fn get_joint_mut(&mut self, bone: usize) -> &mut Isometry3<T> {
        &mut self.joints[bone]
    }
}
ttach(&mut self, relative_position: Point3<T>, parent: usize) -> usize
    where
        T: RealField,
    {
        self.attach_with(relative_position, parent, ())
    }
}

impl<T, D> Skelly<T, D>
where
    T: Scalar,
{
    pub fn new() -> Self {
        Skelly { bones: Vec::new() }
    }

    pub fn add_root_with(&mut self, position: Point3<T>, userdata: D) -> usize
    where
        T: RealField,
    {
        self.bones.push(Bone {
            isometry: Isometry3 {
                rotation: UnitQuaternion::identity(),
                translation: position.coords.into(),
            },
            parent: None,
            userdata,
        });
        self.bones.len() - 1
    }

    pub fn attach_with(&mut self, relative_position: Point3<T>, parent: usize, userdata: D) -> usize
    where
        T: RealField,
    {
        assert!(parent < self.bones.len(), "Parent index is ouf of bounds");
        self.bones.push(Bone {
            isometry: Isometry3 {
                rotation: UnitQuaternion::identity(),
                translation: relative_position.coords.into(),
            },
            parent: Some(parent),
            userdata,
        });
        self.bones.len() - 1
    }

    pub fn rotate(&mut self, bone: usize, rotation: UnitQuaternion<T>)
    where
        T: RealField,
    {
        let bone = &mut self.bones[bone];
        bone.isometry.rotation = bone.isometry.rotation * rotation;
    }

    pub fn translate_bone(&mut self, bone: usize, translation: Translation3<T>)
    where
        T: RealField,
    {
        let bone = &mut self.bones[bone];
        bone.isometry.translation = bone.isometry.translation * translation;
    }

    pub fn set_bone_position(&mut self, bone: usize, position: Point3<T>) {
        self.bones[bone].isometry.translation = position.coords.into();
    }

    pub fn get_userdata(&self, bone: usize) -> &D {
        &self.bones[bone].userdata
    }

    pub fn get_userdata_mut(&
random
[ { "content": "fn enque<T>(queue: &mut Vec<QueueItem<T>>, bone: usize, tip: Point3<T>, goal: Point3<T>)\n\nwhere\n\n T: Scalar,\n\n{\n\n let index = queue\n\n .binary_search_by(|item| item.bone.cmp(&bone))\n\n .unwrap_or_else(|x| x);\n\n\n\n queue.insert(index, QueueItem { bone, tip, goal });\n\n}\n\n\n", "file_path": "src/ik/rotor.rs", "rank": 1, "score": 63626.30896142169 }, { "content": "fn deque<T>(queue: &mut Vec<QueueItem<T>>) -> Option<(usize, Point3<T>, Point3<T>)>\n\nwhere\n\n T: RealField,\n\n{\n\n let first = queue.pop()?;\n\n\n\n let mut count = T::one();\n\n let mut tip_sum = first.tip.coords;\n\n let mut goal_sum = first.goal.coords;\n\n while let Some(item) = queue.pop() {\n\n if item.bone != first.bone {\n\n queue.push(item);\n\n break;\n\n }\n\n\n\n count += T::one();\n\n tip_sum += item.tip.coords;\n\n goal_sum += item.goal.coords;\n\n }\n\n\n\n Some((\n\n first.bone,\n\n Point3::from(tip_sum / count),\n\n Point3::from(goal_sum / count),\n\n ))\n\n}\n", "file_path": "src/ik/rotor.rs", "rank": 2, "score": 51668.67717771848 }, { "content": "/// Trait for ik solvers.\n\n/// Using this common interface user may replace implementation easily.\n\npub trait IkSolver<T: Scalar> {\n\n /// Returns new solver with maximum tolerable error.\n\n fn new(error: T) -> Self;\n\n\n\n /// Performs one step toward solution.\n\n fn solve_step<D>(&mut self, skelly: &Skelly<T, D>, posture: &mut Posture<T>) -> StepResult<T>;\n\n}\n", "file_path": "src/ik.rs", "rank": 3, "score": 50516.357563118814 }, { "content": "#[derive(Clone, Copy)]\n\nstruct IkGoal<T: Scalar> {\n\n bone: usize,\n\n position: Option<Point3<T>>,\n\n orientation: Option<UnitQuaternion<T>>,\n\n}\n\n\n\npub struct RotorSolver<T: Scalar> {\n\n epsilon: T,\n\n min_len: usize,\n\n goals: Vec<IkGoal<T>>,\n\n\n\n // temp vectors. 
saved to keep allocation.\n\n queue: Vec<QueueItem<T>>,\n\n globals: Vec<Isometry3<T>>,\n\n}\n\n\n\nimpl<T> Clone for RotorSolver<T>\n\nwhere\n\n T: Scalar,\n\n{\n", "file_path": "src/ik/rotor.rs", "rank": 4, "score": 50495.17904023903 }, { "content": "#[derive(Debug)]\n\nstruct QueueItem<T: Scalar> {\n\n bone: usize,\n\n tip: Point3<T>,\n\n goal: Point3<T>,\n\n}\n\n\n", "file_path": "src/ik/rotor.rs", "rank": 5, "score": 50495.17904023903 }, { "content": "fn draw_skelly(\n\n skelly: &Skelly<f32, Color>,\n\n posture: &Posture<f32>,\n\n globals: &mut Vec<Isometry3<f32>>,\n\n) {\n\n skelly.write_globals_for_posture(posture, globals);\n\n\n\n for index in 0..skelly.len() {\n\n if let Some(parent) = skelly.get_parent(index) {\n\n let start = &globals[parent].translation.vector;\n\n let end = &globals[index].translation.vector;\n\n let color = *skelly.get_userdata(index);\n\n draw_line_3d(\n\n macroquad::math::Vec3::new(start.x, start.y, start.z),\n\n macroquad::math::Vec3::new(end.x, end.y, end.z),\n\n color,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "examples/demo.rs", "rank": 6, "score": 30522.085605077118 }, { "content": " epsilon,\n\n }\n\n }\n\n\n\n pub fn set_position_goal(&mut self, bone: usize, position: Point3<T>)\n\n where\n\n T: Copy,\n\n {\n\n match self\n\n .goals\n\n .iter_mut()\n\n .skip_while(|goal| goal.bone != bone)\n\n .next()\n\n {\n\n Some(goal) => {\n\n if goal.bone == bone {\n\n goal.position = Some(position);\n\n }\n\n }\n\n None => {\n", "file_path": "src/ik/rotor.rs", "rank": 15, "score": 9.669272291451673 }, { "content": " self.min_len = self.min_len.min(bone + 1);\n\n self.goals.push(IkGoal {\n\n bone,\n\n position: Some(position),\n\n orientation: None,\n\n })\n\n }\n\n }\n\n }\n\n\n\n pub fn set_orientation_goal(&mut self, bone: usize, orientation: UnitQuaternion<T>)\n\n where\n\n T: Copy,\n\n {\n\n match self\n\n .goals\n\n .iter_mut()\n\n .skip_while(|goal| goal.bone != bone)\n\n .next()\n\n {\n", "file_path": "src/ik/rotor.rs", "rank": 19, "score": 7.170181472281479 }, { "content": " enque(&mut self.queue, parent, tip, goal);\n\n }\n\n }\n\n\n\n total\n\n }\n\n\n\n // fn solve_one_pos<D>(\n\n // chain: &mut Vec<usize>,\n\n // globals: &mut Vec<Isometry3<T>>,\n\n // skelly: &Skelly<T, D>,\n\n // posture: &mut Posture<T>,\n\n // epsilon: T,\n\n // bone: usize,\n\n // goal: Point3<T>,\n\n // ) -> StepResult<T>\n\n // where\n\n // T: RealField,\n\n // {\n\n // skelly.make_chain(bone, chain);\n", "file_path": "src/ik/rotor.rs", "rank": 21, "score": 6.395582051043273 }, { "content": " // if error < self.epsilon {\n\n // continue;\n\n // }\n\n if let Some(parent) = skelly.get_parent(goal.bone) {\n\n enque(&mut self.queue, parent, tip, position);\n\n }\n\n }\n\n }\n\n\n\n while let Some((bone, tip, goal)) = deque(&mut self.queue) {\n\n let inv = self.globals[bone].inverse();\n\n\n\n let mut tip_local = inv * tip;\n\n let goal_local = inv * goal;\n\n\n\n if tip_local.coords.magnitude_squared() < self.epsilon {\n\n continue;\n\n }\n\n\n\n if goal_local.coords.magnitude_squared() < self.epsilon {\n", "file_path": "src/ik/rotor.rs", "rank": 22, "score": 5.505904659588145 }, { "content": "//! 
This module contains inverse-kinematic functionality for the skelly crate.\n\n\n\nuse {\n\n super::{IkSolver, Status, StepResult},\n\n crate::skelly::{Posture, Skelly},\n\n na::{Isometry3, Point3, RealField, Scalar, UnitQuaternion},\n\n};\n\n\n\n#[derive(Clone, Copy)]\n", "file_path": "src/ik/rotor.rs", "rank": 23, "score": 5.114370063128527 }, { "content": " Some(goal) => {\n\n if goal.bone == bone {\n\n goal.orientation = Some(orientation);\n\n }\n\n }\n\n None => {\n\n self.min_len = self.min_len.min(bone + 1);\n\n self.goals.push(IkGoal {\n\n bone,\n\n position: None,\n\n orientation: Some(orientation),\n\n })\n\n }\n\n }\n\n }\n\n\n\n pub fn solve_step<D>(\n\n &mut self,\n\n skelly: &Skelly<T, D>,\n\n posture: &mut Posture<T>,\n", "file_path": "src/ik/rotor.rs", "rank": 24, "score": 5.085128635007751 }, { "content": "{\n\n fn new(error: T) -> Self {\n\n Self::new(error)\n\n }\n\n\n\n fn solve_step<D>(&mut self, skelly: &Skelly<T, D>, posture: &mut Posture<T>) -> StepResult<T> {\n\n self.solve_step(skelly, posture)\n\n }\n\n}\n\n\n\nimpl<T> RotorSolver<T>\n\nwhere\n\n T: Scalar,\n\n{\n\n pub fn new(epsilon: T) -> Self {\n\n RotorSolver {\n\n goals: Vec::new(),\n\n min_len: 0,\n\n queue: Vec::new(),\n\n globals: Vec::new(),\n", "file_path": "src/ik/rotor.rs", "rank": 25, "score": 4.7829167271290896 }, { "content": "//! This module contains inverse-kinematic functionality for the skelly crate.\n\n\n\n// pub mod fabrik;\n\npub mod rotor;\n\n\n\nuse {\n\n crate::skelly::{Posture, Skelly},\n\n core::ops::Add,\n\n na::Scalar,\n\n};\n\n\n\n/// Variants of results for `IkSolver::solve_step` method.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum Status {\n\n /// All constrains and goals are satisfied with error less than configured for solver.\n\n Solved,\n\n\n\n /// Error in constraints and goals are unsatisfied\n\n Unsolved,\n\n\n", "file_path": "src/ik.rs", "rank": 26, "score": 4.602183781361522 }, { "content": "//!\n\n//! 
skelly crate.\n\n//!\n\n\n\n#[cfg(feature = \"ik\")]\n\npub mod ik;\n\n\n\nmod skelly;\n\n\n\npub use self::skelly::*;\n", "file_path": "src/lib.rs", "rank": 27, "score": 4.487047789918124 }, { "content": " pub fn unsolved(error: T) -> Self {\n\n StepResult {\n\n error,\n\n status: Status::Unsolved,\n\n }\n\n }\n\n\n\n pub fn infeasible(error: T) -> Self {\n\n StepResult {\n\n error,\n\n status: Status::Infeasible,\n\n }\n\n }\n\n\n\n /// Combine two step results into one\n\n pub fn reduce(self, rhs: Self) -> Self {\n\n let status = match (self.status, rhs.status) {\n\n (Status::Infeasible, _) | (_, Status::Infeasible) => Status::Infeasible,\n\n (Status::Unsolved, _) | (_, Status::Unsolved) => Status::Unsolved,\n\n (Status::Solved, Status::Solved) => Status::Solved,\n", "file_path": "src/ik.rs", "rank": 28, "score": 4.417324534390158 }, { "content": "\n\n // if chain.is_empty() {\n\n // // For the root case, just move it to the target position.\n\n // posture.get_joint_mut(bone).translation = goal.coords.into();\n\n // return StepResult {\n\n // error: T::zero(),\n\n // status: Status::Solved,\n\n // };\n\n // }\n\n\n\n // for &bone in chain.iter().rev() {\n\n // let identity = Isometry3::identity();\n\n // let last = globals.last().unwrap_or(&identity);\n\n // let global = last * posture.get_joint(bone);\n\n // globals.push(global);\n\n // }\n\n\n\n // let mut tip =\n\n // globals.last().unwrap() * posture.get_joint(bone).translation * Point3::origin();\n\n\n", "file_path": "src/ik/rotor.rs", "rank": 29, "score": 4.411926539077264 }, { "content": " ) -> StepResult<T>\n\n where\n\n T: RealField,\n\n {\n\n assert_eq!(skelly.len(), posture.len());\n\n assert!(self.min_len <= skelly.len());\n\n\n\n self.globals.resize_with(skelly.len(), Isometry3::identity);\n\n skelly.write_globals_for_posture(posture, &mut self.globals);\n\n\n\n let mut total = StepResult {\n\n error: T::zero(),\n\n status: Status::Solved,\n\n };\n\n\n\n for goal in &self.goals {\n\n if let Some(position) = goal.position {\n\n let tip = Point3::from(self.globals[goal.bone].translation.vector);\n\n\n\n // let error = tip.coords.metric_distance(&position.coords);\n", "file_path": "src/ik/rotor.rs", "rank": 30, "score": 4.232582300999109 }, { "content": "\n\n let camera_matrix_inv = camera_matrix.inverse();\n\n let o = camera_matrix_inv.transform_point3(macroquad::math::Vec3::zero());\n\n let t = camera_matrix_inv.transform_point3(macroquad::math::Vec3::new(x, y, 0.999));\n\n let d = t - o;\n\n let f = -o.y / d.y;\n\n let x = d * f + o;\n\n\n\n left_target = Point3::from(Vector::from([x.x, x.y, x.z]));\n\n solver.set_position_goal(left, left_target);\n\n }\n\n\n\n if is_mouse_button_down(MouseButton::Right) {\n\n let (x, y) = mouse_position();\n\n let (x, y) = (\n\n x * 2.0 / screen_width() - 1.0,\n\n 1.0 - y * 2.0 / screen_height(),\n\n );\n\n\n\n let camera_matrix_inv = camera_matrix.inverse();\n", "file_path": "examples/demo.rs", "rank": 31, "score": 3.9495897015155323 }, { "content": " /// Returned if solver determined that goals cannot be satisfied given the constraitns.\n\n Infeasible,\n\n}\n\n\n\npub struct StepResult<T> {\n\n error: T,\n\n status: Status,\n\n}\n\n\n\nimpl<T> StepResult<T>\n\nwhere\n\n T: Add<Output = T>,\n\n{\n\n pub fn solved(error: T) -> Self {\n\n StepResult {\n\n error,\n\n status: Status::Solved,\n\n }\n\n }\n\n\n", "file_path": "src/ik.rs", "rank": 32, "score": 3.4676566261979245 }, { "content": " continue;\n\n }\n\n\n\n let rot = UnitQuaternion::rotation_between(&tip_local.coords, 
&goal_local.coords)\n\n // .unwrap_or_else(|| UnitQuaternion::from_euler_angles(T::one(), T::one(), T::one()));\n\n .unwrap_or_else(UnitQuaternion::identity);\n\n\n\n // rotate the joint\n\n let local = posture.get_joint_mut(bone);\n\n *local *= rot;\n\n tip_local = rot * tip_local;\n\n\n\n let error = tip_local.coords.metric_distance(&goal_local.coords);\n\n if error < self.epsilon {\n\n total = total.reduce(StepResult::solved(error));\n\n continue;\n\n }\n\n\n\n if let Some(parent) = skelly.get_parent(bone) {\n\n let tip = self.globals[bone] * tip_local;\n", "file_path": "src/ik/rotor.rs", "rank": 33, "score": 3.33270807085348 }, { "content": " let o = camera_matrix_inv.transform_point3(macroquad::math::Vec3::new(0.0, 0.0, 0.0));\n\n let t = camera_matrix_inv.transform_point3(macroquad::math::Vec3::new(x, y, 0.999));\n\n let d = t - o;\n\n let f = -o.y / d.y;\n\n let x = d * f + o;\n\n\n\n right_target = Point::from(Vector::from([x.x, x.y, x.z]));\n\n solver.set_position_goal(right, right_target);\n\n }\n\n\n\n solver_wait_for -= get_frame_time();\n\n while solver_wait_for < 0.0 {\n\n let _solved = solver.solve_step(&skelly, &mut posture);\n\n solver_wait_for += 0.001;\n\n }\n\n\n\n set_camera(camera);\n\n\n\n next_frame().await;\n\n clear_background(DARKGRAY);\n", "file_path": "examples/demo.rs", "rank": 34, "score": 3.0056110127555318 }, { "content": "use {\n\n macroquad::{\n\n camera::{set_camera, Camera as _, Camera3D},\n\n color::{\n\n Color, BLUE, DARKGRAY, GOLD, GREEN, LIME, MAGENTA, MAROON, ORANGE, PINK, RED, WHITE,\n\n YELLOW,\n\n },\n\n input::{is_key_pressed, is_mouse_button_down, mouse_position, KeyCode, MouseButton},\n\n models::{draw_line_3d, draw_sphere},\n\n time::get_frame_time,\n\n window::{clear_background, next_frame, screen_height, screen_width},\n\n },\n\n na::{Isometry3, Point, Point3, Vector, Vector3},\n\n skelly::{ik::rotor::RotorSolver, Posture, Skelly},\n\n};\n\n\n\n#[macroquad::main(\"ik-test\")]\n\nasync fn main() {\n\n let mut skelly = Skelly::<f32, Color>::new();\n\n let mut index = skelly.add_root_with(Point::origin(), GOLD);\n", "file_path": "examples/demo.rs", "rank": 35, "score": 2.873775450089378 }, { "content": " };\n\n\n\n StepResult {\n\n error: self.error + rhs.error,\n\n status,\n\n }\n\n }\n\n}\n\n\n\n/// Trait for ik solvers.\n\n/// Using this common interface user may replace implementation easily.\n", "file_path": "src/ik.rs", "rank": 36, "score": 2.0220568773226635 }, { "content": " let mut right_target = Point::origin();\n\n let mut posture = skelly.make_posture();\n\n\n\n camera.position.y += 5.0;\n\n\n\n let mut solver_wait_for = 1.0;\n\n\n\n loop {\n\n if is_key_pressed(KeyCode::Escape) {\n\n break;\n\n }\n\n\n\n let camera_matrix = camera.matrix();\n\n\n\n if is_mouse_button_down(MouseButton::Left) {\n\n let (x, y) = mouse_position();\n\n let (x, y) = (\n\n x * 2.0 / screen_width() - 1.0,\n\n 1.0 - y * 2.0 / screen_height(),\n\n );\n", "file_path": "examples/demo.rs", "rank": 37, "score": 2.0150079208678875 }, { "content": " fn clone(&self) -> Self {\n\n RotorSolver {\n\n epsilon: self.epsilon.clone(),\n\n min_len: self.min_len,\n\n goals: self.goals.clone(),\n\n queue: Vec::new(),\n\n globals: Vec::new(),\n\n }\n\n }\n\n\n\n fn clone_from(&mut self, source: &Self) {\n\n self.epsilon = source.epsilon.clone();\n\n self.min_len = source.min_len;\n\n self.goals = source.goals.clone();\n\n }\n\n}\n\n\n\nimpl<T> IkSolver<T> for RotorSolver<T>\n\nwhere\n\n T: RealField,\n", "file_path": "src/ik/rotor.rs", "rank": 38, "score": 
1.9938947931631015 }, { "content": " index = skelly.attach_with(Vector3::z().into(), index, MAROON);\n\n index = skelly.attach_with(Vector3::z().into(), index, PINK);\n\n\n\n let mut left = skelly.attach_with(Vector3::z().into(), index, ORANGE);\n\n left = skelly.attach_with((-Vector3::x()).into(), left, MAGENTA);\n\n left = skelly.attach_with((-Vector3::x()).into(), left, BLUE);\n\n\n\n let mut right = skelly.attach_with(Vector3::z().into(), index, LIME);\n\n right = skelly.attach_with(Vector3::x().into(), right, YELLOW);\n\n right = skelly.attach_with(Vector3::x().into(), right, WHITE);\n\n\n\n let mut globals = vec![Isometry3::identity(); skelly.len()];\n\n\n\n let mut solver = RotorSolver::<f32>::new(0.0001);\n\n // solver.set_position_goal(index, Point::origin());\n\n solver.set_position_goal(left, Point::origin());\n\n solver.set_position_goal(right, Point::origin());\n\n\n\n let mut camera = Camera3D::default();\n\n let mut left_target = Point::origin();\n", "file_path": "examples/demo.rs", "rank": 39, "score": 1.6939010846742197 }, { "content": " // let error = tip.coords.metric_distance(&goal.coords);\n\n // if error < epsilon {\n\n // return StepResult {\n\n // error,\n\n // status: Status::Solved,\n\n // };\n\n // }\n\n\n\n // globals.reverse();\n\n\n\n // for (chain_index, &bone) in chain.iter().enumerate() {\n\n // let inv = globals[chain_index].inverse();\n\n\n\n // let mut tip_local = inv * tip;\n\n // if tip_local.coords.magnitude_squared() < epsilon {\n\n // continue;\n\n // }\n\n\n\n // let goal_local = inv * goal;\n\n // if goal_local.coords.magnitude_squared() < epsilon {\n", "file_path": "src/ik/rotor.rs", "rank": 40, "score": 1.1306029664346922 }, { "content": " // continue;\n\n // }\n\n\n\n // let rot = UnitQuaternion::rotation_between(&tip_local.coords, &goal_local.coords)\n\n // // .unwrap_or_else(|| UnitQuaternion::from_euler_angles(T::one(), T::one(), T::one()));\n\n // .unwrap_or_else(UnitQuaternion::identity);\n\n\n\n // // rotate the joint\n\n // let local = posture.get_joint_mut(bone);\n\n // *local *= rot;\n\n // tip_local = rot * tip_local;\n\n\n\n // let error = tip_local.coords.metric_distance(&goal_local.coords);\n\n // if error < epsilon {\n\n // return StepResult {\n\n // error,\n\n // status: Status::Solved,\n\n // };\n\n // }\n\n\n", "file_path": "src/ik/rotor.rs", "rank": 41, "score": 1.0260382026333987 } ]
Rust
benches/spmc_mt_read_write_bench.rs
tower120/lockless_event_queue
aa55039161736c88b103a520f83ce8f8ab66288f
use rc_event_queue::spmc::{EventQueue, EventReader, Settings};
use criterion::{Criterion, black_box, criterion_main, criterion_group, BenchmarkId};
use std::time::{Duration, Instant};
use std::thread;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::pin::Pin;
use rc_event_queue::{CleanupMode, LendingIterator};

const QUEUE_SIZE: usize = 100000;

struct S{} impl Settings for S{
    const MIN_CHUNK_SIZE: u32 = 512;
    const MAX_CHUNK_SIZE: u32 = 512;
    const CLEANUP: CleanupMode = CleanupMode::Never;
}

type Event = EventQueue<usize, S>;

fn bench_event_read_write<F>(iters: u64, writer_fn: F) -> Duration
    where F: Fn(&mut Event, usize, usize) -> () + Send + 'static + Clone
{
    let mut total = Duration::ZERO;

    let readers_thread_count = 4;

    for _ in 0..iters {
        let mut event = Event::new();

        let mut readers = Vec::new();
        for _ in 0..readers_thread_count{
            readers.push(EventReader::new(&mut event));
        }

        let writer_thread = {
            let writer_fn = writer_fn.clone();
            Box::new(thread::spawn(move || {
                writer_fn(&mut event, 0, QUEUE_SIZE);
            }))
        };

        let readers_stop = Arc::new(AtomicBool::new(false));
        let mut reader_threads = Vec::new();
        for mut reader in readers{
            let readers_stop = readers_stop.clone();
            let thread = Box::new(thread::spawn(move || {
                let mut local_sum0: usize = 0;
                loop{
                    let stop = readers_stop.load(Ordering::Acquire);

                    let mut iter = reader.iter();
                    while let Some(i) = iter.next(){
                        local_sum0 += i;
                    }

                    if stop{
                        break;
                    }
                }
                black_box(local_sum0);
            }));
            reader_threads.push(thread);
        }

        let start = Instant::now();
        writer_thread.join().unwrap();
        readers_stop.store(true, Ordering::Release);
        for thread in reader_threads {
            thread.join().unwrap();
        }
        total += start.elapsed();
    }
    total
}

pub fn mt_read_write_event_benchmark(c: &mut Criterion) {
    let mut group = c.benchmark_group("spmc mt read write");

    for session_size in [4, 8, 16, 32, 128, 512 as usize]{
        group.bench_with_input(
            BenchmarkId::new("spmc::EventQueue extend", session_size),
            &session_size,
            |b, input| b.iter_custom(|iters| {
                let session_len = *input;
                let f = move |event: &mut Event, from: usize, to: usize|{
                    write_extend(session_len, event, from, to);
                };
                bench_event_read_write(iters, f)
            }));
    }

    #[inline(always)]
    fn write_push(event: &mut Event, from: usize, to: usize){
        for i in from..to{
            event.push(black_box(i));
        }
    }

    #[inline(always)]
    fn write_extend(session_len: usize, event: &mut Event, from: usize, to: usize){
        let mut i = from;
        loop{
            let session_from = i;
            let session_to = session_from + session_len;
            if session_to>=to{
                return;
            }
            event.extend(black_box(session_from..session_to));
            i = session_to;
        }
    }

    group.bench_function("spmc::EventQueue push", |b|b.iter_custom(|iters| bench_event_read_write(iters, write_push)));
}

criterion_group!(benches, mt_read_write_event_benchmark);
criterion_main!(benches);
use rc_event_queue::spmc::{EventQueue, EventReader, Settings};
use criterion::{Criterion, black_box, criterion_main, criterion_group, BenchmarkId};
use std::time::{Duration, Instant};
use std::thread;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::pin::Pin;
use rc_event_queue::{CleanupMode, LendingIterator};

const QUEUE_SIZE: usize = 100000;

struct S{} impl Settings for S{
    const MIN_CHUNK_SIZE: u32 = 512;
    const MAX_CHUNK_SIZE: u32 = 512;
    const CLEANUP: CleanupMode = CleanupMode::Never;
}

type Event = EventQueue<usize, S>;

fn bench_event_read_write<F>(iters: u64, writer_fn: F) -> Duration
    where F: Fn(&mut Event, usize, usize) -> ()
let thread = Box::new(thread::spawn(move || {
                let mut local_sum0: usize = 0;
                loop{
                    let stop = readers_stop.load(Ordering::Acquire);

                    let mut iter = reader.iter();
                    while let Some(i) = iter.next(){
                        local_sum0 += i;
                    }

                    if stop{
                        break;
                    }
                }
                black_box(local_sum0);
            }));
            reader_threads.push(thread);
        }

        let start = Instant::now();
        writer_thread.join().unwrap();
        readers_stop.store(true, Ordering::Release);
        for thread in reader_threads {
            thread.join().unwrap();
        }
        total += start.elapsed();
    }
    total
}

pub fn mt_read_write_event_benchmark(c: &mut Criterion) {
    let mut group = c.benchmark_group("spmc mt read write");

    for session_size in [4, 8, 16, 32, 128, 512 as usize]{
        group.bench_with_input(
            BenchmarkId::new("spmc::EventQueue extend", session_size),
            &session_size,
            |b, input| b.iter_custom(|iters| {
                let session_len = *input;
                let f = move |event: &mut Event, from: usize, to: usize|{
                    write_extend(session_len, event, from, to);
                };
                bench_event_read_write(iters, f)
            }));
    }

    #[inline(always)]
    fn write_push(event: &mut Event, from: usize, to: usize){
        for i in from..to{
            event.push(black_box(i));
        }
    }

    #[inline(always)]
    fn write_extend(session_len: usize, event: &mut Event, from: usize, to: usize){
        let mut i = from;
        loop{
            let session_from = i;
            let session_to = session_from + session_len;
            if session_to>=to{
                return;
            }
            event.extend(black_box(session_from..session_to));
            i = session_to;
        }
    }

    group.bench_function("spmc::EventQueue push", |b|b.iter_custom(|iters| bench_event_read_write(iters, write_push)));
}

criterion_group!(benches, mt_read_write_event_benchmark);
criterion_main!(benches);
+ Send + 'static + Clone
{
    let mut total = Duration::ZERO;

    let readers_thread_count = 4;

    for _ in 0..iters {
        let mut event = Event::new();

        let mut readers = Vec::new();
        for _ in 0..readers_thread_count{
            readers.push(EventReader::new(&mut event));
        }

        let writer_thread = {
            let writer_fn = writer_fn.clone();
            Box::new(thread::spawn(move || {
                writer_fn(&mut event, 0, QUEUE_SIZE);
            }))
        };

        let readers_stop = Arc::new(AtomicBool::new(false));
        let mut reader_threads = Vec::new();
        for mut reader in readers{
            let readers_stop = readers_stop.clone();
function_block-random_span
[ { "content": "/// We test high-contention read-write case.\n\nfn bench_event_read_write<F>(iters: u64, writer_fn: F) -> Duration\n\n where F: Fn(&ArcEvent, usize, usize) -> () + Send + 'static + Clone\n\n{\n\n let mut total = Duration::ZERO;\n\n\n\n let writers_thread_count = 2;\n\n let readers_thread_count = 4;\n\n\n\n\n\n for _ in 0..iters {\n\n let event = Event::new();\n\n\n\n let mut readers = Vec::new();\n\n for _ in 0..readers_thread_count{\n\n readers.push(EventReader::new(&event));\n\n }\n\n\n\n // write\n\n let mut writer_threads = Vec::new();\n\n let writer_chunk = QUEUE_SIZE / writers_thread_count;\n", "file_path": "benches/mpmc_mt_read_write_bench.rs", "rank": 0, "score": 184919.74326820672 }, { "content": "struct Data<F: FnMut(usize)>{\n\n i : usize,\n\n on_destroy : F\n\n}\n\n\n\nimpl<F: FnMut(usize)> Drop for Data<F>{\n\n fn drop(&mut self) {\n\n (self.on_destroy)(self.i);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/dynamic_array/test.rs", "rank": 2, "score": 162507.46102636112 }, { "content": "//#[derive(Clone, Eq, PartialEq, Hash)]\n\nstruct Data<F: FnMut()>{\n\n id : usize,\n\n _name: String,\n\n on_destroy: F\n\n}\n\n\n\nimpl<F: FnMut()> Data<F>{\n\n fn from(i:usize, on_destroy: F) -> Self {\n\n Self{\n\n id : i,\n\n _name: i.to_string(),\n\n on_destroy\n\n }\n\n }\n\n}\n\n\n\nimpl<F: FnMut()> Drop for Data<F>{\n\n fn drop(&mut self) {\n\n (self.on_destroy)();\n\n }\n\n}\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 3, "score": 147235.60885790075 }, { "content": "struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = u32::MAX;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 4, "score": 141566.12317366217 }, { "content": "fn bench_event_reader(iters: u64, read_session_size: usize) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let event = EventQueue::<usize, EventQueueSettings>::new();\n\n let mut reader = EventReader::new(&event);\n\n for i in 0..QUEUE_SIZE {\n\n event.push(i);\n\n }\n\n\n\n let start = Instant::now();\n\n 'outer: loop{\n\n // simulate \"read sessions\"\n\n // Testing this, because constructing iterator _and switching chunk_\n\n // is the only potentially \"heavy\" operations\n\n\n\n let mut iter = reader.iter();\n\n for _ in 0..read_session_size {\n\n let next = iter.next();\n\n match next{\n\n None => {break 'outer;}\n\n Some(i) => {black_box(i);}\n\n }\n\n }\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/read_bench.rs", "rank": 5, "score": 141309.2259309468 }, { "content": "struct Header<F: FnMut(u8)>{\n\n i : u8,\n\n on_destroy : F\n\n}\n\nimpl<F: FnMut(u8)> Drop for Header<F>{\n\n fn drop(&mut self) {\n\n (self.on_destroy)(self.i);\n\n }\n\n}\n\n\n", "file_path": "src/dynamic_array/test.rs", "rank": 6, "score": 136364.326673339 }, { "content": "fn bench_event_reader(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let event = EventQueue::<usize, EventQueueSettings>::new();\n\n let mut reader = EventReader::new(&event);\n\n let start = Instant::now();\n\n let mut iter = reader.iter();\n\n while let Some(i) = iter.next(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/read_empty_bench.rs", "rank": 7, "score": 131816.35860386016 }, { "content": "fn bench_event_reader_whole(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let event 
= EventQueue::<usize, EventQueueSettings>::new();\n\n let mut reader = EventReader::new(&event);\n\n for i in 0..QUEUE_SIZE {\n\n event.push(i);\n\n }\n\n\n\n let start = Instant::now();\n\n let mut iter = reader.iter();\n\n while let Some(i) = iter.next(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/read_bench.rs", "rank": 8, "score": 131816.35860386014 }, { "content": "#[inline(always)]\n\n#[allow(unreachable_code)]\n\npub fn bittest_u64<const N: u8>(value: u64) -> bool {\n\n #[cfg(not(miri))]\n\n unsafe {\n\n #[cfg(target_arch = \"x86_64\")]\n\n return core::arch::x86_64::_bittest64(&(value as i64), N as i64) != 0;\n\n }\n\n\n\n return value & (1 << N) != 0;\n\n}\n", "file_path": "src/utils.rs", "rank": 9, "score": 122615.13751992896 }, { "content": "#[inline(always)]\n\n#[must_use]\n\npub fn bitset_u64<const N: u8>(mut value: u64, bit: bool) -> u64 {\n\n // should be const. Lets hope rust precalculate it.\n\n let mask: u64 = 1<<N; // all bits = 0, Nth = 1\n\n\n\n // clear bit\n\n value &= !mask;\n\n\n\n // set bit\n\n value |= (bit as u64) << N;\n\n\n\n value\n\n}\n\n\n\n/// Epoch or generation or version.\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub struct Epoch<T, const MAX: u64> (T)\n\n where T : Copy + Add<Output = T> + PartialOrd + From<u8> + Into<u64>;\n\n\n\nimpl<T, const MAX: u64> Epoch<T, MAX>\n\n where T : Copy + Add<Output = T> + PartialOrd + From<u8> + Into<u64>\n", "file_path": "src/utils.rs", "rank": 10, "score": 121745.79080177462 }, { "content": "type Event = EventQueue<usize, S>;\n", "file_path": "benches/mpmc_mt_read_write_bench.rs", "rank": 11, "score": 119229.70392293435 }, { "content": "/// Epoch of EventQueue::start_position\n\ntype StartPositionEpoch = crate::utils::Epoch<u32, {i32::MAX as u64}>;\n\n\n\npub use crate::event_queue::CleanupMode;\n\npub use crate::event_reader::LendingIterator;\n\n\n\npub mod prelude{\n\n pub use crate::CleanupMode;\n\n pub use crate::LendingIterator;\n\n}\n\n\n\npub mod mpmc;\n\npub mod spmc;\n\n\n\n#[cfg(test)]\n\nmod tests;", "file_path": "src/lib.rs", "rank": 13, "score": 113247.83669553409 }, { "content": "struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n}\n", "file_path": "benches/mpmc_mt_read_write_bench.rs", "rank": 15, "score": 111943.04026344072 }, { "content": "fn bench_deque(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let deque = VecDeque::<usize>::new();\n\n\n\n let start = Instant::now();\n\n for i in deque.iter(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n\n\n\n\n", "file_path": "benches/read_empty_bench.rs", "rank": 16, "score": 111580.37221020772 }, { "content": "fn bench_deque_whole(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut deque = VecDeque::new();\n\n for i in 0..QUEUE_SIZE {\n\n deque.push_back(i);\n\n }\n\n\n\n let start = Instant::now();\n\n for i in deque.iter(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n\n\n\n\n", "file_path": "benches/read_bench.rs", "rank": 17, "score": 111580.37221020772 }, { "content": "fn bench_deque_extend(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut vec = VecDeque::new();\n\n let start = Instant::now();\n\n\n\n vec.extend(black_box(0..QUEUE_SIZE));\n\n\n\n total += start.elapsed();\n\n 
}\n\n total\n\n}\n\n\n\n\n", "file_path": "benches/write_bench.rs", "rank": 18, "score": 111580.37221020772 }, { "content": "fn bench_deque_push(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut vec = VecDeque::new();\n\n let start = Instant::now();\n\n\n\n for i in 0..QUEUE_SIZE {\n\n vec.push_back(black_box(i));\n\n }\n\n\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/write_bench.rs", "rank": 19, "score": 111580.37221020772 }, { "content": "fn bench_vector_whole(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut vec = Vec::new();\n\n for i in 0..QUEUE_SIZE {\n\n vec.push(i);\n\n }\n\n\n\n let start = Instant::now();\n\n for i in vec.iter(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/read_bench.rs", "rank": 20, "score": 111580.37221020772 }, { "content": "fn bench_vector(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let vec = Vec::<usize>::new();\n\n\n\n let start = Instant::now();\n\n for i in vec.iter(){\n\n black_box(i);\n\n }\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/read_empty_bench.rs", "rank": 21, "score": 111580.37221020772 }, { "content": "fn bench_vector_push(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut vec = Vec::new();\n\n let start = Instant::now();\n\n\n\n for i in 0..QUEUE_SIZE {\n\n vec.push(black_box(i));\n\n }\n\n\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n", "file_path": "benches/write_bench.rs", "rank": 22, "score": 111580.37221020772 }, { "content": "fn bench_vector_extend(iters: u64) -> Duration{\n\n let mut total = Duration::ZERO;\n\n for _ in 0..iters {\n\n let mut vec = Vec::new();\n\n let start = Instant::now();\n\n\n\n vec.extend(black_box(0..QUEUE_SIZE));\n\n\n\n total += start.elapsed();\n\n }\n\n total\n\n}\n\n\n\n\n", "file_path": "benches/write_bench.rs", "rank": 23, "score": 111580.37221020772 }, { "content": "struct EventQueueSettings{}\n\nimpl Settings for EventQueueSettings{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n}\n\n\n", "file_path": "benches/read_bench.rs", "rank": 24, "score": 109851.85353659627 }, { "content": "struct EventQueueSettings{}\n\nimpl Settings for EventQueueSettings{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n}\n\n\n", "file_path": "benches/read_empty_bench.rs", "rank": 25, "score": 106810.04963628178 }, { "content": "struct SPMCEventQueueSettings{}\n\nimpl spmc::Settings for SPMCEventQueueSettings{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n}\n\nevent_queue_bench!(spmc_bench, crate::spmc::EventQueue<usize, crate::SPMCEventQueueSettings>);\n\n\n", "file_path": "benches/write_bench.rs", "rank": 26, "score": 106810.04963628178 }, { "content": "struct MPMCEventQueueSettings{}\n\nimpl mpmc::Settings for MPMCEventQueueSettings{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n}\n\nevent_queue_bench!(mpmc_bench, crate::mpmc::EventQueue<usize, crate::MPMCEventQueueSettings>);\n\n\n", "file_path": "benches/write_bench.rs", "rank": 27, "score": 106810.04963628178 }, { 
"content": "fn factual_capacity<T, S: Settings>(event_queue: &EventQueue<T, S>) -> usize {\n\n let list = &event_queue.0.list.lock();\n\n let mut total = 0;\n\n unsafe {\n\n foreach_chunk(\n\n list.first,\n\n null(),\n\n Ordering::Relaxed, // we're under mutex\n\n |chunk| {\n\n total += chunk.capacity();\n\n Continue(())\n\n }\n\n );\n\n }\n\n total\n\n}\n\n\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 28, "score": 105341.39788733715 }, { "content": "pub fn consume_mapped<T, F, R>(iter: &mut impl LendingIterator<ItemValue = T>, f: F) -> Vec<R>\n\nwhere F: Fn(&T) -> R\n\n{\n\n let mut v = Vec::new();\n\n while let Some(item) = iter.next(){\n\n v.push( f(item) );\n\n }\n\n v\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 29, "score": 105192.33050478244 }, { "content": "#[test]\n\nfn force_cleanup_test(){\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n }\n\n // clear force cleanup effect\n\n {\n\n let event = EventQueue::<usize, S>::new();\n\n let mut _reader = EventReader::new(&event);\n\n\n\n event.extend(0..16);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4,4]);\n\n\n\n event.clear();\n\n // first - because occupied by reader, last - because tip of the queue\n\n assert_equal(get_chunks_capacities(&event), [4, 4]);\n\n }\n\n // truncate force cleanup effect\n\n {\n\n let event = EventQueue::<usize, S>::new();\n", "file_path": "src/event_queue/test.rs", "rank": 30, "score": 100629.01506313942 }, { "content": "fn get_chunks_lens<T, S: Settings>(event_queue: &EventQueue<T, S>) -> Vec<usize> {\n\n let list = &event_queue.0.list.lock();\n\n let mut chunk_lens = Vec::new();\n\n unsafe{\n\n foreach_chunk(list.first, null(), Ordering::Acquire,\n\n |chunk|{\n\n chunk_lens.push( chunk.chunk_state(Ordering::Relaxed).len() as usize );\n\n Continue(())\n\n });\n\n }\n\n chunk_lens\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 31, "score": 100560.96022540651 }, { "content": "fn get_chunks_capacities<T, S: Settings>(event_queue: &EventQueue<T, S>) -> Vec<usize> {\n\n let list = &event_queue.0.list.lock();\n\n let mut chunk_capacities = Vec::new();\n\n unsafe{\n\n foreach_chunk(list.first, null(), Ordering::Acquire,\n\n |chunk|{\n\n chunk_capacities.push( chunk.capacity() );\n\n Continue(())\n\n });\n\n }\n\n chunk_capacities\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 32, "score": 100560.96022540651 }, { "content": "#[test]\n\n#[allow(non_snake_case)]\n\nfn CleanupMode_Never_test(){\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n }\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..12);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4]);\n\n\n\n skip(&mut reader.iter(), 5);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4]);\n\n\n\n event.push(100);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4,4]);\n\n\n\n consume_copies(&mut reader.iter());\n\n assert_equal(get_chunks_capacities(&event), [4,4,4,4]);\n\n\n\n event.cleanup();\n\n assert_equal(get_chunks_capacities(&event), [4]);\n\n}", "file_path": "src/event_queue/test.rs", "rank": 33, "score": 97831.33801171831 }, { "content": "#[test]\n\n#[allow(non_snake_case)]\n\nfn CleanupMode_OnNewChunk_test(){\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode 
= CleanupMode::OnNewChunk;\n\n }\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..16);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4,4]);\n\n\n\n // 8 - will stop reader on the very last element of 2nd chunk. And will not leave it. So use 9\n\n skip(&mut reader.iter(), 9);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4,4]);\n\n\n\n event.push(100);\n\n assert_equal(get_chunks_capacities(&event), [4,4,4]);\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 34, "score": 95222.36058546633 }, { "content": "type ArcEvent = Pin<Arc<Event>>;\n\n\n\n\n", "file_path": "benches/mpmc_mt_read_write_bench.rs", "rank": 35, "score": 94738.47091639444 }, { "content": "pub fn skip<T>(iter: &mut impl LendingIterator<ItemValue = T>, len : usize) {\n\n let mut i = 0;\n\n while let Some(_) = iter.next(){\n\n i+=1;\n\n\n\n if i == len {\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "src/tests/utils.rs", "rank": 36, "score": 84906.97528155944 }, { "content": "struct Header<T, S: Settings>{\n\n /// Just to compare chunks by age/sequence fast. Brings order.\n\n /// Will overflow after years... So just ignore that possibility.\n\n pub(super) id : usize,\n\n pub(super) next : AtomicPtr<DynamicChunk<T, S>>,\n\n\n\n /// locked in reader next chunk and force_cleanup\n\n pub(super) chunk_switch_mutex : SpinSharedMutex<()>,\n\n /// Grow only.\n\n pub(super) readers_entered: AtomicUsize,\n\n\n\n /// When == readers count, it is safe to delete this chunk.\n\n /// Chunk read completely if reader consumed CHUNK_SIZE'ed element.\n\n /// Last chunk always exists.\n\n ///\n\n /// Grow only.\n\n pub(super) read_completely_times : AtomicUsize,\n\n\n\n // This needed to access Event from EventReader.\n\n // Never changes.\n", "file_path": "src/dynamic_chunk.rs", "rank": 37, "score": 75257.9589601455 }, { "content": "#[test]\n\nfn capacity_test(){\n\n let event = EventQueue::<usize, S>::new();\n\n\n\n event.extend(0..26);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16]);\n\n\n\n assert_eq!(event.chunk_capacity(), 16);\n\n assert_eq!(event.total_capacity(), get_chunks_capacities(&event).iter().sum());\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 38, "score": 74405.13509521044 }, { "content": "#[test]\n\nfn resize_test(){\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..32);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16]);\n\n\n\n event.change_chunk_capacity(6);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16,6]);\n\n assert_equal(get_chunks_lens(&event), [4,4,8,8,8,0]);\n\n\n\n event.push(32);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16,6]);\n\n assert_equal(get_chunks_lens(&event), [4,4,8,8,8,1]);\n\n\n\n consume_copies(&mut reader.iter());\n\n assert_equal(get_chunks_capacities(&event), [6]);\n\n assert_equal(get_chunks_lens(&event), [1]);\n\n\n\n event.extend(0..6);\n\n assert_equal(get_chunks_capacities(&event), [6,6]);\n\n assert_equal(get_chunks_lens(&event), [6, 1]);\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 39, "score": 74405.13509521044 }, { "content": "#[test]\n\nfn truncate_front_test(){\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..26);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16]);\n\n\n\n // basic\n\n event.truncate_front(4);\n\n reader.update_position();\n\n 
assert_equal(get_chunks_capacities(&event), [8,16]);\n\n assert_equal(consume_copies(&mut reader.iter()), [22, 23, 24, 25]);\n\n\n\n // more then queue\n\n event.extend(0..5);\n\n event.truncate_front(10);\n\n assert_equal(consume_copies(&mut reader.iter()), 0..5 as usize);\n\n\n\n // clear all queue\n\n event.extend(0..5);\n\n event.truncate_front(0);\n\n assert_equal(consume_copies(&mut reader.iter()), []);\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 40, "score": 72454.63204863155 }, { "content": "#[test]\n\nfn chunks_size_test(){\n\n let event = EventQueue::<usize, S>::new();\n\n event.extend(0..32);\n\n\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8,16]);\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 41, "score": 72454.63204863155 }, { "content": "#[cfg(feature = \"double_buffering\")]\n\n#[test]\n\nfn double_buffering_test(){\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..24);\n\n assert_equal(get_chunks_capacities(&event), [4,4,8,8]);\n\n\n\n consume_copies(&mut reader.iter());\n\n assert_eq!(event.0.list.lock().free_chunk.as_ref().unwrap().capacity(), 8);\n\n assert_equal(get_chunks_capacities(&event), [8]);\n\n\n\n event.extend(0..32);\n\n assert!(event.0.list.lock().free_chunk.is_none());\n\n assert_equal(get_chunks_capacities(&event), [8, 8, 16, 16]);\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 42, "score": 72454.63204863155 }, { "content": "struct Node(DynamicArray<Header, usize>);\n\n\n\nimpl Node{\n\n pub fn construct()-> &'static mut Self {\n\n let base = DynamicArray::<Header, usize>::construct(\n\n Header { i: 20 },\n\n 0,\n\n 10\n\n );\n\n unsafe { &mut *(base as *mut _ as *mut Self) }\n\n }\n\n\n\n pub unsafe fn destruct(this: *mut Self){\n\n DynamicArray::<Header, usize>::destruct(\n\n this as *mut DynamicArray<Header, usize>\n\n );\n\n }\n\n}*/", "file_path": "src/dynamic_array/test.rs", "rank": 43, "score": 71623.6479470794 }, { "content": "#[test]\n\nfn fuzzy_capacity_size_test(){\n\n use rand::Rng;\n\n let mut rng = rand::thread_rng();\n\n let size_bound = if cfg!(miri){ 1000 } else { 100000 };\n\n let read_bound = if cfg!(miri){ 100 } else { 10000 };\n\n for _ in 0..100{\n\n let size = rng.gen_range(0..size_bound);\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n event.extend(0..size);\n\n {\n\n let mut iter = reader.iter();\n\n for _ in 0..rng.gen_range(0..read_bound){\n\n iter.next();\n\n }\n\n }\n\n\n\n assert_eq!(event.total_capacity(), factual_capacity(&event));\n\n }\n\n}\n\n\n", "file_path": "src/event_queue/test.rs", "rank": 44, "score": 70640.27944232983 }, { "content": "fn read_bench<S: 'static + Settings>(\n\n readers_start_offset_step: usize,\n\n read_session_size: usize,\n\n readers_thread_count: usize\n\n) -> Duration {\n\n let event = EventQueue::<usize, S>::new();\n\n\n\n let mut readers = Vec::new();\n\n let mut queue_n = 0;\n\n for _ in 0..readers_thread_count {\n\n event.extend(queue_n.. 
queue_n+ readers_start_offset_step);\n\n readers.push(EventReader::new(&event));\n\n queue_n += readers_start_offset_step;\n\n }\n\n event.extend(queue_n..QUEUE_SIZE);\n\n\n\n // read\n\n let mut threads = Vec::new();\n\n for mut reader in readers{\n\n let thread = Box::new(thread::spawn(move || {\n", "file_path": "benches/mt_read_bench.rs", "rank": 45, "score": 66404.35525921697 }, { "content": "pub fn write_event_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Write\");\n\n // -------------------------- mpmc ---------------------------------------\n\n for session_size in [1, 4, 8, 16, 32, 128, 512]{\n\n group.bench_with_input(\n\n BenchmarkId::new(\"mpmc::EventQueue::extend session\", session_size),\n\n &session_size,\n\n |b, input| b.iter_custom(|iters| { mpmc_bench::bench_event_extend_session(iters, *input) }));\n\n }\n\n group.bench_function(\"mpmc::EventQueue::extend\", |b|b.iter_custom(|iters| mpmc_bench::bench_event_extend(iters)));\n\n group.bench_function(\"mpmc::EventQueue::push\", |b|b.iter_custom(|iters| mpmc_bench::bench_event_push(iters)));\n\n\n\n // -------------------------- spmc ---------------------------------------\n\n for session_size in [1, 4, 8, 16, 32, 128, 512]{\n\n group.bench_with_input(\n\n BenchmarkId::new(\"spmc::EventQueue::extend session\", session_size),\n\n &session_size,\n\n |b, input| b.iter_custom(|iters| { spmc_bench::bench_event_extend_session(iters, *input) }));\n\n }\n\n group.bench_function(\"spmc::EventQueue::extend\", |b|b.iter_custom(|iters| spmc_bench::bench_event_extend(iters)));\n", "file_path": "benches/write_bench.rs", "rank": 46, "score": 62739.50919720462 }, { "content": "pub fn read_event_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Read\");\n\n for read_session_size in [4, 8, 16, 32, 128, 512]{\n\n group.bench_with_input(\n\n BenchmarkId::new(\"EventReader\", read_session_size),\n\n &read_session_size,\n\n |b, input| b.iter_custom(|iters| { bench_event_reader(iters, *input) }));\n\n }\n\n group.bench_function(\"EventReader/Whole\", |b|b.iter_custom(|iters| bench_event_reader_whole(iters)));\n\n group.bench_function(\"Vec\", |b|b.iter_custom(|iters| bench_vector_whole(iters)));\n\n group.bench_function(\"Deque\", |b|b.iter_custom(|iters| bench_deque_whole(iters)));\n\n}\n\n\n\ncriterion_group!(benches, read_event_benchmark);\n\ncriterion_main!(benches);", "file_path": "benches/read_bench.rs", "rank": 47, "score": 62739.50919720462 }, { "content": "pub fn consume_copies<T: Clone>(iter: &mut impl LendingIterator<ItemValue = T>) -> Vec<T> {\n\n consume_mapped(iter, |item| item.clone())\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 48, "score": 60507.13553811757 }, { "content": "pub fn read_empty_event_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Read empty\");\n\n group.bench_function(\"EventReader\", |b|b.iter_custom(|iters| bench_event_reader(iters)));\n\n group.bench_function(\"Vec\", |b|b.iter_custom(|iters| bench_vector(iters)));\n\n group.bench_function(\"Deque\", |b|b.iter_custom(|iters| bench_deque(iters)));\n\n}\n\n\n\ncriterion_group!(benches, read_empty_event_benchmark);\n\ncriterion_main!(benches);", "file_path": "benches/read_empty_bench.rs", "rank": 49, "score": 59676.28725074942 }, { "content": "pub fn mt_read_event_benchmark(c: &mut Criterion) {\n\n fn bench(group: &mut BenchmarkGroup<WallTime>, id: &str, mut f: impl FnMut() -> Duration) {\n\n group.bench_function(id, |b| b.iter_custom(|iters| {\n\n let mut total = Duration::ZERO;\n\n for 
_ in 0..iters {\n\n total += f();\n\n }\n\n total\n\n }));\n\n }\n\n\n\n let mut test_group = |name: &str, readers_start_offset_step: usize, read_session_size: usize, threads_count: usize|{\n\n let mut group = c.benchmark_group(name);\n\n\n\n bench(&mut group, \"chunk:32\", ||{\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 32;\n\n const MAX_CHUNK_SIZE: u32 = 32;\n\n const CLEANUP: CleanupMode = CleanupMode::Never;\n\n }\n", "file_path": "benches/mt_read_bench.rs", "rank": 50, "score": 59676.28725074942 }, { "content": "pub trait Settings{\n\n const MIN_CHUNK_SIZE : u32;\n\n const MAX_CHUNK_SIZE : u32;\n\n const CLEANUP : CleanupMode;\n\n\n\n // for spmc/mpmc\n\n /// Lock on new chunk cleanup event. Will dead-lock if already locked.\n\n const LOCK_ON_NEW_CHUNK_CLEANUP: bool;\n\n /// Call cleanup on unsubscribe?\n\n const CLEANUP_IN_UNSUBSCRIBE: bool;\n\n}\n\n\n\npub struct List<T, S: Settings>{\n\n first: *mut DynamicChunk<T, S>,\n\n last : *mut DynamicChunk<T, S>,\n\n chunk_id_counter: usize,\n\n total_capacity: usize,\n\n\n\n readers_count: u32,\n\n\n", "file_path": "src/event_queue.rs", "rank": 51, "score": 57248.164846492815 }, { "content": "pub fn mt_read_write_event_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"mpmc mt read write\");\n\n for session_size in [4, 8, 16, 32, 128, 512 as usize]{\n\n group.bench_with_input(\n\n BenchmarkId::new(\"mpmc::EventQueue extend\", session_size),\n\n &session_size,\n\n |b, input| b.iter_custom(|iters| {\n\n let session_len = *input;\n\n let f = move |event: &ArcEvent, from: usize, to: usize|{\n\n write_extend(session_len, event, from, to);\n\n };\n\n bench_event_read_write(iters, f)\n\n }));\n\n }\n\n\n\n #[inline(always)]\n\n fn write_push(event: &ArcEvent, from: usize, to: usize){\n\n for i in from..to{\n\n event.push(black_box(i));\n\n }\n", "file_path": "benches/mpmc_mt_read_write_bench.rs", "rank": 53, "score": 55744.44778873729 }, { "content": "#[test]\n\nfn clear_test() {\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.push(0);\n\n event.push(1);\n\n event.push(2);\n\n event.push(3);\n\n\n\n event.clear();\n\n assert!(reader.iter().next().is_none());\n\n\n\n event.push(4);\n\n event.push(5);\n\n assert_equal(\n\n consume_copies(&mut reader.iter()),\n\n [4, 5 as usize]\n\n );\n\n}\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 54, "score": 49325.49817706735 }, { "content": "#[test]\n\nfn extend_test() {\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 8;\n\n const MAX_CHUNK_SIZE: u32 = 8;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n let len =\n\n if cfg!(miri){ 1000 } else { 100000 };\n\n let rng : Range<usize> = 0..len;\n\n\n\n event.extend(rng.clone());\n\n\n\n assert_eq!(\n\n consume_copies(&mut reader.iter()).iter().sum::<usize>(),\n\n rng.sum()\n\n );\n\n}\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 55, "score": 49325.49817706735 }, { "content": "#[test]\n\nfn basic_test(){\n\n let mut event = EventQueue::<usize>::new();\n\n let mut reader1 = EventReader::new(&mut event);\n\n\n\n event.push(1);\n\n event.extend(2..5);\n\n\n\n assert_equal( consume_copies(&mut reader1.iter()), [1,2,3,4 as usize]);\n\n}\n\n\n", 
"file_path": "src/tests/spmc.rs", "rank": 56, "score": 49325.49817706735 }, { "content": "#[test]\n\n#[cfg(not(miri))]\n\nfn compile_test() {\n\n run_mode(\"compile-fail\");\n\n}", "file_path": "tests/compile-tests.rs", "rank": 57, "score": 49325.49817706735 }, { "content": "#[test]\n\n#[allow(unused_assignments)]\n\nfn push_drop_test() {\n\n let destruct_counter = AtomicUsize::new(0);\n\n let destruct_counter_ref = &destruct_counter;\n\n let on_destroy = ||{destruct_counter_ref.fetch_add(1, Ordering::Relaxed);};\n\n\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let mut reader_option : Option<_> = None;\n\n {\n\n let chunk_list = EventQueue::<_, S>::new();\n\n reader_option = Option::Some(EventReader::new(&chunk_list));\n\n\n\n chunk_list.push(Data::from(0, on_destroy));\n\n chunk_list.push(Data::from(1, on_destroy));\n\n chunk_list.push(Data::from(2, on_destroy));\n\n chunk_list.push(Data::from(3, on_destroy));\n", "file_path": "src/tests/mpmc.rs", "rank": 58, "score": 48242.809288801654 }, { "content": "#[test]\n\nfn uninit_test(){\n\n let data_destruct_order = RefCell::new(Vec::new());\n\n let on_destroy = |i:usize|{\n\n data_destruct_order.borrow_mut().push(i);\n\n };\n\n\n\n let header_destruct_counter = RefCell::new(0);\n\n let on_header_destroy = |_|{\n\n *header_destruct_counter.borrow_mut() += 1;\n\n };\n\n\n\n let fla = unsafe {\n\n &mut *DynamicArray::<Header<_>, Data<_>>::construct_uninit(\n\n Header { i: 100, on_destroy: on_header_destroy },\n\n 8\n\n )\n\n };\n\n\n\n unsafe { fla.write_at(1, Data { i: 800, on_destroy }); }\n\n\n", "file_path": "src/dynamic_array/test.rs", "rank": 59, "score": 48242.809288801654 }, { "content": "#[test]\n\n#[cfg(any(not(miri), not(target_os = \"windows\")))]\n\nfn mt_read_test() {\n\n for _ in 0..10{\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 512;\n\n const MAX_CHUNK_SIZE: u32 = 512;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n mt_read_test_impl::<S>(4, if cfg!(miri){ 1000 } else { 1000000 });\n\n }\n\n}\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 60, "score": 48242.809288801654 }, { "content": "#[test]\n\nfn huge_push_test() {\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let event = EventQueue::<usize, S>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n let len =\n\n if cfg!(miri){ 1000 } else { 100000 };\n\n\n\n for i in 0..len{\n\n event.push(i);\n\n }\n\n\n\n consume_copies(&mut reader.iter());\n\n}\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 61, "score": 48242.809288801654 }, { "content": "#[test]\n\nfn default_test(){\n\n let data_destruct_order = RefCell::new(Vec::new());\n\n let on_destroy = |i:usize|{\n\n data_destruct_order.borrow_mut().push(i);\n\n };\n\n\n\n let header_destruct_counter = RefCell::new(0);\n\n let on_header_destroy = |_|{\n\n *header_destruct_counter.borrow_mut() += 1;\n\n };\n\n\n\n\n\n let fla = unsafe{\n\n &mut *DynamicArray::<Header<_>, Data<_>>::construct(\n\n Header { i: 100, on_destroy: on_header_destroy },\n\n Data { i: 0, on_destroy },\n\n 4\n\n )\n\n };\n\n\n", "file_path": "src/dynamic_array/test.rs", "rank": 62, "score": 48242.809288801654 }, { "content": "#[test]\n\n#[cfg(any(not(miri), not(target_os = \"windows\")))]\n\nfn mt_write_read_test() {\n\nfor _ in 0..if 
cfg!(miri){10} else {100} {\n\n let writer_chunk = if cfg!(miri){ 1000 } else { 10000 };\n\n let writers_thread_count = 2;\n\n let readers_thread_count = 4;\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 32;\n\n const MAX_CHUNK_SIZE: u32 = 32;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let event = EventQueue::<[usize;4], S>::new();\n\n\n\n let mut readers = Vec::new();\n\n for _ in 0..readers_thread_count{\n\n readers.push(EventReader::new(&event));\n\n }\n\n\n\n // etalon\n\n let sum0: usize = (0..writers_thread_count*writer_chunk).map(|i|i+0).sum();\n", "file_path": "src/tests/mpmc.rs", "rank": 63, "score": 47238.42808742309 }, { "content": "#[test]\n\n#[cfg(any(not(miri), not(target_os = \"windows\")))]\n\nfn mt_write_read_test() {\n\nfor _ in 0..if cfg!(miri){10} else {100} {\n\n let queue_size = if cfg!(miri){ 1000 } else { 10000 };\n\n let readers_thread_count = 4;\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 32;\n\n const MAX_CHUNK_SIZE: u32 = 32;\n\n }\n\n\n\n let mut event = EventQueue::<[usize;4], S>::new();\n\n\n\n let mut readers = Vec::new();\n\n for _ in 0..readers_thread_count{\n\n readers.push(EventReader::new(&mut event));\n\n }\n\n\n\n // etalon\n\n let sum0: usize = (0..queue_size).map(|i|i+0).sum();\n\n let sum1: usize = (0..queue_size).map(|i|i+1).sum();\n\n let sum2: usize = (0..queue_size).map(|i|i+2).sum();\n", "file_path": "src/tests/spmc.rs", "rank": 64, "score": 47238.42808742309 }, { "content": "#[test]\n\n#[cfg(any(not(miri), not(target_os = \"windows\")))]\n\nfn mt_push_truncate_test() {\n\nfor _ in 0..if cfg!(miri){1} else {100}{\n\n struct S{} impl Settings for S{\n\n const MAX_CHUNK_SIZE: u32 = 256;\n\n }\n\n\n\n let event = EventQueue::<usize, S>::new();\n\n\n\n let mut readers = Vec::new();\n\n for _ in 0..2{\n\n readers.push(EventReader::new(&event));\n\n }\n\n\n\n let writer_thread = {\n\n let event = event.clone();\n\n Box::new(thread::spawn(move || {\n\n for i in 0..10000{\n\n event.push(i);\n\n }\n\n }))\n", "file_path": "src/tests/mpmc.rs", "rank": 65, "score": 47238.42808742309 }, { "content": "#[test]\n\nfn read_on_full_chunk_test() {\n\n let destruct_counter = AtomicUsize::new(0);\n\n let destruct_counter_ref = &destruct_counter;\n\n let on_destroy = ||{destruct_counter_ref.fetch_add(1, Ordering::Relaxed);};\n\n\n\n {\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let chunk_list = EventQueue::<_, S>::new();\n\n let mut reader = EventReader::new(&chunk_list);\n\n\n\n chunk_list.push(Data::from(0, on_destroy));\n\n chunk_list.push(Data::from(1, on_destroy));\n\n chunk_list.push(Data::from(2, on_destroy));\n\n chunk_list.push(Data::from(3, on_destroy));\n\n\n", "file_path": "src/tests/mpmc.rs", "rank": 66, "score": 47238.42808742309 }, { "content": "#[test]\n\nfn loom_mt_read_test(){\n\n loom::model(|| {\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n mt_read_test_impl::<S>(3, 7);\n\n });\n\n}\n\n\n", "file_path": "src/tests/loom_test.rs", "rank": 67, "score": 46304.15543727886 }, { "content": "fn main() {\n\n let event = EventQueue::<usize>::new();\n\n let mut reader = EventReader::new(&event);\n\n\n\n event.extend(0..10);\n\n\n\n let v = 100;\n\n let mut i: &usize = &v;\n\n {\n\n let mut iter = reader.iter();\n\n let 
item = iter.next().unwrap(); //~ ERROR `iter` does not live long enough\n\n i = item;\n\n }\n\n assert_eq!(*i, 100);\n\n}", "file_path": "tests/compile-tests/compile-fail/iter_lifetimes.rs", "rank": 68, "score": 46304.15543727886 }, { "content": "#[test]\n\nfn loom_mt_write_read_test(){\n\n // Use Condvar, instead of AtomicBool flag.\n\n // Not the same, but at least loom can handle it.\n\n loom::model(|| {\n\n let writer_chunk: usize = 3;\n\n let writers_thread_count: usize = 1; //should be 2 writers, instead of 1, but loom does not support >4 threads\n\n let readers_thread_count: usize = 2;\n\n\n\n struct S{} impl Settings for S{\n\n const MIN_CHUNK_SIZE: u32 = 4;\n\n const MAX_CHUNK_SIZE: u32 = 4;\n\n const CLEANUP: CleanupMode = DefaultSettings::CLEANUP;\n\n }\n\n\n\n let event = EventQueue::<[usize;4], S>::new();\n\n\n\n let mut readers = Vec::new();\n\n for _ in 0..readers_thread_count{\n\n readers.push(EventReader::new(&event));\n\n }\n", "file_path": "src/tests/loom_test.rs", "rank": 69, "score": 45432.89824447573 }, { "content": "fn run_mode(mode: &'static str) {\n\n let mut config = compiletest::Config::default();\n\n\n\n config.mode = mode.parse().expect(\"Invalid mode\");\n\n config.src_base = PathBuf::from(format!(\"tests/compile-tests/{}\", mode));\n\n config.target_rustcflags = Some(\"-L target/debug -L target/debug/deps\".to_string());\n\n config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464\n\n\n\n compiletest::run_tests(&config);\n\n}\n\n\n\n// If this test fail - try `cargo clean` first.\n", "file_path": "tests/compile-tests.rs", "rank": 70, "score": 41265.47822899639 }, { "content": "pub trait Settings{\n\n const MIN_CHUNK_SIZE : u32 = 4;\n\n const MAX_CHUNK_SIZE : u32 = 4096;\n\n const CLEANUP: CleanupMode = CleanupMode::OnChunkRead;\n\n}\n\n\n\npub struct DefaultSettings{}\n\nimpl Settings for DefaultSettings{}\n\n\n\n/// mpmc::Settings -> event_queue::Settings\n\npub(crate) struct BS<S: Settings>{\n\n _phantom: PhantomData<S>\n\n}\n\nimpl<S: Settings> BaseSettings for BS<S>{\n\n const MIN_CHUNK_SIZE : u32 = S::MIN_CHUNK_SIZE;\n\n const MAX_CHUNK_SIZE : u32 = S::MAX_CHUNK_SIZE;\n\n const CLEANUP: CleanupMode = S::CLEANUP;\n\n const LOCK_ON_NEW_CHUNK_CLEANUP: bool = false;\n\n const CLEANUP_IN_UNSUBSCRIBE: bool = true;\n\n}", "file_path": "src/mpmc/mod.rs", "rank": 71, "score": 30065.951762885055 }, { "content": "pub trait Settings{\n\n const MIN_CHUNK_SIZE : u32 = 4;\n\n const MAX_CHUNK_SIZE : u32 = 4096;\n\n const CLEANUP: CleanupMode = CleanupMode::OnNewChunk;\n\n}\n\n\n\npub struct DefaultSettings{}\n\nimpl Settings for DefaultSettings{}\n\n\n\n/// spmc::Settings -> event_queue::Settings\n\npub(crate) struct BS<S: Settings>{\n\n _phantom: PhantomData<S>\n\n}\n\nimpl<S: Settings> BaseSettings for BS<S>{\n\n const MIN_CHUNK_SIZE : u32 = S::MIN_CHUNK_SIZE;\n\n const MAX_CHUNK_SIZE : u32 = S::MAX_CHUNK_SIZE;\n\n const CLEANUP: CleanupMode = S::CLEANUP;\n\n const LOCK_ON_NEW_CHUNK_CLEANUP: bool = true;\n\n const CLEANUP_IN_UNSUBSCRIBE: bool = false;\n\n}", "file_path": "src/spmc/mod.rs", "rank": 72, "score": 30065.951762885055 }, { "content": "// Chunk's read_completely_times updated on Iter::Drop\n\n//\n\n// Chunk's iteration synchronization occurs around [ChunkStorage::storage_len] acquire/release access\n\n//\n\n\n\nuse crate::sync::Ordering;\n\nuse std::ptr::{NonNull};\n\nuse crate::event_queue::{CleanupMode, EventQueue, foreach_chunk, Settings};\n\nuse std::ops::ControlFlow::{Continue};\n\nuse crate::cursor::Cursor;\n\nuse 
crate::chunk_state::{PackedChunkState};\n\nuse crate::StartPositionEpoch;\n\n\n\npub struct EventReader<T, S: Settings>\n\n{\n\n pub(super) position: Cursor<T, S>,\n\n pub(super) start_position_epoch: StartPositionEpoch,\n\n}\n\n\n\nunsafe impl<T, S: Settings> Send for EventReader<T, S>{}\n", "file_path": "src/event_reader.rs", "rank": 73, "score": 29496.104122519653 }, { "content": "//unsafe impl<T, S: Settings> Send for EventQueue<T, S>{}\n\n//unsafe impl<T, S: Settings> Sync for EventQueue<T, S>{}\n\n\n\nimpl<T, S: Settings> EventQueue<T, S>\n\n{\n\n pub fn with_capacity(new_capacity: u32) -> Pin<Arc<Self>>{\n\n assert!(S::MIN_CHUNK_SIZE <= new_capacity && new_capacity <= S::MAX_CHUNK_SIZE);\n\n\n\n let this = Arc::pin(Self{\n\n list: Mutex::new(List{\n\n first: null_mut(),\n\n last: null_mut(),\n\n chunk_id_counter: 0,\n\n readers_count:0,\n\n total_capacity:new_capacity as usize,\n\n penult_chunk_size : 0,\n\n\n\n #[cfg(feature = \"double_buffering\")]\n\n free_chunk: None,\n\n }),\n", "file_path": "src/event_queue.rs", "rank": 74, "score": 29493.740240356055 }, { "content": " |mut_chunk_ptr| func(&mut *mut_chunk_ptr)\n\n );\n\n}\n\n\n\n/// end_chunk_ptr may be null\n\n#[inline(always)]\n\npub(super) unsafe fn foreach_chunk_ptr_mut<T, F, S: Settings>\n\n(\n\n start_chunk_ptr : *mut DynamicChunk<T, S>,\n\n end_chunk_ptr : *const DynamicChunk<T, S>,\n\n load_ordering : Ordering,\n\n mut func : F\n\n)\n\n where F: FnMut(*mut DynamicChunk<T, S>) -> ControlFlow<()>\n\n{\n\n debug_assert!(!start_chunk_ptr.is_null());\n\n debug_assert!(\n\n end_chunk_ptr.is_null()\n\n ||\n\n std::ptr::eq((*start_chunk_ptr).event(), (*end_chunk_ptr).event())\n", "file_path": "src/event_queue.rs", "rank": 75, "score": 29493.46360714558 }, { "content": " }\n\n );\n\n }\n\n }\n\n\n\n pub fn clear(&self, list: &mut List<T, S>){\n\n let last_chunk = unsafe{ &*list.last };\n\n let last_chunk_len = last_chunk.chunk_state(Ordering::Relaxed).len() as usize;\n\n\n\n self.set_start_position(list, Cursor {\n\n chunk: last_chunk,\n\n index: last_chunk_len\n\n });\n\n\n\n self.force_cleanup_impl(list);\n\n }\n\n\n\n pub fn truncate_front(&self, list: &mut List<T, S>, len: usize) {\n\n // make chunks* array\n\n\n", "file_path": "src/event_queue.rs", "rank": 76, "score": 29492.829895961604 }, { "content": " }\n\n }\n\n\n\n let chunk = unsafe{&*self.position.chunk};\n\n let value = unsafe { chunk.get_unchecked(self.position.index) };\n\n self.position.index += 1;\n\n\n\n Some(value)\n\n }\n\n}\n\n\n\nimpl<'a, T, S: Settings> Drop for Iter<'a, T, S>{\n\n #[inline]\n\n fn drop(&mut self) {\n\n let try_cleanup = S::CLEANUP == CleanupMode::OnChunkRead; // should be const\n\n\n\n debug_assert!(self.position >= self.event_reader.position);\n\n let mut need_cleanup = false;\n\n\n\n let first_chunk = self.event_reader.position.chunk;\n", "file_path": "src/event_reader.rs", "rank": 77, "score": 29492.340380399884 }, { "content": " node_ptr = node.next(Ordering::Relaxed);\n\n DynamicChunk::destruct(node);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[inline(always)]\n\npub(super) unsafe fn foreach_chunk<T, F, S: Settings>\n\n(\n\n start_chunk_ptr : *const DynamicChunk<T, S>,\n\n end_chunk_ptr : *const DynamicChunk<T, S>,\n\n load_ordering : Ordering,\n\n mut func : F\n\n)\n\n where F: FnMut(&DynamicChunk<T, S>) -> ControlFlow<()>\n\n{\n\n foreach_chunk_mut(\n\n start_chunk_ptr as *mut _,\n\n end_chunk_ptr,\n", "file_path": "src/event_queue.rs", "rank": 78, "score": 29492.26766506231 }, { "content": " load_ordering,\n\n |mut_chunk| 
func(mut_chunk)\n\n );\n\n}\n\n\n\n/// end_chunk_ptr may be null\n\n#[inline(always)]\n\npub(super) unsafe fn foreach_chunk_mut<T, F, S: Settings>\n\n(\n\n start_chunk_ptr : *mut DynamicChunk<T, S>,\n\n end_chunk_ptr : *const DynamicChunk<T, S>,\n\n load_ordering : Ordering,\n\n mut func : F\n\n)\n\n where F: FnMut(&mut DynamicChunk<T, S>) -> ControlFlow<()>\n\n{\n\n foreach_chunk_ptr_mut(\n\n start_chunk_ptr,\n\n end_chunk_ptr,\n\n load_ordering,\n", "file_path": "src/event_queue.rs", "rank": 79, "score": 29492.23725524599 }, { "content": " prev_chunk = chunk;\n\n return Continue(());\n\n }\n\n\n\n let next_chunk_ptr = (*chunk).next(Ordering::Relaxed);\n\n debug_assert!(!next_chunk_ptr.is_null());\n\n\n\n (*prev_chunk).set_next(next_chunk_ptr, Ordering::Release);\n\n drop(lock);\n\n\n\n self.free_chunk::<false>(chunk, list);\n\n Continue(())\n\n }\n\n );\n\n }\n\n }\n\n\n\n pub fn cleanup(&self){\n\n self.cleanup_impl(&mut *self.list.lock());\n\n }\n", "file_path": "src/event_queue.rs", "rank": 80, "score": 29492.171496862113 }, { "content": " let chunk_readers = chunk.readers_entered().load(Ordering::Acquire);\n\n if read >= chunk_readers {\n\n need_cleanup = true;\n\n }\n\n }\n\n }\n\n\n\n Continue(())\n\n }\n\n );\n\n }\n\n\n\n // Cleanup (optional)\n\n if try_cleanup {\n\n if need_cleanup{\n\n unsafe{&*end_chunk}.event().cleanup();\n\n }\n\n }\n\n\n\n // 2. Update EventReader chunk+index\n\n self.event_reader.position = self.position;\n\n }\n\n}", "file_path": "src/event_reader.rs", "rank": 81, "score": 29491.252370415026 }, { "content": "\n\n let mut total_len = 0;\n\n for i in (0..chunks_count).rev(){\n\n let chunk = unsafe{ &*chunks[i] };\n\n let chunk_len = chunk.chunk_state(Ordering::Relaxed).len() as usize;\n\n total_len += chunk_len;\n\n if total_len >= len{\n\n let new_start_position = Cursor {\n\n chunk: chunks[i],\n\n index: total_len - len\n\n };\n\n // Do we actually need to truncate?\n\n if let Some(start_position) = unsafe{*self.start_position.as_mut_ptr()}{\n\n if start_position >= new_start_position{\n\n return;\n\n }\n\n }\n\n\n\n self.set_start_position(list, new_start_position);\n\n self.force_cleanup_impl(list);\n", "file_path": "src/event_queue.rs", "rank": 82, "score": 29491.1686957271 }, { "content": " position: event_reader.position,\n\n chunk_state,\n\n event_reader,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T, S: Settings> LendingIterator for Iter<'a, T, S>{\n\n type ItemValue = T;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<&Self::ItemValue> {\n\n if /*unlikely*/ self.position.index as u32 == self.chunk_state.len(){\n\n // should try next chunk?\n\n if !self.chunk_state.has_next(){\n\n return None;\n\n }\n\n\n\n // acquire next chunk\n\n let next_chunk = unsafe{\n", "file_path": "src/event_reader.rs", "rank": 83, "score": 29490.97385929421 }, { "content": " }\n\n );\n\n }\n\n if list.first == list.last{\n\n list.penult_chunk_size = 0;\n\n }\n\n }\n\n\n\n /// This will traverse up to the start_point - and will free all unoccupied chunks. 
(out-of-order cleanup)\n\n /// This one slower then cleanup_impl.\n\n fn force_cleanup_impl(&self, list: &mut List<T, S>){\n\n self.cleanup_impl(list);\n\n\n\n // Lock start_position permanently, due to out of order chunk destruction.\n\n // Reader can try enter in the chunk in the middle of force_cleanup execution.\n\n let start_position = self.start_position.lock();\n\n let terminal_chunk = match &*start_position{\n\n None => { return; }\n\n Some(cursor) => {cursor.chunk}\n\n };\n", "file_path": "src/event_queue.rs", "rank": 84, "score": 29490.896040603784 }, { "content": " // 2. Mark current chunk read\n\n let chunk = unsafe{&*self.position.chunk};\n\n if /*constexpr*/ S::CLEANUP == CleanupMode::OnChunkRead {\n\n let event = chunk.event();\n\n let readers_entered = chunk.readers_entered().load(Ordering::Acquire);\n\n\n\n // MORE or equal, just in case (this MT...). This check is somewhat opportunistic.\n\n let prev_read = chunk.read_completely_times().fetch_add(1, Ordering::AcqRel);\n\n if prev_read+1 >= readers_entered{\n\n drop(start_position_lock);\n\n event.cleanup();\n\n }\n\n } else {\n\n chunk.read_completely_times().fetch_add(1, Ordering::AcqRel);\n\n }\n\n\n\n // 3. Change position\n\n self.position = start_position;\n\n }\n\n }\n", "file_path": "src/event_reader.rs", "rank": 85, "score": 29490.490017528013 }, { "content": "\n\nimpl<T, S: Settings> EventReader<T, S>\n\n{\n\n // Have much better performance being non-inline. Occurs rarely.\n\n // This is the only reason this code - is a function.\n\n #[inline(never)]\n\n #[cold]\n\n fn do_update_start_position_and_get_chunk_state(&mut self) -> PackedChunkState {\n\n let event = unsafe{(*self.position.chunk).event()};\n\n\n\n // fast forward\n\n {\n\n let start_position_lock = event.start_position.lock();\n\n if let Some(start_position) = *start_position_lock{\n\n if self.position < start_position {\n\n\n\n // 1. Enter new_position chunk\n\n let new_chunk = unsafe{&*start_position.chunk};\n\n new_chunk.readers_entered().fetch_add(1, Ordering::AcqRel);\n\n\n", "file_path": "src/event_reader.rs", "rank": 86, "score": 29489.93851563818 }, { "content": " list.chunk_id_counter,\n\n epoch) }\n\n }, None => unsafe { std::hint::unreachable_unchecked() },\n\n }\n\n } else {\n\n // TODO: try free in cleanup somehow\n\n list.free_chunk = None;\n\n }\n\n }\n\n\n\n if new_node.is_null(){\n\n new_node = DynamicChunk::<T, S>::construct(list.chunk_id_counter, epoch, self, size);\n\n }\n\n new_node\n\n };\n\n\n\n // connect\n\n node.set_next(new_node, Ordering::Release);\n\n list.last = new_node;\n\n list.penult_chunk_size = node.capacity() as u32;\n", "file_path": "src/event_queue.rs", "rank": 87, "score": 29489.4526103895 }, { "content": "#[cfg(not(loom))]\n\n#[cfg(test)]\n\nmod test;\n\n\n\nuse crate::sync::{Ordering};\n\nuse crate::sync::{Mutex, Arc};\n\nuse crate::sync::{SpinMutex};\n\n\n\nuse std::ptr::{null_mut, null, NonNull};\n\nuse crate::event_reader::EventReader;\n\nuse std::ops::ControlFlow;\n\nuse std::ops::ControlFlow::{Continue, Break};\n\nuse std::marker::PhantomPinned;\n\nuse std::pin::Pin;\n\nuse crate::cursor::Cursor;\n\nuse crate::dynamic_chunk::{DynamicChunk};\n\n#[cfg(feature = \"double_buffering\")]\n\nuse crate::dynamic_chunk::{DynamicChunkRecycled};\n\nuse crate::{StartPositionEpoch};\n\n\n", "file_path": "src/event_queue.rs", "rank": 88, "score": 29489.435930965104 }, { "content": " if list.first as *const _ == terminal_chunk{\n\n return;\n\n }\n\n unsafe {\n\n // cleanup_impl dealt with first chunk before. 
Omit.\n\n let mut prev_chunk = list.first;\n\n // using _ptr version, because with &chunk - reference should be valid during whole\n\n // lambda function call. (according to miri and some rust borrowing rules).\n\n // And we actually drop that chunk.\n\n foreach_chunk_ptr_mut(\n\n (*list.first).next(Ordering::Relaxed),\n\n terminal_chunk,\n\n Ordering::Relaxed, // we're under mutex\n\n |chunk| {\n\n // We need to lock only `prev_chunk`, because it is impossible\n\n // to get in `chunk` omitting chunk.readers_entered+1\n\n let lock = (*prev_chunk).chunk_switch_mutex().write();\n\n let chunk_readers = (*chunk).readers_entered().load(Ordering::Acquire);\n\n let chunk_read_times = (*chunk).read_completely_times().load(Ordering::Acquire);\n\n if chunk_readers != chunk_read_times {\n", "file_path": "src/event_queue.rs", "rank": 89, "score": 29489.22670750961 }, { "content": " }\n\n\n\n/*\n\n // chunks_count can be atomic. But does that needed?\n\n pub fn chunks_count(&self) -> usize {\n\n let list = self.list.lock();\n\n unsafe{\n\n list.chunk_id_counter/*(*list.last).id*/ - (*list.first).id() + 1\n\n }\n\n }*/\n\n}\n\n\n\nimpl<T, S: Settings> Drop for EventQueue<T, S>{\n\n fn drop(&mut self) {\n\n let list = self.list.get_mut();\n\n debug_assert!(list.readers_count == 0);\n\n unsafe{\n\n let mut node_ptr = list.first;\n\n while node_ptr != null_mut() {\n\n let node = &mut *node_ptr;\n", "file_path": "src/event_queue.rs", "rank": 90, "score": 29489.218549389898 }, { "content": " if free_chunk.capacity() >= (*chunk).capacity() {\n\n // Discard - recycled chunk bigger then our\n\n DynamicChunk::destruct(chunk);\n\n return;\n\n }\n\n }\n\n // Replace free_chunk with our.\n\n list.free_chunk = Some(DynamicChunk::recycle(chunk));\n\n }\n\n }\n\n\n\n fn cleanup_impl(&self, list: &mut List<T, S>){\n\n unsafe {\n\n // using _ptr version, because with &chunk - reference should be valid during whole\n\n // lambda function call. (according to miri and some rust borrowing rules).\n\n // And we actually drop that chunk.\n\n foreach_chunk_ptr_mut(\n\n list.first,\n\n list.last,\n\n Ordering::Relaxed, // we're under mutex\n", "file_path": "src/event_queue.rs", "rank": 91, "score": 29489.16898008383 }, { "content": " start_position: SpinMutex::new(None),\n\n _pinned: PhantomPinned,\n\n });\n\n\n\n let node = DynamicChunk::<T, S>::construct(\n\n 0, StartPositionEpoch::zero(), &*this, new_capacity as usize);\n\n\n\n unsafe {\n\n let event = &mut *(&*this as *const _ as *mut EventQueue<T, S>);\n\n event.list.get_mut().first = node;\n\n event.list.get_mut().last = node;\n\n }\n\n\n\n this\n\n }\n\n\n\n #[inline]\n\n fn add_chunk_sized(&self, list: &mut List<T, S>, size: usize) -> &mut DynamicChunk<T, S>{\n\n let node = unsafe{&mut *list.last};\n\n let epoch = node.chunk_state(Ordering::Relaxed).epoch();\n", "file_path": "src/event_queue.rs", "rank": 92, "score": 29488.87290267698 }, { "content": " // Enter chunk\n\n last_chunk.readers_entered().fetch_add(1, Ordering::AcqRel);\n\n\n\n EventReader{\n\n position: Cursor{chunk: last_chunk, index: chunk_state.len() as usize},\n\n start_position_epoch: chunk_state.epoch()\n\n }\n\n }\n\n\n\n // Called from EventReader Drop\n\n //\n\n // `this_ptr` instead of `&self`, because `&self` as reference should be valid during\n\n // function call. And we drop it sometimes.... 
through `Arc::decrement_strong_count`.\n\n pub(crate) fn unsubscribe(this_ptr: NonNull<Self>, event_reader: &EventReader<T, S>){\n\n let this = unsafe { this_ptr.as_ref() };\n\n let mut list = this.list.lock();\n\n\n\n // Exit chunk\n\n unsafe{&*event_reader.position.chunk}.read_completely_times().fetch_add(1, Ordering::AcqRel);\n\n\n", "file_path": "src/event_queue.rs", "rank": 93, "score": 29488.707944545185 }, { "content": " #[inline]\n\n pub fn update_position(&mut self) {\n\n self.update_start_position_and_get_chunk_state();\n\n }\n\n\n\n // TODO: copy_iter() ?\n\n\n\n #[inline]\n\n pub fn iter(&mut self) -> Iter<T, S>{\n\n Iter::new(self)\n\n }\n\n}\n\n\n\nimpl<T, S: Settings> Drop for EventReader<T, S>{\n\n fn drop(&mut self) {\n\n unsafe {\n\n EventQueue::<T, S>::unsubscribe(\n\n NonNull::from((*self.position.chunk).event()),\n\n self\n\n );\n\n }\n\n }\n\n}\n\n\n\n/// This should be rust GAT iterator. But it does not exists yet.\n", "file_path": "src/event_reader.rs", "rank": 94, "score": 29488.686644692953 }, { "content": " if S::CLEANUP_IN_UNSUBSCRIBE && S::CLEANUP != CleanupMode::Never{\n\n if list.first as *const _ == event_reader.position.chunk {\n\n this.cleanup_impl(&mut *list);\n\n }\n\n }\n\n\n\n list.readers_count -= 1;\n\n if list.readers_count == 0{\n\n drop(list);\n\n // Safe to self-destruct\n\n unsafe { Arc::decrement_strong_count(this_ptr.as_ptr()); }\n\n }\n\n }\n\n\n\n unsafe fn free_chunk<const LOCK_ON_WRITE_START_POSITION: bool>(\n\n &self,\n\n chunk: *mut DynamicChunk<T, S>,\n\n list: &mut List<T, S>)\n\n {\n\n if let Some(start_position) = *self.start_position.as_mut_ptr(){\n", "file_path": "src/event_queue.rs", "rank": 95, "score": 29488.666445687064 }, { "content": " return;\n\n }\n\n }\n\n\n\n // len is bigger then total_len.\n\n // do nothing.\n\n }\n\n\n\n pub fn change_chunk_capacity(&self, list: &mut List<T, S>, new_capacity: u32){\n\n assert!(S::MIN_CHUNK_SIZE <= new_capacity && new_capacity <= S::MAX_CHUNK_SIZE);\n\n self.on_new_chunk_cleanup(list);\n\n self.add_chunk_sized(&mut *list, new_capacity as usize);\n\n }\n\n\n\n pub fn total_capacity(&self, list: &List<T, S>) -> usize {\n\n list.total_capacity\n\n }\n\n\n\n pub fn chunk_capacity(&self, list: &List<T, S>) -> usize {\n\n unsafe { (*list.last).capacity() }\n", "file_path": "src/event_queue.rs", "rank": 96, "score": 29488.20841354731 }, { "content": " list.total_capacity += size;\n\n\n\n unsafe{&mut *new_node}\n\n }\n\n\n\n #[inline]\n\n fn on_new_chunk_cleanup(&self, list: &mut List<T, S>){\n\n if S::CLEANUP == CleanupMode::OnNewChunk{\n\n // this should acts as compile-time-if.\n\n if S::LOCK_ON_NEW_CHUNK_CLEANUP{\n\n let _lock = self.list.lock();\n\n self.cleanup_impl(list);\n\n } else {\n\n self.cleanup_impl(list);\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn add_chunk(&self, list: &mut List<T, S>) -> &mut DynamicChunk<T, S>{\n", "file_path": "src/event_queue.rs", "rank": 97, "score": 29488.00667305742 }, { "content": "\n\n #[inline]\n\n fn set_start_position(\n\n &self,\n\n list: &mut List<T, S>,\n\n new_start_position: Cursor<T, S>)\n\n {\n\n *self.start_position.lock() = Some(new_start_position);\n\n\n\n // update len_and_start_position_epoch in each chunk\n\n let first_chunk = unsafe{&mut *list.first};\n\n let new_epoch = first_chunk.chunk_state(Ordering::Relaxed).epoch().increment();\n\n unsafe {\n\n foreach_chunk_mut(\n\n first_chunk,\n\n null(),\n\n Ordering::Relaxed, // we're under mutex\n\n |chunk| {\n\n chunk.set_epoch(new_epoch, Ordering::Relaxed, Ordering::Release);\n\n 
Continue(())\n", "file_path": "src/event_queue.rs", "rank": 98, "score": 29487.131705695683 }, { "content": " // add chunk and push value there\n\n node = self.add_chunk(&mut *list);\n\n unsafe{ node.push_unchecked(value, Ordering::Relaxed); }\n\n }\n\n };\n\n }\n\n }\n\n\n\n /// EventReader will start receive events from NOW.\n\n /// It will not see events that was pushed BEFORE subscription.\n\n pub fn subscribe(&self, list: &mut List<T, S>) -> EventReader<T, S>{\n\n if list.readers_count == 0{\n\n // Keep alive. Decrements in unsubscribe\n\n unsafe { Arc::increment_strong_count(self); }\n\n }\n\n list.readers_count += 1;\n\n\n\n let last_chunk = unsafe{&*list.last};\n\n let chunk_state = last_chunk.chunk_state(Ordering::Relaxed);\n\n\n", "file_path": "src/event_queue.rs", "rank": 99, "score": 29486.98060616317 } ]
Rust
eval/src/values/mod.rs
slowli/arithmetic-parser
3e01a6069ddea36740126c40386deea0b6dfaee7
use hashbrown::HashMap; use core::{ any::{type_name, Any}, fmt, }; use crate::{ alloc::{vec, Rc, String, Vec}, fns, }; use arithmetic_parser::{MaybeSpanned, StripCode}; mod env; mod function; mod ops; mod variable_map; pub use self::{ env::Environment, function::{CallContext, Function, InterpretedFn, NativeFn}, variable_map::{Assertions, Comparisons, Prelude, VariableMap}, }; #[derive(Debug, Clone, Copy, PartialEq)] #[non_exhaustive] pub enum ValueType { Prim, Bool, Function, Tuple(usize), Object, Array, Ref, } impl fmt::Display for ValueType { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Prim => formatter.write_str("primitive value"), Self::Bool => formatter.write_str("boolean value"), Self::Function => formatter.write_str("function"), Self::Tuple(1) => formatter.write_str("tuple with 1 element"), Self::Object => formatter.write_str("object"), Self::Tuple(size) => write!(formatter, "tuple with {} elements", size), Self::Array => formatter.write_str("array"), Self::Ref => formatter.write_str("reference"), } } } pub struct OpaqueRef { value: Rc<dyn Any>, type_name: &'static str, dyn_eq: fn(&dyn Any, &dyn Any) -> bool, dyn_fmt: fn(&dyn Any, &mut fmt::Formatter<'_>) -> fmt::Result, } #[allow(renamed_and_removed_lints, clippy::unknown_clippy_lints)] impl OpaqueRef { #[allow(clippy::missing_panics_doc)] pub fn new<T>(value: T) -> Self where T: Any + fmt::Debug + PartialEq, { Self { value: Rc::new(value), type_name: type_name::<T>(), dyn_eq: |this, other| { let this_cast = this.downcast_ref::<T>().unwrap(); other .downcast_ref::<T>() .map_or(false, |other_cast| other_cast == this_cast) }, dyn_fmt: |this, formatter| { let this_cast = this.downcast_ref::<T>().unwrap(); fmt::Debug::fmt(this_cast, formatter) }, } } #[allow(clippy::missing_panics_doc)] pub fn with_identity_eq<T>(value: T) -> Self where T: Any + fmt::Debug, { Self { value: Rc::new(value), type_name: type_name::<T>(), dyn_eq: |this, other| { let this_data = (this as *const dyn Any).cast::<()>(); let other_data = (other as *const dyn Any).cast::<()>(); this_data == other_data }, dyn_fmt: |this, formatter| { let this_cast = this.downcast_ref::<T>().unwrap(); fmt::Debug::fmt(this_cast, formatter) }, } } pub fn downcast_ref<T: Any>(&self) -> Option<&T> { self.value.downcast_ref() } } impl Clone for OpaqueRef { fn clone(&self) -> Self { Self { value: Rc::clone(&self.value), type_name: self.type_name, dyn_eq: self.dyn_eq, dyn_fmt: self.dyn_fmt, } } } impl PartialEq for OpaqueRef { fn eq(&self, other: &Self) -> bool { (self.dyn_eq)(self.value.as_ref(), other.value.as_ref()) } } impl fmt::Debug for OpaqueRef { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_tuple("OpaqueRef") .field(&self.value.as_ref()) .finish() } } impl fmt::Display for OpaqueRef { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { write!(formatter, "{}::", self.type_name)?; (self.dyn_fmt)(self.value.as_ref(), formatter) } } #[derive(Debug)] #[non_exhaustive] pub enum Value<'a, T> { Prim(T), Bool(bool), Function(Function<'a, T>), Tuple(Vec<Value<'a, T>>), Object(HashMap<String, Value<'a, T>>), Ref(OpaqueRef), } pub type SpannedValue<'a, T> = MaybeSpanned<'a, Value<'a, T>>; impl<'a, T> Value<'a, T> { pub fn native_fn(function: impl NativeFn<T> + 'static) -> Self { Self::Function(Function::Native(Rc::new(function))) } pub fn wrapped_fn<Args, F>(fn_to_wrap: F) -> Self where fns::FnWrapper<Args, F>: NativeFn<T> + 'static, { let wrapped = fns::wrap::<Args, _>(fn_to_wrap); 
Self::native_fn(wrapped) } pub(crate) fn interpreted_fn(function: InterpretedFn<'a, T>) -> Self { Self::Function(Function::Interpreted(Rc::new(function))) } pub fn void() -> Self { Self::Tuple(vec![]) } pub fn opaque_ref(value: impl Any + fmt::Debug + PartialEq) -> Self { Self::Ref(OpaqueRef::new(value)) } pub fn value_type(&self) -> ValueType { match self { Self::Prim(_) => ValueType::Prim, Self::Bool(_) => ValueType::Bool, Self::Function(_) => ValueType::Function, Self::Tuple(elements) => ValueType::Tuple(elements.len()), Self::Object(_) => ValueType::Object, Self::Ref(_) => ValueType::Ref, } } pub fn is_void(&self) -> bool { matches!(self, Self::Tuple(tuple) if tuple.is_empty()) } pub fn is_function(&self) -> bool { matches!(self, Self::Function(_)) } } impl<T: Clone> Clone for Value<'_, T> { fn clone(&self) -> Self { match self { Self::Prim(lit) => Self::Prim(lit.clone()), Self::Bool(bool) => Self::Bool(*bool), Self::Function(function) => Self::Function(function.clone()), Self::Tuple(tuple) => Self::Tuple(tuple.clone()), Self::Object(fields) => Self::Object(fields.clone()), Self::Ref(reference) => Self::Ref(reference.clone()), } } } impl<T: 'static + Clone> StripCode for Value<'_, T> { type Stripped = Value<'static, T>; fn strip_code(self) -> Self::Stripped { match self { Self::Prim(lit) => Value::Prim(lit), Self::Bool(bool) => Value::Bool(bool), Self::Function(function) => Value::Function(function.strip_code()), Self::Tuple(tuple) => { Value::Tuple(tuple.into_iter().map(StripCode::strip_code).collect()) } Self::Object(fields) => Value::Object( fields .into_iter() .map(|(name, value)| (name, value.strip_code())) .collect(), ), Self::Ref(reference) => Value::Ref(reference), } } } impl<'a, T: Clone> From<&Value<'a, T>> for Value<'a, T> { fn from(reference: &Value<'a, T>) -> Self { reference.clone() } } impl<T: PartialEq> PartialEq for Value<'_, T> { fn eq(&self, rhs: &Self) -> bool { match (self, rhs) { (Self::Prim(this), Self::Prim(other)) => this == other, (Self::Bool(this), Self::Bool(other)) => this == other, (Self::Tuple(this), Self::Tuple(other)) => this == other, (Self::Object(this), Self::Object(other)) => this == other, (Self::Function(this), Self::Function(other)) => this.is_same_function(other), (Self::Ref(this), Self::Ref(other)) => this == other, _ => false, } } } impl<T: fmt::Display> fmt::Display for Value<'_, T> { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Prim(value) => fmt::Display::fmt(value, formatter), Self::Bool(true) => formatter.write_str("true"), Self::Bool(false) => formatter.write_str("false"), Self::Ref(opaque_ref) => fmt::Display::fmt(opaque_ref, formatter), Self::Function(_) => formatter.write_str("[function]"), Self::Object(fields) => { formatter.write_str("#{ ")?; for (name, value) in fields.iter() { write!(formatter, "{} = {}; ", name, value)?; } formatter.write_str("}") } Self::Tuple(elements) => { formatter.write_str("(")?; for (i, element) in elements.iter().enumerate() { fmt::Display::fmt(element, formatter)?; if i + 1 < elements.len() { formatter.write_str(", ")?; } } formatter.write_str(")") } } } } #[cfg(test)] mod tests { use super::*; use core::cmp::Ordering; #[test] fn opaque_ref_equality() { let value = Value::<f32>::opaque_ref(Ordering::Less); let same_value = Value::<f32>::opaque_ref(Ordering::Less); assert_eq!(value, same_value); assert_eq!(value, value.clone()); let other_value = Value::<f32>::opaque_ref(Ordering::Greater); assert_ne!(value, other_value); } #[test] fn opaque_ref_formatting() { let 
value = OpaqueRef::new(Ordering::Less); assert_eq!(value.to_string(), "core::cmp::Ordering::Less"); } }
use hashbrown::HashMap; use core::{ any::{type_name, Any}, fmt, }; use crate::{ alloc::{vec, Rc, String, Vec}, fns, }; use arithmetic_parser::{MaybeSpanned, StripCode}; mod env; mod function; mod ops; mod variable_map; pub use self::{ env::Environment, function::{CallContext, Function, InterpretedFn, NativeFn}, variable_map::{Assertions, Comparisons, Prelude, VariableMap}, }; #[derive(Debug, Clone, Copy, PartialEq)] #[non_exhaustive] pub enum ValueType { Prim, Bool, Function, Tuple(usize), Object, Array, Ref, } impl fmt::Display for ValueType { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Prim => formatter.write_str("primitive value"), Self::Bool => formatter.write_str("boolean value"), Self::Function => formatter.write_str("function"), Self::Tuple(1) => formatter.write_str("tuple with 1 element"), Self::Object => formatter.write_str("object"), Self::Tuple(size) => write!(formatter, "tuple with {} elements", size), Self::Array => formatter.write_str("array"), Self::Ref => formatter.write_str("reference"), } } } pub struct OpaqueRef { value: Rc<dyn Any>, type_name: &'static str, dyn_eq: fn(&dyn Any, &dyn Any) -> bool, dyn_fmt: fn(&dyn Any, &mut fmt::Formatter<'_>) -> fmt::Result, } #[allow(renamed_and_removed_lints, clippy::unknown_clippy_lints)] impl OpaqueRef { #[allow(clippy::missing_panics_doc)] pub fn new<T>(value: T) -> Self where T: Any + fmt::Debug + PartialEq, { Self { value: Rc::new(value), type_name: type_name::<T>(), dyn_eq: |this, other| { let this_cast = this.downcast_ref::<T>().unwrap(); other .downcast_ref::<T>() .map_or(false, |other_cast| other_cast == this_cast) }, dyn_fmt: |this, formatter| { let this_cast = this.downcast_ref::<T>().unwrap(); fmt::Debug::fmt(this_cast, formatter) }, } } #[allow(clippy::missing_panics_doc)] pub fn with_identity_eq<T>(value: T) -> Self where T: Any + fmt::Debug, { Self { value: Rc::new(value), type_name: type_name::<T>(), dyn_eq: |this, other| { let this_data = (this as *const dyn Any).cast::<()>(); let other_data = (other as *const dyn Any).cast::<()>(); this_data == other_data }, dyn_fmt: |this, formatter| { let this_cast = this.downcast_ref::<T>().unwrap(); fmt::Debug::fmt(this_cast, formatter) }, } } pub fn downcast_ref<T: Any>(&self) -> Option<&T> { self.value.downcast_ref() } } impl Clone for OpaqueRef { fn clone(&self) -> Self { Self { value: Rc::clone(&self.value), type_name: self.type_name, dyn_eq: self.dyn_eq, dyn_fmt: self.dyn_fmt, } } } impl PartialEq for OpaqueRef { fn eq(&self, other: &Self) -> bool { (self.dyn_eq)(self.value.as_ref(), other.value.as_ref()) } } impl fmt::Debug for OpaqueRef { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_tuple("OpaqueRef") .field(&self.value.as_ref()) .finish() } } impl fmt::Display for OpaqueRef { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { write!(formatter, "{}::", self.type_name)?; (self.dyn_fmt)(self.value.as_ref(), formatter) } } #[derive(Debug)] #[non_exhaustive] pub enum Value<'a, T> { Prim(T), Bool(bool), Function(Function<'a, T>), Tuple(Vec<Value<'a, T>>), Object(HashMap<String, Value<'a, T>>), Ref(OpaqueRef), } pub type SpannedValue<'a, T> = MaybeSpanned<'a, Value<'a, T>>; impl<'a, T> Value<'a, T> { pub fn native_fn(function: impl NativeFn<T> + 'static) -> Self { Self::Function(Function::Native(Rc::new(function))) } pub fn wrapped_fn<Args, F>(fn_to_wrap: F) -> Self where fns::FnWrapper<Args, F>: NativeFn<T> + 'static, { let wrapped = fns::wrap::<Args, _>(fn_to_wrap); 
Self::native_fn(wrapped) } pub(crate) fn interpreted_fn(function: InterpretedFn<'a, T>) -> Self { Self::Function(Function::Interpreted(Rc::new(function))) } pub fn void() -> Self { Self::Tuple(vec![]) } pub fn opaque_ref(value: impl Any + fmt::Debug + PartialEq) -> Self { Self::Ref(OpaqueRef::new(value)) } pub fn value_type(&self) -> ValueType { match self { Self::Prim(_) => ValueType::Prim, Self::Bool(_) => ValueType::Bool, Self::Function(_) => ValueType::Function, Self::Tuple(elements) => ValueType::Tuple(elements.len()), Self::Object(_) => ValueType::Object, Self::Ref(_) => ValueType::Ref, } } pub fn is_void(&self) -> bool { matches!(self, Self::Tuple(tuple) if tuple.is_empty()) } pub fn is_function(&self) -> bool { matches!(self, Self::Function(_)) } } impl<T: Clone> Clone for Value<'_, T> { fn clone(&self) -> Self { match self { Self::Prim(lit) => Self::Prim(lit.clone()), Self::Bool(bool) => Self::Bool(*bool), Self::Function(function) => Self::Function(function.clone()), Self::Tuple(tuple) => Self::Tuple(tuple.clone()), Self::Object(fields) => Self::Object(fields.clone()), Self::Ref(reference) => Self::Ref(reference.clone()), } } } impl<T: 'static + Clone> StripCode for Value<'_, T> { type Stripped = Value<'static, T>; fn strip_code(self) -> Self::Stripped { match self { Self::Prim(lit) => Value::Prim(lit), Self::Bool(bool) => Value::Bool(bool), Self::Function(function) => Value::Function(function.strip_code()), Self::Tuple(tuple) => { Value::Tuple(tuple.into_iter().map(StripCode::strip_code).collect()) } Self::Object(fields) => Value::Object( fields .into_iter() .map(|(name, value)| (name, value.strip_code())) .collect(), ), Self::Ref(reference) => Value::Ref(reference), } } } impl<'a, T: Clone> From<&Value<'a, T>> for Value<'a, T> { fn from(reference: &Value<'a, T>) -> Self { reference.clone() } } impl<T: PartialEq> PartialEq for Value<'_, T> { fn eq(&self, rhs: &Self) -> bool { match (self, rhs) { (Self::Prim(this), Self::Prim(other)) => this == other, (Self::Bool(this), Self::Bool(other)) => this ==
his == other, _ => false, } } } impl<T: fmt::Display> fmt::Display for Value<'_, T> { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Prim(value) => fmt::Display::fmt(value, formatter), Self::Bool(true) => formatter.write_str("true"), Self::Bool(false) => formatter.write_str("false"), Self::Ref(opaque_ref) => fmt::Display::fmt(opaque_ref, formatter), Self::Function(_) => formatter.write_str("[function]"), Self::Object(fields) => { formatter.write_str("#{ ")?; for (name, value) in fields.iter() { write!(formatter, "{} = {}; ", name, value)?; } formatter.write_str("}") } Self::Tuple(elements) => { formatter.write_str("(")?; for (i, element) in elements.iter().enumerate() { fmt::Display::fmt(element, formatter)?; if i + 1 < elements.len() { formatter.write_str(", ")?; } } formatter.write_str(")") } } } } #[cfg(test)] mod tests { use super::*; use core::cmp::Ordering; #[test] fn opaque_ref_equality() { let value = Value::<f32>::opaque_ref(Ordering::Less); let same_value = Value::<f32>::opaque_ref(Ordering::Less); assert_eq!(value, same_value); assert_eq!(value, value.clone()); let other_value = Value::<f32>::opaque_ref(Ordering::Greater); assert_ne!(value, other_value); } #[test] fn opaque_ref_formatting() { let value = OpaqueRef::new(Ordering::Less); assert_eq!(value.to_string(), "core::cmp::Ordering::Less"); } }
other, (Self::Tuple(this), Self::Tuple(other)) => this == other, (Self::Object(this), Self::Object(other)) => this == other, (Self::Function(this), Self::Function(other)) => this.is_same_function(other), (Self::Ref(this), Self::Ref(other)) => t
function_block-random_span
[ { "content": "/// Default implementation of [`VisitMut::visit_function_mut()`].\n\npub fn visit_function_mut<Prim, V>(visitor: &mut V, function: &mut Function<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: VisitMut<Prim> + ?Sized,\n\n{\n\n visitor.visit_tuple_mut(&mut function.args);\n\n visitor.visit_type_mut(&mut function.return_type);\n\n}\n", "file_path": "typing/src/visit.rs", "rank": 0, "score": 394420.7955593652 }, { "content": "/// Default implementation of [`VisitMut::visit_object_mut()`].\n\npub fn visit_object_mut<Prim, V>(visitor: &mut V, object: &mut Object<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: VisitMut<Prim> + ?Sized,\n\n{\n\n for (_, ty) in object.iter_mut() {\n\n visitor.visit_type_mut(ty);\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 1, "score": 394304.4672530079 }, { "content": "fn comparison_type_defs() -> Vec<(&'static str, Type)> {\n\n // TODO: imprecise typing!\n\n vec![\n\n (\"LESS\", Type::NUM),\n\n (\"EQUAL\", Type::NUM),\n\n (\"GREATER\", Type::NUM),\n\n (\"cmp\", binary_fn()),\n\n (\"min\", binary_fn()),\n\n (\"max\", binary_fn()),\n\n ]\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n#[allow(clippy::type_complexity)] // not that complex, really\n\npub struct StdLibrary<T: 'static> {\n\n constants: &'static [(&'static str, T)],\n\n unary: &'static [(&'static str, fn(T) -> T)],\n\n binary: &'static [(&'static str, fn(T, T) -> T)],\n\n}\n\n\n", "file_path": "cli/src/library.rs", "rank": 2, "score": 393880.09351213154 }, { "content": "/// Checks if the provided string is a valid variable name.\n\npub fn is_valid_variable_name(name: &str) -> bool {\n\n if name.is_empty() || !name.is_ascii() {\n\n return false;\n\n }\n\n\n\n match var_name(InputSpan::new(name)) {\n\n Ok((rest, _)) => rest.fragment().is_empty(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "parser/src/parser/mod.rs", "rank": 3, "score": 390688.75543619366 }, { "content": "/// Default implementation of [`Visit::visit_function()`].\n\npub fn visit_function<Prim, V>(visitor: &mut V, function: &Function<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: Visit<Prim> + ?Sized,\n\n{\n\n visitor.visit_tuple(&function.args);\n\n visitor.visit_type(&function.return_type);\n\n}\n\n\n\n/// Recursive traversal across the exclusive reference to a [`Type`].\n\n///\n\n/// Inspired by the [`VisitMut` trait from `syn`].\n\n///\n\n/// [`VisitMut` trait from `syn`]: https://docs.rs/syn/^1/syn/visit_mut/trait.VisitMut.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use arithmetic_typing::{ast::TypeAst, arith::Num, Type};\n\n/// use arithmetic_typing::visit::{self, VisitMut};\n", "file_path": "typing/src/visit.rs", "rank": 4, "score": 378266.05291744496 }, { "content": "/// Default implementation of [`Visit::visit_object()`].\n\npub fn visit_object<Prim, V>(visitor: &mut V, object: &Object<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: Visit<Prim> + ?Sized,\n\n{\n\n for (_, ty) in object.iter() {\n\n visitor.visit_type(ty);\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 5, "score": 378147.2998699541 }, { "content": "/// Default implementation of [`VisitMut::visit_tuple_mut()`].\n\npub fn visit_tuple_mut<Prim, V>(visitor: &mut V, tuple: &mut Tuple<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: VisitMut<Prim> + ?Sized,\n\n{\n\n if let Some(middle) = tuple.parts_mut().1 {\n\n visitor.visit_middle_len_mut(middle.len_mut());\n\n }\n\n for ty in tuple.element_types_mut() {\n\n visitor.visit_type_mut(ty);\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 6, 
"score": 360824.73550585634 }, { "content": "/// Default implementation of [`VisitMut::visit_type_mut()`].\n\npub fn visit_type_mut<Prim, V>(visitor: &mut V, ty: &mut Type<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: VisitMut<Prim> + ?Sized,\n\n{\n\n match ty {\n\n Type::Any | Type::Var(_) | Type::Prim(_) => {}\n\n Type::Dyn(constraints) => visitor.visit_dyn_constraints_mut(constraints),\n\n Type::Tuple(tuple) => visitor.visit_tuple_mut(tuple),\n\n Type::Object(obj) => visitor.visit_object_mut(obj),\n\n Type::Function(function) => visitor.visit_function_mut(function.as_mut()),\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 7, "score": 347204.0122841485 }, { "content": "#[doc(hidden)] // necessary for `wrap_fn` macro\n\npub fn enforce_closure_type<T, A, F>(function: F) -> F\n\nwhere\n\n F: for<'a> Fn(Vec<SpannedValue<'a, T>>, &mut CallContext<'_, 'a, A>) -> EvalResult<'a, T>,\n\n{\n\n function\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n alloc::{format, ToOwned},\n\n Environment, ExecutableModule, Prelude, Value, WildcardId,\n\n };\n\n\n\n use arithmetic_parser::grammars::{F32Grammar, Parse, Untyped};\n\n use assert_matches::assert_matches;\n\n\n\n #[test]\n\n fn functions_with_primitive_args() {\n", "file_path": "eval/src/fns/wrapper/mod.rs", "rank": 8, "score": 345921.8296798561 }, { "content": "/// Default implementation of [`Visit::visit_tuple()`].\n\npub fn visit_tuple<Prim, V>(visitor: &mut V, tuple: &Tuple<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: Visit<Prim> + ?Sized,\n\n{\n\n for (_, ty) in tuple.element_types() {\n\n visitor.visit_type(ty);\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 9, "score": 343460.0107914985 }, { "content": "pub fn create_int_env<T>(wrapping: bool) -> (Environment<'static, T>, TypeEnvironment)\n\nwhere\n\n T: ReplLiteral + ops::Rem + WrappingNeg + CheckedRem,\n\n{\n\n const REM_ERROR_MSG: &str = \"Cannot calculate remainder for a divisor of zero\";\n\n\n\n let mut env: Environment<'static, T> = Prelude\n\n .iter()\n\n .chain(Assertions.iter())\n\n .chain(Comparisons.iter())\n\n .chain(T::STD_LIB.variables())\n\n .collect();\n\n\n\n env.insert_native_fn(\"array\", fns::Array);\n\n if wrapping {\n\n env.insert_wrapped_fn(\"rem\", |x: T, y: T| {\n\n if y == T::zero() {\n\n Err(REM_ERROR_MSG.to_owned())\n\n } else if y.wrapping_neg().is_one() {\n\n // Prevent a panic with `T::min_value() % -1`.\n", "file_path": "cli/src/library.rs", "rank": 10, "score": 336126.7304445832 }, { "content": "/// Default implementation of [`Visit::visit_type()`].\n\npub fn visit_type<Prim, V>(visitor: &mut V, ty: &Type<Prim>)\n\nwhere\n\n Prim: PrimitiveType,\n\n V: Visit<Prim> + ?Sized,\n\n{\n\n match ty {\n\n Type::Any => { /* Do nothing. 
*/ }\n\n Type::Dyn(constraints) => visitor.visit_dyn_constraints(constraints),\n\n Type::Var(var) => visitor.visit_var(*var),\n\n Type::Prim(primitive) => visitor.visit_primitive(primitive),\n\n Type::Tuple(tuple) => visitor.visit_tuple(tuple),\n\n Type::Object(obj) => visitor.visit_object(obj),\n\n Type::Function(function) => visitor.visit_function(function.as_ref()),\n\n }\n\n}\n\n\n", "file_path": "typing/src/visit.rs", "rank": 11, "score": 329346.8999436733 }, { "content": "fn dbg_fn<Prim: PrimitiveType>() -> Function<Prim> {\n\n Function::builder()\n\n .with_varargs(Type::Any, UnknownLen::param(0))\n\n .returning(Type::void())\n\n}\n\n\n", "file_path": "typing/tests/integration/examples/mod.rs", "rank": 12, "score": 315831.92199786333 }, { "content": "/// Numeric literal used in `NumGrammar`s.\n\npub trait NumLiteral: 'static + Clone + fmt::Debug {\n\n /// Tries to parse a literal.\n\n fn parse(input: InputSpan<'_>) -> NomResult<'_, Self>;\n\n}\n\n\n", "file_path": "parser/src/grammars/mod.rs", "rank": 13, "score": 311548.1470023969 }, { "content": "type MapFn<'r, 'a, T> = fn((&'r String, &'r Value<'a, T>)) -> (&'r str, &'r Value<'a, T>);\n\n\n\n/// Iterator over references of the `Environment` entries.\n\n#[derive(Debug)]\n\npub struct Iter<'r, 'a, T> {\n\n inner: iter::Map<hash_map::Iter<'r, String, Value<'a, T>>, MapFn<'r, 'a, T>>,\n\n}\n\n\n\nimpl<'r, 'a, T> Iterator for Iter<'r, 'a, T> {\n\n type Item = (&'r str, &'r Value<'a, T>);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.inner.size_hint()\n\n }\n\n}\n\n\n", "file_path": "eval/src/values/env.rs", "rank": 14, "score": 309635.36050345574 }, { "content": "fn lvalue_tuple(elements: Vec<SpannedLvalue<'_, ValueType>>) -> Lvalue<'_, ValueType> {\n\n Lvalue::Tuple(Destructure {\n\n start: elements,\n\n middle: None,\n\n end: vec![],\n\n })\n\n}\n\n\n", "file_path": "parser/src/parser/tests/mod.rs", "rank": 15, "score": 304461.9322807996 }, { "content": "/// Constraint that can be placed on [`Type`]s.\n\n///\n\n/// Constraints can be placed on [`Function`] type variables, and can be applied\n\n/// to types in [`TypeArithmetic`] impls. For example, [`NumArithmetic`] places\n\n/// the [`Linearity`] constraint on types involved in arithmetic ops.\n\n///\n\n/// The constraint mechanism is similar to trait constraints in Rust, but is much more limited:\n\n///\n\n/// - Constraints cannot be parametric (cf. parameters in traits, such `AsRef<_>`\n\n/// or `Iterator<Item = _>`).\n\n/// - Constraints are applied to types in separation; it is impossible to create a constraint\n\n/// involving several type variables.\n\n/// - Constraints cannot contradict each other.\n\n///\n\n/// # Implementation rules\n\n///\n\n/// - [`Display`](fmt::Display) must display constraint as an identifier (e.g., `Lin`).\n\n/// The string presentation of a constraint must be unique within a [`PrimitiveType`];\n\n/// it is used to identify constraints in a [`ConstraintSet`].\n\n///\n\n/// [`TypeArithmetic`]: crate::arith::TypeArithmetic\n\n/// [`NumArithmetic`]: crate::arith::NumArithmetic\n\npub trait Constraint<Prim: PrimitiveType>: fmt::Display + Send + Sync + 'static {\n\n /// Returns a [`Visit`]or that will be applied to constrained [`Type`]s. 
The visitor\n\n /// may use `substitutions` to resolve types and `errors` to record constraint errors.\n\n ///\n\n /// # Tips\n\n ///\n\n /// - You can use [`StructConstraint`] for typical use cases, which involve recursively\n\n /// traversing `ty`.\n\n fn visitor<'r>(\n\n &self,\n\n substitutions: &'r mut Substitutions<Prim>,\n\n errors: OpErrors<'r, Prim>,\n\n ) -> Box<dyn Visit<Prim> + 'r>;\n\n\n\n /// Clones this constraint into a `Box`.\n\n ///\n\n /// This method should be implemented by implementing [`Clone`] and boxing its output.\n\n fn clone_boxed(&self) -> Box<dyn Constraint<Prim>>;\n\n}\n\n\n", "file_path": "typing/src/arith/constraints.rs", "rank": 16, "score": 289820.35549218406 }, { "content": "#[test]\n\nfn tuples_with_dyn_length_as_object_fields() {\n\n let code = r#\"\n\n test = |obj| { obj.xs == obj.ys.filter(|y| y > 1) };\n\n test(#{ xs: (2, 3), ys: (1, 2, 3) });\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env\n\n .insert(\"filter\", Prelude::Filter)\n\n .process_with_arithmetic(&NumArithmetic::with_comparisons(), &block)\n\n .unwrap();\n\n\n\n assert_eq!(\n\n type_env[\"test\"].to_string(),\n\n \"for<'T: { xs: [Num], ys: [Num; N] }> ('T) -> Bool\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/object.rs", "rank": 17, "score": 283124.89074668137 }, { "content": "#[derive(Debug)]\n\nstruct StructConstraintVisitor<'r, Prim: PrimitiveType, C, F> {\n\n inner: StructConstraint<Prim, C, F>,\n\n substitutions: &'r mut Substitutions<Prim>,\n\n errors: OpErrors<'r, Prim>,\n\n}\n\n\n\nimpl<'r, Prim, C, F> Visit<Prim> for StructConstraintVisitor<'r, Prim, C, F>\n\nwhere\n\n Prim: PrimitiveType,\n\n C: Constraint<Prim> + Clone,\n\n F: Fn(&Prim) -> bool + 'static,\n\n{\n\n fn visit_type(&mut self, ty: &Type<Prim>) {\n\n match ty {\n\n Type::Dyn(constraints) => {\n\n if !constraints.inner.simple.contains(&self.inner.constraint) {\n\n self.errors.push(ErrorKind::failed_constraint(\n\n ty.clone(),\n\n self.inner.constraint.clone(),\n\n ));\n", "file_path": "typing/src/arith/constraints.rs", "rank": 18, "score": 261525.75904113936 }, { "content": "fn evaluate<'a>(env: &mut Environment<'a, f32>, program: &'a str) -> Value<'a, f32> {\n\n try_evaluate(env, program).unwrap()\n\n}\n\n\n", "file_path": "eval/tests/basics/main.rs", "rank": 19, "score": 260386.7008466635 }, { "content": "#[test]\n\nfn tuples_as_object_fields() {\n\n let code = r#\"\n\n test = |obj| { obj.xs == obj.ys.map(|y| (y, y * 2)) };\n\n test(#{ xs: ((1, 2), (3, 4)), ys: (3, 4) });\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env\n\n .insert(\"map\", Prelude::Map)\n\n .process_statements(&block)\n\n .unwrap();\n\n\n\n assert_eq!(\n\n type_env[\"test\"].to_string(),\n\n \"for<'T: { xs: [(Num, Num); N], ys: [Num; N] }> ('T) -> Bool\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/object.rs", "rank": 20, "score": 252626.24767755202 }, { "content": "#[test]\n\nfn functional_fields_in_objects() {\n\n let code = r#\"\n\n obj = #{ x: 1, run: |x, y| x + y };\n\n run = obj.run;\n\n run((1, 2), (3, 4)) == (4, 6);\n\n (obj.run)(obj.x, 5)\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let output = type_env.process_statements(&block).unwrap();\n\n assert_eq!(output.to_string(), \"Num\");\n\n\n\n assert_eq!(\n\n type_env[\"obj\"].to_string(),\n\n \"{ run: for<'T: Ops> ('T, 'T) 
-> 'T, x: Num }\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/object.rs", "rank": 21, "score": 252623.08455250206 }, { "content": "fn convert_iter<Prim: PrimitiveType, S, Ty, I>(\n\n iter: I,\n\n) -> impl Iterator<Item = (String, Type<Prim>)>\n\nwhere\n\n I: IntoIterator<Item = (S, Ty)>,\n\n S: Into<String>,\n\n Ty: Into<Type<Prim>>,\n\n{\n\n iter.into_iter()\n\n .map(|(name, ty)| (name.into(), TypeEnvironment::prepare_type(ty)))\n\n}\n\n\n\nimpl<Prim: PrimitiveType, S, Ty> FromIterator<(S, Ty)> for TypeEnvironment<Prim>\n\nwhere\n\n S: Into<String>,\n\n Ty: Into<Type<Prim>>,\n\n{\n\n fn from_iter<I: IntoIterator<Item = (S, Ty)>>(iter: I) -> Self {\n\n Self {\n\n variables: convert_iter(iter).collect(),\n", "file_path": "typing/src/env/mod.rs", "rank": 22, "score": 250440.98032753376 }, { "content": "fn read_file(path: &str) -> String {\n\n fs::read_to_string(path).unwrap_or_else(|err| panic!(\"Cannot read file {}: {}\", path, err))\n\n}\n\n\n", "file_path": "typing/tests/check_readme.rs", "rank": 23, "score": 249813.64421012142 }, { "content": "pub fn create_modular_env(modulus: u64) -> (Environment<'static, u64>, TypeEnvironment) {\n\n let mut env: Environment<'_, u64> = Prelude.iter().chain(Assertions.iter()).collect();\n\n env.insert(\"MAX_VALUE\", Value::Prim(modulus - 1));\n\n\n\n let type_env = defs::Prelude::iter()\n\n .chain(defs::Assertions::iter())\n\n .chain(vec![(\"MAX_VALUE\", Type::NUM)])\n\n .collect();\n\n\n\n (env, type_env)\n\n}\n\n\n\nmacro_rules! declare_real_functions {\n\n ($type:ident) => {\n\n impl ReplLiteral for $type {\n\n const STD_LIB: StdLibrary<$type> = StdLibrary {\n\n constants: &[\n\n (\"INF\", $type::INFINITY),\n\n (\"E\", std::$type::consts::E),\n\n (\"PI\", std::$type::consts::PI),\n", "file_path": "cli/src/library.rs", "rank": 24, "score": 247708.0941001046 }, { "content": "#[test]\n\nfn functional_fields_in_object_constraints() {\n\n let code = \"test = |obj| (obj.run)(obj.x, 1);\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut env = TypeEnvironment::new();\n\n env.insert(\"push\", Prelude::Push);\n\n env.process_statements(&block).unwrap();\n\n assert_eq!(\n\n env[\"test\"].to_string(),\n\n \"for<'T: { run: ('U, Num) -> 'V, x: 'U }> ('T) -> 'V\"\n\n );\n\n\n\n let code_samples = &[\n\n (\"test(#{ x: 1, run: |x: Num, y: Num| x + y })\", \"Num\"),\n\n (\"test(#{ x: 1, run: |x, y| x + y })\", \"Num\"),\n\n (\"test(#{ run: push, x: (5, 6) })\", \"(Num, Num, Num)\"),\n\n ];\n\n for &(run_code, expected_output) in code_samples {\n\n let run_block = F32Grammar::parse_statements(run_code).unwrap();\n\n let output = env.process_statements(&run_block).unwrap();\n\n assert_eq!(output.to_string(), expected_output);\n\n }\n\n}\n\n\n", "file_path": "typing/tests/integration/object.rs", "rank": 25, "score": 247125.71669204714 }, { "content": "#[test]\n\nfn dyn_type_as_function() {\n\n let mut type_env = TypeEnvironment::new();\n\n type_env.insert(\"some_lin\", DynConstraints::just(Linearity));\n\n\n\n let bogus_call = \"some_lin(1)\";\n\n let bogus_call = F32Grammar::parse_statements(bogus_call).unwrap();\n\n let err = type_env\n\n .process_statements(&bogus_call)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_matches!(err.kind(), ErrorKind::TypeMismatch(_, _));\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 26, "score": 245180.87855727258 }, { "content": "pub fn create_complex_env<T: ReplLiteral>() -> (Environment<'static, T>, TypeEnvironment) {\n\n let env = Prelude\n\n .iter()\n\n 
.chain(Assertions.iter())\n\n .chain(T::STD_LIB.variables())\n\n .collect();\n\n\n\n let type_env = defs::Prelude::iter()\n\n .chain(defs::Assertions::iter())\n\n .chain(T::STD_LIB.type_defs())\n\n .collect();\n\n\n\n (env, type_env)\n\n}\n", "file_path": "cli/src/library.rs", "rank": 27, "score": 243677.764174224 }, { "content": "pub fn create_float_env<T: ReplLiteral>() -> (Environment<'static, T>, TypeEnvironment) {\n\n let mut env: Environment<'static, T> = Prelude\n\n .iter()\n\n .chain(Assertions.iter())\n\n .chain(Comparisons.iter())\n\n .chain(T::STD_LIB.variables())\n\n .collect();\n\n env.insert_native_fn(\"array\", fns::Array);\n\n\n\n let type_env = defs::Prelude::iter()\n\n .chain(defs::Assertions::iter())\n\n .chain(comparison_type_defs())\n\n .chain(T::STD_LIB.type_defs())\n\n .chain(vec![(\"array\", defs::Prelude::array(NumType::Num).into())])\n\n .collect();\n\n\n\n (env, type_env)\n\n}\n\n\n\nmacro_rules! declare_complex_functions {\n", "file_path": "cli/src/library.rs", "rank": 28, "score": 243677.764174224 }, { "content": "#[derive(Debug)]\n\nstruct ErrorPrecursor<Prim: PrimitiveType> {\n\n kind: ErrorKind<Prim>,\n\n location: Vec<ErrorLocation>,\n\n}\n\n\n\nimpl<Prim: PrimitiveType> ErrorPrecursor<Prim> {\n\n fn into_expr_error<'a, T: Grammar<'a>>(\n\n self,\n\n context: ErrorContext<Prim>,\n\n root_expr: &SpannedExpr<'a, T>,\n\n ) -> Error<'a, Prim> {\n\n Error {\n\n inner: ErrorLocation::walk_expr(&self.location, root_expr).copy_with_extra(self.kind),\n\n root_span: root_expr.with_no_extra(),\n\n context,\n\n location: self.location,\n\n }\n\n }\n\n\n\n fn into_assignment_error<'a>(\n", "file_path": "typing/src/error/op_errors.rs", "rank": 29, "score": 239957.79072200967 }, { "content": "/// Marker trait for object-safe constraints, i.e., constraints that can be included\n\n/// into a [`DynConstraints`](crate::DynConstraints).\n\n///\n\n/// Object safety is similar to this notion in Rust. For a constraint `C` to be object-safe,\n\n/// it should be the case that `dyn C` (the untagged union of all types implementing `C`)\n\n/// implements `C`. As an example, this is the case for [`Linearity`], but is not the case\n\n/// for [`Ops`]. Indeed, [`Ops`] requires the type to be addable to itself,\n\n/// which would be impossible for `dyn Ops`.\n\npub trait ObjectSafeConstraint<Prim: PrimitiveType>: Constraint<Prim> {}\n\n\n\n/// Helper to define *structural* [`Constraint`]s, i.e., constraints recursively checking\n\n/// the provided type.\n\n///\n\n/// The following logic is used to check whether a type satisfies the constraint:\n\n///\n\n/// - Primitive types satisfy the constraint iff the predicate provided in [`Self::new()`]\n\n/// returns `true`.\n\n/// - [`Type::Any`] always satisfies the constraint.\n\n/// - [`Type::Dyn`] types satisfy the constraint iff the [`Constraint`] wrapped by this helper\n\n/// is present among [`DynConstraints`](crate::DynConstraints). 
Thus,\n\n/// if the wrapped constraint is not [object-safe](ObjectSafeConstraint), it will not be satisfied\n\n/// by any `Dyn` type.\n\n/// - Functional types never satisfy the constraint.\n\n/// - A compound type (i.e., a tuple) satisfies the constraint iff all its items satisfy\n\n/// the constraint.\n\n/// - If [`Self::deny_dyn_slices()`] is set, tuple types need to have static length.\n\n///\n\n/// # Examples\n", "file_path": "typing/src/arith/constraints.rs", "rank": 30, "score": 239617.51596569392 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct TypeResolver<'a, Prim: PrimitiveType> {\n\n substitutions: &'a Substitutions<Prim>,\n\n}\n\n\n\nimpl<Prim: PrimitiveType> VisitMut<Prim> for TypeResolver<'_, Prim> {\n\n fn visit_type_mut(&mut self, ty: &mut Type<Prim>) {\n\n let fast_resolved = self.substitutions.fast_resolve(ty);\n\n if !ptr::eq(ty, fast_resolved) {\n\n *ty = fast_resolved.clone();\n\n }\n\n visit::visit_type_mut(self, ty);\n\n }\n\n\n\n fn visit_middle_len_mut(&mut self, len: &mut TupleLen) {\n\n *len = self.substitutions.resolve_len(*len);\n\n }\n\n}\n\n\n", "file_path": "typing/src/arith/substitutions/mod.rs", "rank": 31, "score": 239150.46228075173 }, { "content": "#[derive(Debug)]\n\nstruct TypeSpecifier<'a, Prim: PrimitiveType> {\n\n substitutions: &'a mut Substitutions<Prim>,\n\n variance: Variance,\n\n}\n\n\n\nimpl<'a, Prim: PrimitiveType> TypeSpecifier<'a, Prim> {\n\n fn new(substitutions: &'a mut Substitutions<Prim>) -> Self {\n\n Self {\n\n substitutions,\n\n variance: Variance::Co,\n\n }\n\n }\n\n}\n\n\n\nimpl<Prim: PrimitiveType> VisitMut<Prim> for TypeSpecifier<'_, Prim> {\n\n fn visit_type_mut(&mut self, ty: &mut Type<Prim>) {\n\n match ty {\n\n Type::Any if self.variance == Variance::Co => {\n\n *ty = self.substitutions.new_type_var();\n\n }\n", "file_path": "typing/src/arith/substitutions/mod.rs", "rank": 32, "score": 239139.70110317477 }, { "content": "/// Arithmetic allowing to customize primitive types and how unary and binary operations are handled\n\n/// during type inference.\n\n///\n\n/// # Examples\n\n///\n\n/// See crate examples for examples how define custom arithmetics.\n\npub trait TypeArithmetic<Prim: PrimitiveType> {\n\n /// Handles a unary operation.\n\n fn process_unary_op(\n\n &self,\n\n substitutions: &mut Substitutions<Prim>,\n\n context: &UnaryOpContext<Prim>,\n\n errors: OpErrors<'_, Prim>,\n\n ) -> Type<Prim>;\n\n\n\n /// Handles a binary operation.\n\n fn process_binary_op(\n\n &self,\n\n substitutions: &mut Substitutions<Prim>,\n\n context: &BinaryOpContext<Prim>,\n\n errors: OpErrors<'_, Prim>,\n\n ) -> Type<Prim>;\n\n}\n\n\n\n/// Code spans related to a unary operation.\n\n///\n", "file_path": "typing/src/arith/mod.rs", "rank": 33, "score": 239012.50671313176 }, { "content": "#[allow(unused_variables)]\n\npub trait VisitMut<Prim: PrimitiveType> {\n\n /// Visits a generic type.\n\n ///\n\n /// The default implementation calls one of more specific methods corresponding to the `ty`\n\n /// variant. For \"simple\" types (variables, params, primitive types) does nothing.\n\n fn visit_type_mut(&mut self, ty: &mut Type<Prim>) {\n\n visit_type_mut(self, ty);\n\n }\n\n\n\n /// Visits a tuple type.\n\n ///\n\n /// The default implementation calls [`Self::visit_middle_len_mut()`] for the middle length\n\n /// if the tuple has a middle. 
Then, [`Self::visit_type_mut()`] is called\n\n /// for each tuple element, including the middle element if any.\n\n fn visit_tuple_mut(&mut self, tuple: &mut Tuple<Prim>) {\n\n visit_tuple_mut(self, tuple);\n\n }\n\n\n\n /// Visits an object type.\n\n fn visit_object_mut(&mut self, object: &mut Object<Prim>) {\n", "file_path": "typing/src/visit.rs", "rank": 34, "score": 238957.93318765907 }, { "content": "/// Function on zero or more [`Value`]s.\n\n///\n\n/// Native functions are defined in the Rust code and then can be used from the interpreted\n\n/// code. See [`fns`](crate::fns) module docs for different ways to define native functions.\n\npub trait NativeFn<T> {\n\n /// Executes the function on the specified arguments.\n\n fn evaluate<'a>(\n\n &self,\n\n args: Vec<SpannedValue<'a, T>>,\n\n context: &mut CallContext<'_, 'a, T>,\n\n ) -> EvalResult<'a, T>;\n\n}\n\n\n\nimpl<T, F: 'static> NativeFn<T> for F\n\nwhere\n\n F: for<'a> Fn(Vec<SpannedValue<'a, T>>, &mut CallContext<'_, 'a, T>) -> EvalResult<'a, T>,\n\n{\n\n fn evaluate<'a>(\n\n &self,\n\n args: Vec<SpannedValue<'a, T>>,\n\n context: &mut CallContext<'_, 'a, T>,\n\n ) -> EvalResult<'a, T> {\n\n self(args, context)\n\n }\n", "file_path": "eval/src/values/function.rs", "rank": 35, "score": 237964.27978495264 }, { "content": "#[test]\n\nfn object_and_tuple_constraints_via_fields() {\n\n let code = \"|obj| { obj.x == 1; obj.0 }\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"obj.0\");\n\n assert_matches!(err.kind(), ErrorKind::CannotIndex);\n\n assert_matches!(\n\n err.context(),\n\n ErrorContext::TupleIndex { ty } if ty.to_string() == \"{ x: Num }\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 36, "score": 236930.66651827522 }, { "content": "#[test]\n\nfn dyn_type_with_bogus_function_call() {\n\n let code = \"hash(1, |x| x + 1)\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"|x| x + 1\");\n\n assert_eq!(err.location(), [fn_arg(1)]);\n\n assert_matches!(\n\n err.context(),\n\n ErrorContext::FnCall { call_signature, .. 
}\n\n if call_signature.to_string() == \"(Num, (Num) -> Num) -> Num\"\n\n );\n\n\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint {\n\n ty: Type::Function(_),\n\n ..\n\n }\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 37, "score": 235013.50868411383 }, { "content": "#[test]\n\nfn invalid_field_name() {\n\n let code = \"xs = (1, 2); xs.123456789012345678901234567890\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(\n\n *err.main_span().fragment(),\n\n \"123456789012345678901234567890\"\n\n );\n\n assert_eq!(err.location(), []);\n\n assert_matches!(err.context(), ErrorContext::None);\n\n assert_matches!(err.kind(), ErrorKind::InvalidFieldName(_));\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 38, "score": 234420.61396639075 }, { "content": "#[wasm_bindgen]\n\npub fn evaluate(program: &str) -> Result<JsValue, JsValue> {\n\n let block = Untyped::<F64Grammar>::parse_statements(program)\n\n .map_err(|err| Error::new(&err.to_string()))?;\n\n\n\n let mut env = Prelude.iter().chain(Assertions.iter()).collect();\n\n initialize_env(&mut env);\n\n\n\n let value = env\n\n .compile_module(WildcardId, &block)\n\n .map_err(|err| Error::new(&err.to_string()))?\n\n .run()\n\n .map_err(|err| Error::new(&err.to_string()))?;\n\n\n\n match value {\n\n Value::Prim(number) => Ok(JsValue::from(number)),\n\n Value::Bool(flag) => Ok(JsValue::from(flag)),\n\n _ => Err(Error::new(\"returned value is not presentable\").into()),\n\n }\n\n}\n", "file_path": "test-wasm/src/lib.rs", "rank": 39, "score": 234131.6971993863 }, { "content": "#[derive(Debug)]\n\nstruct OccurrenceChecker<'a, Prim: PrimitiveType> {\n\n substitutions: &'a Substitutions<Prim>,\n\n var_indexes: HashSet<usize>,\n\n recursive_var: Option<usize>,\n\n}\n\n\n\nimpl<'a, Prim: PrimitiveType> OccurrenceChecker<'a, Prim> {\n\n fn new(\n\n substitutions: &'a Substitutions<Prim>,\n\n var_indexes: impl IntoIterator<Item = usize>,\n\n ) -> Self {\n\n Self {\n\n substitutions,\n\n var_indexes: var_indexes.into_iter().collect(),\n\n recursive_var: None,\n\n }\n\n }\n\n}\n\n\n\nimpl<Prim: PrimitiveType> Visit<Prim> for OccurrenceChecker<'_, Prim> {\n", "file_path": "typing/src/arith/substitutions/mod.rs", "rank": 40, "score": 233669.8309827611 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nenum StrType {\n\n Str,\n\n Bool,\n\n}\n\n\n\nimpl fmt::Display for StrType {\n\n fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(match self {\n\n Self::Str => \"Str\",\n\n Self::Bool => \"Bool\",\n\n })\n\n }\n\n}\n\n\n\nimpl FromStr for StrType {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"Str\" => Ok(Self::Str),\n", "file_path": "typing/examples/strings.rs", "rank": 41, "score": 233027.62213783216 }, { "content": "/// Marker trait for possible literals.\n\n///\n\n/// This trait is somewhat of a crutch, necessary to ensure that [function wrappers] can accept\n\n/// number arguments and distinguish them from other types (booleans, vectors, tuples, etc.).\n\n///\n\n/// [function wrappers]: crate::fns::FnWrapper\n\npub trait Number: Clone + 'static {}\n\n\n\nimpl Number for i8 {}\n\nimpl Number for u8 {}\n\nimpl Number for i16 {}\n\nimpl Number for u16 {}\n\nimpl Number for i32 {}\n\nimpl Number for u32 {}\n\nimpl Number for i64 {}\n\nimpl 
Number for u64 {}\n\nimpl Number for i128 {}\n\nimpl Number for u128 {}\n\n\n\nimpl Number for f32 {}\n\nimpl Number for f64 {}\n\n\n\n#[cfg(feature = \"num-complex\")]\n\nimpl Number for num_complex::Complex32 {}\n\n#[cfg(feature = \"num-complex\")]\n\nimpl Number for num_complex::Complex64 {}\n\n\n\n#[cfg(feature = \"num-bigint\")]\n\nimpl Number for num_bigint::BigInt {}\n\n#[cfg(feature = \"num-bigint\")]\n\nimpl Number for num_bigint::BigUint {}\n", "file_path": "eval/src/lib.rs", "rank": 42, "score": 230460.42904693622 }, { "content": "fn unify_tuples<Prim: PrimitiveType>(\n\n substitutions: &mut Substitutions<Prim>,\n\n lhs: &Tuple<Prim>,\n\n rhs: &Tuple<Prim>,\n\n) -> Result<(), ErrorKind<Prim>> {\n\n extract_errors(|errors| {\n\n substitutions.unify_tuples(lhs, rhs, TupleContext::Generic, errors);\n\n })\n\n}\n\n\n", "file_path": "typing/src/arith/substitutions/tests.rs", "rank": 43, "score": 227501.0970916198 }, { "content": "fn unify_objects<Prim: PrimitiveType>(\n\n substitutions: &mut Substitutions<Prim>,\n\n lhs: &Object<Prim>,\n\n rhs: &Object<Prim>,\n\n) -> Result<(), ErrorKind<Prim>> {\n\n extract_errors(|errors| {\n\n substitutions.unify_objects(lhs, rhs, errors);\n\n })\n\n}\n\n\n", "file_path": "typing/src/arith/substitutions/tests.rs", "rank": 44, "score": 227392.0046961265 }, { "content": "fn prepare_env() -> TypeEnvironment<GroupPrim> {\n\n let rand_scalar = Function::builder().returning(SC);\n\n let hash_to_scalar = Function::builder()\n\n .with_varargs(DynConstraints::just(Hashed), UnknownLen::param(0))\n\n .returning(SC);\n\n let to_scalar = Function::builder().with_arg(GE).returning(SC);\n\n\n\n let mut env: TypeEnvironment<GroupPrim> = Prelude::iter().chain(Assertions::iter()).collect();\n\n env.insert(\"dbg\", dbg_fn())\n\n .insert(\"GEN\", GE)\n\n .insert(\"ORDER\", SC)\n\n .insert(\"rand_scalar\", rand_scalar)\n\n .insert(\"hash_to_scalar\", hash_to_scalar)\n\n .insert(\"to_scalar\", to_scalar);\n\n env\n\n}\n\n\n", "file_path": "typing/tests/integration/examples/mod.rs", "rank": 45, "score": 225902.7448423999 }, { "content": "fn expect_compilation_error<'a>(env: &mut Environment<'a, f32>, program: &'a str) -> Error<'a> {\n\n let block = Untyped::<F32Grammar>::parse_statements(program).unwrap();\n\n env.compile_module(WildcardId, &block).unwrap_err()\n\n}\n\n\n", "file_path": "eval/tests/basics/main.rs", "rank": 46, "score": 223235.5010906819 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct StrArithmetic;\n\n\n\nimpl MapPrimitiveType<String> for StrArithmetic {\n\n type Prim = StrType;\n\n\n\n fn type_of_literal(&self, _lit: &String) -> Self::Prim {\n\n StrType::Str\n\n }\n\n}\n\n\n\nimpl TypeArithmetic<StrType> for StrArithmetic {\n\n fn process_unary_op<'a>(\n\n &self,\n\n substitutions: &mut Substitutions<StrType>,\n\n context: &UnaryOpContext<StrType>,\n\n errors: OpErrors<'a, StrType>,\n\n ) -> Type<StrType> {\n\n BoolArithmetic.process_unary_op(substitutions, context, errors)\n\n }\n\n\n", "file_path": "typing/examples/strings.rs", "rank": 47, "score": 222533.6527827339 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct StrGrammar;\n\n\n\nimpl ParseLiteral for StrGrammar {\n\n type Lit = String;\n\n\n\n /// Parses an ASCII string like `\"Hello, world!\"`.\n\n fn parse_literal(input: InputSpan<'_>) -> NomResult<'_, Self::Lit> {\n\n use nom::{\n\n branch::alt,\n\n bytes::complete::{escaped_transform, is_not},\n\n character::complete::char as tag_char,\n\n combinator::{cut, map, opt},\n\n sequence::{preceded, terminated},\n\n };\n\n\n\n let 
parser = escaped_transform(\n\n is_not(\"\\\\\\\"\\n\"),\n\n '\\\\',\n\n alt((\n\n map(tag_char('\\\\'), |_| \"\\\\\"),\n", "file_path": "typing/examples/strings.rs", "rank": 48, "score": 222533.6527827339 }, { "content": "fn tuple_element(index: usize) -> ErrorLocation {\n\n ErrorLocation::TupleElement(Some(TupleIndex::Start(index)))\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 49, "score": 212705.53157563022 }, { "content": "fn read_file(path: &str) -> String {\n\n fs::read_to_string(path).unwrap_or_else(|err| panic!(\"Cannot read file {}: {}\", path, err))\n\n}\n\n\n", "file_path": "parser/tests/check_readme.rs", "rank": 50, "score": 211880.2114203075 }, { "content": "fn read_file(path: &str) -> String {\n\n fs::read_to_string(path).unwrap_or_else(|err| panic!(\"Cannot read file {}: {}\", path, err))\n\n}\n\n\n", "file_path": "eval/tests/check_readme.rs", "rank": 51, "score": 211880.2114203075 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum GroupPrim {\n\n Bool,\n\n Scalar,\n\n GroupElement,\n\n}\n\n\n\nimpl fmt::Display for GroupPrim {\n\n fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(match self {\n\n Self::Bool => \"Bool\",\n\n Self::Scalar => \"Sc\",\n\n Self::GroupElement => \"Ge\",\n\n })\n\n }\n\n}\n\n\n\nimpl FromStr for GroupPrim {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "typing/tests/integration/examples/mod.rs", "rank": 52, "score": 211101.75850275866 }, { "content": "fn can_start_a_var_name(byte: u8) -> bool {\n\n byte == b'_' || byte.is_ascii_alphabetic()\n\n}\n\n\n", "file_path": "parser/src/grammars/mod.rs", "rank": 53, "score": 210323.39009587682 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn initialize_env(env: &mut Environment<'_, f64>) {\n\n const CONSTANTS: &[(&str, f64)] = &[\n\n (\"E\", f64::consts::E),\n\n (\"PI\", f64::consts::PI),\n\n (\"Inf\", f64::INFINITY),\n\n ];\n\n\n\n const UNARY_FNS: &[(&str, fn(f64) -> f64)] = &[\n\n // Rounding functions.\n\n (\"floor\", f64::floor),\n\n (\"ceil\", f64::ceil),\n\n (\"round\", f64::round),\n\n (\"frac\", f64::fract),\n\n // Exponential functions.\n\n (\"exp\", f64::exp),\n\n (\"ln\", f64::ln),\n\n (\"sinh\", f64::sinh),\n\n (\"cosh\", f64::cosh),\n\n (\"tanh\", f64::tanh),\n\n (\"asinh\", f64::asinh),\n", "file_path": "test-wasm/src/lib.rs", "rank": 54, "score": 209827.7646697695 }, { "content": "#[test]\n\nfn object_field_access() {\n\n let code = \"|obj| obj.x == 1\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let output = TypeEnvironment::new().process_statements(&block).unwrap();\n\n assert_eq!(output.to_string(), \"for<'T: { x: Num }> ('T) -> Bool\");\n\n\n\n let code = \"|pt| pt.x + pt.y\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let output = TypeEnvironment::new().process_statements(&block).unwrap();\n\n assert_eq!(\n\n output.to_string(),\n\n \"for<'T: { x: 'U, y: 'U }, 'U: Ops> ('T) -> 'U\"\n\n );\n\n\n\n let code = \"|pt| (pt.x, pt.y).fold(0, |acc, x| acc + x)\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let output = TypeEnvironment::new()\n\n .insert(\"fold\", Prelude::Fold)\n\n .process_statements(&block)\n\n .unwrap();\n", "file_path": "typing/tests/integration/object.rs", "rank": 55, "score": 209344.81787056834 }, { "content": "#[test]\n\nfn tuple_as_object() {\n\n let code = \"require_x = |obj| obj.x; require_x((1, 2))\";\n\n let block = 
F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"(1, 2)\");\n\n assert_matches!(err.kind(), ErrorKind::CannotAccessFields);\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 56, "score": 209188.82663014036 }, { "content": "type TupleTailAst<'a> = (Spanned<'a, SliceAst<'a>>, Vec<SpannedTypeAst<'a>>);\n\n\n", "file_path": "typing/src/ast/mod.rs", "rank": 57, "score": 208530.43235703226 }, { "content": "#[derive(Debug)]\n\nstruct ParamPlacement<Prim: PrimitiveType> {\n\n // Grouped by function index.\n\n type_params: HashMap<usize, Vec<usize>>,\n\n // Grouped by function index.\n\n len_params: HashMap<usize, Vec<usize>>,\n\n function_count: usize,\n\n current_function_idx: usize,\n\n constraints: ParamConstraints<Prim>,\n\n}\n\n\n\nimpl<Prim: PrimitiveType> ParamPlacement<Prim> {\n\n fn new(\n\n type_params: HashMap<usize, Vec<usize>>,\n\n len_params: HashMap<usize, Vec<usize>>,\n\n constraints: ParamConstraints<Prim>,\n\n ) -> Self {\n\n Self {\n\n type_params,\n\n len_params,\n\n function_count: 0,\n", "file_path": "typing/src/types/quantifier.rs", "rank": 58, "score": 208136.45086781518 }, { "content": "#[test]\n\nfn incompatible_field_types_via_fn() {\n\n let code = r#\"\n\n require_x = |obj| obj.x == 1;\n\n |obj| { !obj.x; require_x(obj) }\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"obj\");\n\n assert_eq!(err.location(), [fn_arg(0), ErrorLocation::from(\"x\")]);\n\n assert_matches!(err.context(), ErrorContext::FnCall { .. });\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::TypeMismatch(lhs, rhs) if *lhs == Type::BOOL && *rhs == Type::NUM\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 59, "score": 207686.97574717036 }, { "content": "// Refactored from `num_traits::pow()`:\n\n// https://docs.rs/num-traits/0.2.14/src/num_traits/pow.rs.html#189-211\n\nfn wrapping_exp<T: Copy + One + WrappingMul>(mut base: T, mut exp: usize) -> T {\n\n if exp == 0 {\n\n return T::one();\n\n }\n\n\n\n while exp & 1 == 0 {\n\n base = base.wrapping_mul(&base);\n\n exp >>= 1;\n\n }\n\n if exp == 1 {\n\n return base;\n\n }\n\n\n\n let mut acc = base;\n\n while exp > 1 {\n\n exp >>= 1;\n\n base = base.wrapping_mul(&base);\n\n if exp & 1 == 1 {\n\n acc = acc.wrapping_mul(&base);\n\n }\n", "file_path": "eval/src/arith/generic.rs", "rank": 60, "score": 204976.012756667 }, { "content": "#[test]\n\nfn object_and_tuple_constraints() {\n\n let code = \"|obj| { obj.x; (x, ...) 
= obj; }\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"(x, ...)\");\n\n assert_matches!(err.kind(), ErrorKind::CannotAccessFields);\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 61, "score": 204868.15362393306 }, { "content": "fn repeat(function: Function<'_, f32>, times: f32) -> Result<Function<'_, f32>, String> {\n\n if times <= 0.0 {\n\n Err(\"`times` should be positive\".to_owned())\n\n } else {\n\n let function = Repeated {\n\n inner: function.strip_code(),\n\n times: times as usize,\n\n };\n\n Ok(Function::native(function))\n\n }\n\n}\n\n\n", "file_path": "eval/tests/hof.rs", "rank": 62, "score": 204430.36231967196 }, { "content": "#[test]\n\nfn dyn_type_with_object_bound() {\n\n let input = InputSpan::new(\"dyn { x: Num } + Lin\");\n\n let (rest, ty) = type_definition(input).unwrap();\n\n\n\n assert!(rest.fragment().is_empty());\n\n let object = match ty {\n\n TypeAst::Dyn(constraints) => {\n\n assert_eq!(constraints.terms.len(), 1);\n\n constraints.object.unwrap()\n\n }\n\n _ => panic!(\"Unexpected type: {:?}\", ty),\n\n };\n\n assert_eq!(object.fields.len(), 1);\n\n let (field_name, field_ty) = &object.fields[0];\n\n assert_eq!(*field_name.fragment(), \"x\");\n\n assert_matches!(field_ty.extra, TypeAst::Ident);\n\n}\n\n\n", "file_path": "typing/src/ast/tests.rs", "rank": 63, "score": 204243.95277027818 }, { "content": "#[test]\n\nfn incompatible_field_types() {\n\n let code = r#\"\n\n require_x = |obj| obj.x == 1;\n\n require_x(#{ x: (1, 2) })\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"#{ x: (1, 2) }\");\n\n assert_eq!(err.location(), [fn_arg(0), ErrorLocation::from(\"x\")]);\n\n assert_matches!(err.context(), ErrorContext::FnCall { .. 
});\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::TypeMismatch(lhs, rhs) if *lhs == Type::NUM && rhs.to_string() == \"(Num, Num)\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 64, "score": 204166.6033665031 }, { "content": "fn object(input: InputSpan<'_>) -> NomResult<'_, ObjectAst<'_>> {\n\n let colon = tuple((ws, tag_char(':'), ws));\n\n let object_field = separated_pair(ident, colon, with_span(type_definition));\n\n let object_body = terminated(separated_list1(comma_sep, object_field), opt(comma_sep));\n\n let object = preceded(\n\n terminated(tag_char('{'), ws),\n\n cut(terminated(object_body, tuple((ws, tag_char('}'))))),\n\n );\n\n map(object, |fields| ObjectAst { fields })(input)\n\n}\n\n\n", "file_path": "typing/src/ast/mod.rs", "rank": 65, "score": 203184.40773345414 }, { "content": "/// `zip` function signature.\n\nfn zip_fn_type() -> Function<Num> {\n\n Function::builder()\n\n .with_arg(Type::param(0).repeat(UnknownLen::param(0)))\n\n .with_arg(Type::param(1).repeat(UnknownLen::param(0)))\n\n .returning(Type::slice(\n\n (Type::param(0), Type::param(1)),\n\n UnknownLen::param(0),\n\n ))\n\n .with_static_lengths(&[0])\n\n .into()\n\n}\n\n\n", "file_path": "typing/tests/integration/main.rs", "rank": 66, "score": 201834.9165138062 }, { "content": "fn hash_fn_type() -> Function<Num> {\n\n Function::builder()\n\n .with_varargs(DynConstraints::just(Hashed), UnknownLen::param(0))\n\n .returning(Type::NUM)\n\n}\n\n\n", "file_path": "typing/tests/integration/main.rs", "rank": 67, "score": 201829.49204188547 }, { "content": "#[test]\n\nfn repeated_field_in_object_destructure() {\n\n let code = \"{ x, x } = #{ x: 1 };\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"x\");\n\n assert_eq!(err.main_span().location_offset(), 5);\n\n assert_matches!(err.kind(), ErrorKind::RepeatedField(field) if field == \"x\");\n\n}\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 68, "score": 200905.46491431317 }, { "content": "#[test]\n\nfn repeated_field_in_object_initialization() {\n\n let code = \"obj = #{ x: 1, x: 2 == 3 }; !obj.x\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"x\");\n\n assert_eq!(err.main_span().location_offset(), 15);\n\n assert_matches!(err.kind(), ErrorKind::RepeatedField(field) if field == \"x\");\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 69, "score": 200905.46491431317 }, { "content": "#[allow(unused_variables)]\n\npub trait Visit<Prim: PrimitiveType> {\n\n /// Visits a generic type.\n\n ///\n\n /// The default implementation calls one of more specific methods corresponding to the `ty`\n\n /// variant.\n\n fn visit_type(&mut self, ty: &Type<Prim>) {\n\n visit_type(self, ty);\n\n }\n\n\n\n /// Visits a type variable.\n\n ///\n\n /// The default implementation does nothing.\n\n fn visit_var(&mut self, var: TypeVar) {\n\n // Does nothing.\n\n }\n\n\n\n /// Visits a primitive type.\n\n ///\n\n /// The default implementation does nothing.\n\n fn visit_primitive(&mut self, primitive: &Prim) {\n", "file_path": "typing/src/visit.rs", "rank": 70, "score": 200827.8789378534 }, { "content": "// Helper trait to wrap type mapper and arithmetic.\n\ntrait 
FullArithmetic<Val, Prim: PrimitiveType>:\n\n MapPrimitiveType<Val, Prim = Prim> + TypeArithmetic<Prim>\n\n{\n\n}\n\n\n\nimpl<Val, Prim: PrimitiveType, T> FullArithmetic<Val, Prim> for T where\n\n T: MapPrimitiveType<Val, Prim = Prim> + TypeArithmetic<Prim>\n\n{\n\n}\n", "file_path": "typing/src/env/mod.rs", "rank": 71, "score": 200386.63647201128 }, { "content": "#[test]\n\nfn object_type_with_duplicate_fields() {\n\n let input = InputSpan::new(\"{ x: Num, x: (Num,) }\");\n\n let (_, ast) = TypeAst::parse(input).unwrap();\n\n let err = <Type>::try_from(&ast).unwrap_err().single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"x\");\n\n assert_eq!(err.main_span().location_offset(), 10);\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::AstConversion(AstConversionError::DuplicateField(field))\n\n if field == \"x\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/annotations.rs", "rank": 72, "score": 200038.45459847507 }, { "content": "#[test]\n\nfn no_required_field() {\n\n let code = r#\"\n\n require_x = |obj| obj.x == 1;\n\n require_x(#{ y: 2 });\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"#{ y: 2 }\");\n\n assert_eq!(err.location(), [fn_arg(0)]);\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::MissingFields { fields, available_fields }\n\n if fields.len() == 1 && fields.contains(\"x\") &&\n\n available_fields.len() == 1 && available_fields.contains(\"y\")\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 73, "score": 197748.09200137714 }, { "content": "#[derive(Debug)]\n\nstruct PolyTypeTransformer<'a, Prim: PrimitiveType> {\n\n mapping: ParamMapping,\n\n resolved_objects: HashMap<usize, Object<Prim>>,\n\n substitutions: &'a Substitutions<Prim>,\n\n}\n\n\n\nimpl<'a, Prim: PrimitiveType> PolyTypeTransformer<'a, Prim> {\n\n fn new(substitutions: &'a Substitutions<Prim>) -> Self {\n\n Self {\n\n mapping: ParamMapping::default(),\n\n resolved_objects: HashMap::new(),\n\n substitutions,\n\n }\n\n }\n\n\n\n fn object_constraint(&self, var_idx: usize) -> Option<&'a Object<Prim>> {\n\n let constraints = self.substitutions.constraints.get(&var_idx)?;\n\n constraints.object.as_ref()\n\n }\n\n}\n", "file_path": "typing/src/arith/substitutions/fns.rs", "rank": 74, "score": 197741.29264943965 }, { "content": "#[test]\n\nfn object_annotations_in_function() {\n\n let code = r#\"\n\n test = |obj: { x: _ }| obj.x == 1;\n\n test(#{ x: 1 });\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env.process_statements(&block).unwrap();\n\n\n\n assert_eq!(type_env[\"test\"].to_string(), \"({ x: Num }) -> Bool\");\n\n\n\n let bogus_code = \"test(#{ x: 1, y: 2 })\";\n\n let bogus_block = F32Grammar::parse_statements(bogus_code).unwrap();\n\n let err = type_env\n\n .process_statements(&bogus_block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_matches!(err.kind(), ErrorKind::FieldsMismatch { .. 
});\n\n}\n\n\n", "file_path": "typing/tests/integration/annotations.rs", "rank": 75, "score": 197588.93763589923 }, { "content": "#[test]\n\nfn extra_fields_in_dyn_fn_arg() {\n\n let code = \"|objs: [dyn { x: _ }; _]| objs.map(|obj| obj.x + obj.y)\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env\n\n .insert(\"map\", Prelude::Map)\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"|obj| obj.x + obj.y\");\n\n assert_eq!(err.location(), [fn_arg(1), fn_arg(0)]);\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::MissingFields { fields, .. } if fields.contains(\"y\")\n\n );\n\n}\n", "file_path": "typing/tests/integration/errors/annotations.rs", "rank": 76, "score": 196723.4896454252 }, { "content": "#[test]\n\nfn comparisons_when_switched_off() {\n\n let code = \"(1, 2, 3).filter(|x| x > 1)\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env.insert(\"filter\", Prelude::Filter);\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"x > 1\");\n\n assert!(err.location().is_empty());\n\n assert_matches!(err.context(), ErrorContext::BinaryOp(_));\n\n assert_matches!(err.kind(), ErrorKind::UnsupportedFeature(_));\n\n assert_eq!(\n\n err.kind().to_string(),\n\n \"Unsupported binary op: greater comparison\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 77, "score": 196638.31758511218 }, { "content": "#[test]\n\nfn incompatible_field_types_via_accesses() {\n\n let code = \"|obj| obj.x == 1 && !obj.x\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"!obj.x\");\n\n assert_eq!(err.location(), []);\n\n assert_matches!(err.context(), ErrorContext::UnaryOp(_));\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::TypeMismatch(lhs, rhs) if *lhs == Type::BOOL && *rhs == Type::NUM\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 78, "score": 196107.28180177708 }, { "content": "#[test]\n\nfn incorrect_tuple_length_returned_from_fn() {\n\n let code = \"double = |x| (x, x); (z,) = double(5);\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::TupleLenMismatch {\n\n lhs,\n\n rhs,\n\n context: TupleContext::Generic,\n\n } if *lhs == TupleLen::from(1) && *rhs == TupleLen::from(2)\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 79, "score": 195144.29936530415 }, { "content": "/// Trait allowing to mock out type annotation support together with [`WithMockedTypes`].\n\n/// It specifies recognized type annotations; if any other annotation is used, an error\n\n/// will be raised.\n\n///\n\n/// When used as a [`Parse`]r, all [`Features`] are on.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use arithmetic_parser::grammars::{F64Grammar, MockTypes, WithMockedTypes};\n\n/// struct MockedTypesList;\n\n///\n\n/// impl MockTypes for MockedTypesList {\n\n/// const MOCKED_TYPES: &'static [&'static str] = &[\"Num\"];\n\n/// }\n\n///\n\n/// // Grammar that recognizes `Num` type 
annotation.\n\n/// type Grammar = WithMockedTypes<F64Grammar, MockedTypesList>;\n\n/// ```\n\npub trait MockTypes: 'static {\n\n /// List of mocked type annotations.\n\n const MOCKED_TYPES: &'static [&'static str];\n\n}\n\n\n\n/// Decorator for a grammar that mocks type parsing.\n\n///\n\n/// # Examples\n\n///\n\n/// See [`MockTypes`] for examples of usage.\n\n#[derive(Debug)]\n\npub struct WithMockedTypes<T, Ty>(PhantomData<(T, Ty)>);\n\n\n\nimpl<T: ParseLiteral, Ty: MockTypes> ParseLiteral for WithMockedTypes<T, Ty> {\n\n type Lit = T::Lit;\n\n\n\n fn parse_literal(input: InputSpan<'_>) -> NomResult<'_, Self::Lit> {\n\n T::parse_literal(input)\n\n }\n\n}\n", "file_path": "parser/src/grammars/traits.rs", "rank": 80, "score": 195005.62366408756 }, { "content": "fn comma_separated_types(input: InputSpan<'_>) -> NomResult<'_, Vec<SpannedTypeAst<'_>>> {\n\n separated_list0(delimited(ws, tag_char(','), ws), with_span(type_definition))(input)\n\n}\n\n\n", "file_path": "typing/src/ast/mod.rs", "rank": 81, "score": 193730.07686822896 }, { "content": "#[test]\n\nfn unifying_tuples_with_dyn_lengths() {\n\n let code = r#\"\n\n xs: (_, ...[_], _) = (true, 1, 2, 3, 4);\n\n (_, _, ...ys) = xs; // should work\n\n zs: (...[_; _], _, Num) = xs; // should not work (Bool and Num cannot be unified)\n\n \"#;\n\n\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env.insert(\"true\", Type::BOOL);\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"(...[_; _], _, Num)\");\n\n assert_eq!(err.location(), [ErrorLocation::TupleElement(None)]);\n\n assert_matches!(\n\n err.context(),\n\n ErrorContext::Assignment { lhs, rhs }\n\n if lhs.to_string() == \"(...[Num], Num, Num)\" && *rhs == type_env[\"xs\"]\n\n );\n\n assert_incompatible_types(err.kind(), &Type::BOOL, &Type::NUM);\n\n\n\n assert_eq!(type_env[\"xs\"].to_string(), \"(Bool, ...[Num], Num)\");\n\n assert_eq!(type_env[\"ys\"].to_string(), \"[Num]\");\n\n}\n\n\n", "file_path": "typing/tests/integration/annotations.rs", "rank": 82, "score": 193396.66129323764 }, { "content": "#[test]\n\nfn constraint_with_dyn_object_and_cast() {\n\n let code = \"hash(#{ x: 1 } as dyn { x: Num } + Hash)\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let output = TypeEnvironment::new()\n\n .insert_object_safe_constraint(Hashed)\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap();\n\n\n\n assert_eq!(output, Type::NUM);\n\n}\n\n\n", "file_path": "typing/tests/integration/annotations.rs", "rank": 83, "score": 193281.65796630198 }, { "content": "#[test]\n\nfn incompatible_fields_via_constraints_for_object_constraint() {\n\n let code = \"|obj| { hash(obj); (obj.run)() }\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"(obj.run)()\");\n\n assert_eq!(err.location(), []);\n\n assert_matches!(err.context(), ErrorContext::FnCall { .. 
});\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint { ty, constraint }\n\n if ty.to_string() == \"() -> _\" && constraint.to_string() == \"Hash\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 84, "score": 193247.95001582414 }, { "content": "#[test]\n\nfn incompatible_fields_via_constraints_for_concrete_object() {\n\n let code = \"hash(#{ x: 1, y: || 2 })\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"#{ x: 1, y: || 2 }\");\n\n assert_eq!(err.location(), [fn_arg(0), ErrorLocation::from(\"y\")]);\n\n assert_matches!(err.context(), ErrorContext::FnCall { .. });\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint { ty, constraint }\n\n if ty.to_string() == \"() -> Num\" && constraint.to_string() == \"Hash\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 85, "score": 193247.95001582414 }, { "content": "#[test]\n\nfn extra_fields_are_retained_with_constraints() {\n\n let code = r#\"\n\n test = |obj| { obj.x == 1; obj };\n\n test(#{ x: 1, y: 2 }).y == 2;\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n TypeEnvironment::new().process_statements(&block).unwrap();\n\n}\n\n\n", "file_path": "typing/tests/integration/object.rs", "rank": 86, "score": 193204.30856252692 }, { "content": "#[test]\n\nfn function_as_arg_within_tuple() {\n\n let code = r#\"\n\n test_fn = |struct, y| {\n\n (fn, x) = struct;\n\n fn(x / 3) * y\n\n };\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n type_env.process_statements(&block).unwrap();\n\n\n\n assert_eq!(\n\n type_env.get(\"test_fn\").unwrap().to_string(),\n\n \"for<'T: Ops> (((Num) -> 'T, Num), 'T) -> 'T\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/basics.rs", "rank": 87, "score": 193164.4692385585 }, { "content": "fn assert_incompatible_types<Prim: PrimitiveType>(\n\n err: &ErrorKind<Prim>,\n\n first: &Type<Prim>,\n\n second: &Type<Prim>,\n\n) {\n\n let (x, y) = match err {\n\n ErrorKind::TypeMismatch(x, y) => (x, y),\n\n _ => panic!(\"Unexpected error type: {:?}\", err),\n\n };\n\n assert!(\n\n (x == first && y == second) || (x == second && y == first),\n\n \"Unexpected incompatible types: {:?}, expected: {:?}\",\n\n (x, y),\n\n (first, second)\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/main.rs", "rank": 88, "score": 193026.40040458276 }, { "content": "fn prepare_imprecise_env() -> TypeEnvironment {\n\n let rand_scalar = Function::builder().returning(Type::NUM);\n\n let hash_to_scalar = Function::builder()\n\n .with_varargs(DynConstraints::just(Hashed), UnknownLen::param(0))\n\n .returning(Type::NUM);\n\n let to_scalar = Function::builder().with_arg(Type::NUM).returning(Type::NUM);\n\n\n\n let mut env: TypeEnvironment = Prelude::iter().chain(Assertions::iter()).collect();\n\n env.insert(\"dbg\", dbg_fn())\n\n .insert(\"GEN\", Type::NUM)\n\n .insert(\"ORDER\", Type::NUM)\n\n .insert(\"rand_scalar\", rand_scalar)\n\n .insert(\"hash_to_scalar\", hash_to_scalar)\n\n .insert(\"to_scalar\", to_scalar);\n\n env\n\n}\n\n\n", "file_path": "typing/tests/integration/examples/mod.rs", "rank": 89, "score": 192540.4547459399 }, { "content": "/// Function arguments in the call position; e.g., `(a, B + 1)`.\n\n///\n\n/// # Return 
value\n\n///\n\n/// The second component of the returned tuple is set to `true` if the list is `,`-terminated.\n\nfn fn_args<'a, T, Ty>(input: InputSpan<'a>) -> NomResult<'a, (Vec<SpannedExpr<'a, T::Base>>, bool)>\n\nwhere\n\n T: Parse<'a>,\n\n Ty: GrammarType,\n\n{\n\n let maybe_comma = map(opt(preceded(ws::<Ty>, tag_char(','))), |c| c.is_some());\n\n\n\n preceded(\n\n terminated(tag_char('('), ws::<Ty>),\n\n // Once we've encountered the opening `(`, the input *must* correspond to the parser.\n\n cut(tuple((\n\n separated_list0(delimited(ws::<Ty>, tag_char(','), ws::<Ty>), expr::<T, Ty>),\n\n terminated(maybe_comma, tuple((ws::<Ty>, tag_char(')')))),\n\n ))),\n\n )(input)\n\n}\n\n\n", "file_path": "parser/src/parser/mod.rs", "rank": 90, "score": 191255.69632692973 }, { "content": "#[test]\n\nfn function_passed_as_arg_invalid_arg_type() {\n\n let code = r#\"\n\n mapper = |(x, y), map| (map(x), map(y));\n\n mapper((1, 2), |(x, _)| x);\n\n \"#;\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::TypeMismatch(Type::Tuple(t), rhs)\n\n if t.len() == TupleLen::from(2) && *rhs == Type::NUM\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/mod.rs", "rank": 91, "score": 191134.79538149427 }, { "content": "fn unify<Prim: PrimitiveType>(\n\n substitutions: &mut Substitutions<Prim>,\n\n lhs: &Type<Prim>,\n\n rhs: &Type<Prim>,\n\n) -> Result<(), ErrorKind<Prim>> {\n\n extract_errors(|errors| substitutions.unify(lhs, rhs, errors))\n\n}\n\n\n", "file_path": "typing/src/arith/substitutions/tests.rs", "rank": 92, "score": 190438.23843519547 }, { "content": "fn type_params(input: InputSpan<'_>) -> NomResult<'_, Vec<(Spanned<'_>, TypeConstraintsAst<'_>)>> {\n\n let type_bounds = preceded(tuple((ws, tag_char(':'), ws)), type_bounds);\n\n let type_param = tuple((type_param_ident, type_bounds));\n\n separated_list1(comma_sep, type_param)(input)\n\n}\n\n\n", "file_path": "typing/src/ast/mod.rs", "rank": 93, "score": 189771.21544271486 }, { "content": "#[test]\n\nfn incompatible_fields_via_constraints_for_object_constraint_rev() {\n\n let code = \"|obj| { (obj.run)(); hash(obj) }\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"obj\");\n\n assert_eq!(err.location(), [fn_arg(0), ErrorLocation::from(\"run\")]);\n\n assert_matches!(err.context(), ErrorContext::FnCall { .. });\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint { ty, .. 
} if ty.to_string() == \"() -> _\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 94, "score": 189678.4489294637 }, { "content": "#[test]\n\nfn unifying_dyn_object_as_lhs() {\n\n let constraints = DynConstraints::from(Object::just(\"x\", Type::NUM));\n\n let lhs = Type::Dyn(constraints.clone());\n\n\n\n {\n\n let mut substitutions = Substitutions::<Num>::default();\n\n unify(&mut substitutions, &lhs, &Type::Any).unwrap();\n\n assert!(substitutions.eqs.is_empty());\n\n assert!(substitutions.constraints.is_empty());\n\n }\n\n {\n\n let mut substitutions = Substitutions::<Num>::default();\n\n unify(&mut substitutions, &lhs, &Type::free_var(0)).unwrap();\n\n assert!(substitutions.eqs.is_empty());\n\n assert_eq!(substitutions.constraints.len(), 1);\n\n assert_eq!(substitutions.constraints[&0], constraints.inner);\n\n }\n\n\n\n // Object RHS.\n\n {\n", "file_path": "typing/src/arith/substitutions/tests.rs", "rank": 95, "score": 188975.53536805997 }, { "content": "#[test]\n\nfn dyn_constraint_non_object() {\n\n let code = \"#{ x: 1 } as dyn Lin as dyn { x: Num } + Lin\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_matches!(err.kind(), ErrorKind::CannotAccessFields);\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/annotations.rs", "rank": 96, "score": 188975.53536805997 }, { "content": "#[test]\n\nfn contradicting_constraint_with_dyn_object() {\n\n let code = \"hash(#{ x: 1 } as dyn { x: Num })\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_eq!(*err.main_span().fragment(), \"#{ x: 1 }\");\n\n assert_eq!(err.location(), [fn_arg(0)]);\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint { ty: Type::Dyn(_), constraint }\n\n if constraint.to_string() == \"Hash\"\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/annotations.rs", "rank": 97, "score": 188975.53536805997 }, { "content": "#[test]\n\nfn missing_field_after_object_annotation() {\n\n let code = \"|obj: { x: _ }| obj.x == obj.y\";\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let mut type_env = TypeEnvironment::new();\n\n let err = type_env.process_statements(&block).unwrap_err().single();\n\n\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::MissingFields { fields, .. } if fields.len() == 1 && fields.contains(\"y\")\n\n );\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/annotations.rs", "rank": 98, "score": 188900.22619520882 }, { "content": "#[test]\n\nfn incompatible_fields_in_embedded_obj() {\n\n let code_samples = &[\n\n \"|obj| { hash(obj); (obj.some.run)() }\",\n\n \"|obj| { hash(obj); some = obj.some; (some.run)() }\",\n\n \"|obj| { hash(obj); run = obj.some.run; run() }\",\n\n \"|obj| { (obj.some.run)(); hash(obj); }\",\n\n \"|obj| { some = obj.some; (some.run)(); hash(obj); }\",\n\n \"|obj| { run = obj.some.run; run(); hash(obj); }\",\n\n ];\n\n\n\n for &code in code_samples {\n\n let block = F32Grammar::parse_statements(code).unwrap();\n\n let err = TypeEnvironment::new()\n\n .insert(\"hash\", hash_fn_type())\n\n .process_statements(&block)\n\n .unwrap_err()\n\n .single();\n\n\n\n assert_matches!(\n\n err.kind(),\n\n ErrorKind::FailedConstraint { ty, .. 
} if ty.to_string() == \"() -> _\"\n\n );\n\n }\n\n}\n\n\n", "file_path": "typing/tests/integration/errors/object.rs", "rank": 99, "score": 188900.22619520882 } ]
Rust
starlark/src/syntax/validate.rs
creativemindplus/starlark-rust
4fc26687df9a975eeb26f6b3d6f995d188fd00ba
/* * Copyright 2018 The Starlark in Rust Authors. * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::collections::HashSet; use gazebo::prelude::*; use thiserror::Error; use crate::{ codemap::{CodeMap, Spanned}, errors::Diagnostic, syntax::{ ast::{ Argument, Assign, AssignIdentP, AssignOp, AstArgument, AstAssign, AstAssignIdent, AstExpr, AstParameter, AstStmt, AstString, Expr, Parameter, Stmt, }, Dialect, }, }; #[derive(Error, Debug)] enum ValidateError { #[error("`break` cannot be used outside of a `for` loop")] BreakOutsideLoop, #[error("`continue` cannot be used outside of a `for` loop")] ContinueOutsideLoop, #[error("`return` cannot be used outside of a `def` function")] ReturnOutsideDef, #[error("`load` must only occur at the top of a module")] LoadNotTop, #[error("`if` cannot be used outside `def` in this dialect")] NoTopLevelIf, #[error("`for` cannot be used outside `def` in this dialect")] NoTopLevelFor, #[error("left-hand-side of assignment must take the form `a`, `a.b` or `a[b]`")] InvalidLhs, #[error("left-hand-side of modifying assignment cannot be a list or tuple")] InvalidModifyLhs, } #[derive(Eq, PartialEq, Ord, PartialOrd)] enum ArgsStage { Positional, Named, Args, Kwargs, } #[derive(Error, Debug)] enum ArgumentDefinitionOrderError { #[error("positional argument after non positional")] PositionalThenNonPositional, #[error("named argument after *args or **kwargs")] NamedArgumentAfterStars, #[error("repeated named argument")] RepeatedNamed, #[error("Args array after another args or kwargs")] ArgsArrayAfterArgsOrKwargs, #[error("Multiple kwargs dictionary in arguments")] MultipleKwargs, } impl Expr { pub fn check_call( f: AstExpr, args: Vec<AstArgument>, codemap: &CodeMap, ) -> anyhow::Result<Expr> { let err = |span, msg| Err(Diagnostic::new(msg, span, codemap.dupe())); let mut stage = ArgsStage::Positional; let mut named_args = HashSet::new(); for arg in &args { match &arg.node { Argument::Positional(_) => { if stage != ArgsStage::Positional { return err( arg.span, ArgumentDefinitionOrderError::PositionalThenNonPositional, ); } } Argument::Named(n, _) => { if stage > ArgsStage::Named { return err( arg.span, ArgumentDefinitionOrderError::NamedArgumentAfterStars, ); } else if !named_args.insert(&n.node) { return err(n.span, ArgumentDefinitionOrderError::RepeatedNamed); } else { stage = ArgsStage::Named; } } Argument::Args(_) => { if stage > ArgsStage::Named { return err( arg.span, ArgumentDefinitionOrderError::ArgsArrayAfterArgsOrKwargs, ); } else { stage = ArgsStage::Args; } } Argument::KwArgs(_) => { if stage == ArgsStage::Kwargs { return err(arg.span, ArgumentDefinitionOrderError::MultipleKwargs); } else { stage = ArgsStage::Kwargs; } } } } Ok(Expr::Call(box f, args)) } } fn test_param_name<'a, T>( argset: &mut HashSet<&'a str>, n: &'a AstAssignIdent, arg: &Spanned<T>, codemap: &CodeMap, ) -> anyhow::Result<()> { if argset.contains(n.node.0.as_str()) { return Err(Diagnostic::new( ArgumentUseOrderError::DuplicateParameterName, 
arg.span, codemap.dupe(), )); } argset.insert(&n.node.0); Ok(()) } #[derive(Error, Debug)] enum ArgumentUseOrderError { #[error("duplicated parameter name")] DuplicateParameterName, #[error("positional parameter after non positional")] PositionalThenNonPositional, #[error("Default parameter after args array or kwargs dictionary")] DefaultParameterAfterStars, #[error("Args parameter after another args or kwargs parameter")] ArgsParameterAfterStars, #[error("Multiple kwargs dictionary in parameters")] MultipleKwargs, } fn check_parameters(parameters: &[AstParameter], codemap: &CodeMap) -> anyhow::Result<()> { let err = |span, msg| Err(Diagnostic::new(msg, span, codemap.dupe())); let mut argset = HashSet::new(); let mut seen_args = false; let mut seen_kwargs = false; let mut seen_optional = false; for arg in parameters.iter() { match &arg.node { Parameter::Normal(n, ..) => { if seen_kwargs || seen_optional { return err(arg.span, ArgumentUseOrderError::PositionalThenNonPositional); } test_param_name(&mut argset, n, arg, codemap)?; } Parameter::WithDefaultValue(n, ..) => { if seen_kwargs { return err(arg.span, ArgumentUseOrderError::DefaultParameterAfterStars); } seen_optional = true; test_param_name(&mut argset, n, arg, codemap)?; } Parameter::NoArgs => { if seen_args || seen_kwargs { return err(arg.span, ArgumentUseOrderError::ArgsParameterAfterStars); } seen_args = true; } Parameter::Args(n, ..) => { if seen_args || seen_kwargs { return err(arg.span, ArgumentUseOrderError::ArgsParameterAfterStars); } seen_args = true; test_param_name(&mut argset, n, arg, codemap)?; } Parameter::KwArgs(n, ..) => { if seen_kwargs { return err(arg.span, ArgumentUseOrderError::MultipleKwargs); } seen_kwargs = true; test_param_name(&mut argset, n, arg, codemap)?; } } } Ok(()) } impl Expr { pub fn check_lambda( parameters: Vec<AstParameter>, body: AstExpr, codemap: &CodeMap, ) -> anyhow::Result<Expr> { check_parameters(&parameters, codemap)?; Ok(Expr::Lambda(parameters, box body, ())) } } impl Stmt { pub fn check_def( name: AstString, parameters: Vec<AstParameter>, return_type: Option<Box<AstExpr>>, stmts: AstStmt, codemap: &CodeMap, ) -> anyhow::Result<Stmt> { check_parameters(&parameters, codemap)?; let name = name.into_map(|s| AssignIdentP(s, ())); Ok(Stmt::Def(name, parameters, return_type, box stmts, ())) } pub fn check_assign(codemap: &CodeMap, x: AstExpr) -> anyhow::Result<AstAssign> { Ok(Spanned { span: x.span, node: match x.node { Expr::Tuple(xs) | Expr::List(xs) => { Assign::Tuple(xs.into_try_map(|x| Self::check_assign(codemap, x))?) 
} Expr::Dot(a, b) => Assign::Dot(a, b), Expr::ArrayIndirection(box (a, b)) => Assign::ArrayIndirection(box (a, b)), Expr::Identifier(x, ()) => Assign::Identifier(x.into_map(|s| AssignIdentP(s, ()))), _ => { return Err(Diagnostic::new( ValidateError::InvalidLhs, x.span, codemap.dupe(), )); } }, }) } pub fn check_assignment( codemap: &CodeMap, lhs: AstExpr, op: Option<AssignOp>, rhs: AstExpr, ) -> anyhow::Result<Stmt> { if op.is_some() { match &lhs.node { Expr::Tuple(_) | Expr::List(_) => { return Err(Diagnostic::new( ValidateError::InvalidModifyLhs, lhs.span, codemap.dupe(), )); } _ => {} } } let lhs = Self::check_assign(codemap, lhs)?; Ok(match op { None => Stmt::Assign(lhs, box rhs), Some(op) => Stmt::AssignModify(lhs, op, box rhs), }) } pub fn validate(codemap: &CodeMap, stmt: &AstStmt, dialect: &Dialect) -> anyhow::Result<()> { fn f( codemap: &CodeMap, dialect: &Dialect, stmt: &AstStmt, top_level: bool, inside_for: bool, inside_def: bool, ) -> anyhow::Result<()> { let err = |x| Err(Diagnostic::new(x, stmt.span, codemap.dupe())); match &stmt.node { Stmt::Def(_, _, _, body, _payload) => f(codemap, dialect, body, false, false, true), Stmt::For(_, box (_, body)) => { if top_level && !dialect.enable_top_level_stmt { err(ValidateError::NoTopLevelFor) } else { f(codemap, dialect, body, false, true, inside_def) } } Stmt::If(..) | Stmt::IfElse(..) => { if top_level && !dialect.enable_top_level_stmt { err(ValidateError::NoTopLevelIf) } else { stmt.node.visit_stmt_result(|x| { f(codemap, dialect, x, false, inside_for, inside_def) }) } } Stmt::Break if !inside_for => err(ValidateError::BreakOutsideLoop), Stmt::Continue if !inside_for => err(ValidateError::ContinueOutsideLoop), Stmt::Return(_) if !inside_def => err(ValidateError::ReturnOutsideDef), Stmt::Load(..) if !top_level => err(ValidateError::LoadNotTop), _ => stmt.node.visit_stmt_result(|x| { f(codemap, dialect, x, top_level, inside_for, inside_def) }), } } f(codemap, dialect, stmt, true, false, false) } }
/* * Copyright 2018 The Starlark in Rust Authors. * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::collections::HashSet; use gazebo::prelude::*; use thiserror::Error; use crate::{ codemap::{CodeMap, Spanned}, errors::Diagnostic, syntax::{ ast::{ Argument, Assign, AssignIdentP, AssignOp, AstArgument, AstAssign, AstAssignIdent, AstExpr, AstParameter, AstStmt, AstString, Expr, Parameter, Stmt, }, Dialect, }, }; #[derive(Error, Debug)] enum ValidateError { #[error("`break` cannot be used outside of a `for` loop")] BreakOutsideLoop, #[error("`continue` cannot be used outside of a `for` loop")] ContinueOutsideLoop, #[error("`return` cannot be used outside of a `def` function")] ReturnOutsideDef, #[error("`load` must only occur at the top of a module")] LoadNotTop, #[error("`if` cannot be used outside `def` in this dialect")] NoTopLevelIf, #[error("`for` cannot be used outside `def` in this dialect")] NoTopLevelFor, #[error("left-hand-side of assignment must take the form `a`, `a.b` or `a[b]`")] InvalidLhs, #[error("left-hand-side of modifying assignment cannot be a list or tuple")] InvalidModifyLhs, } #[derive(Eq, PartialEq, Ord, PartialOrd)] enum ArgsStage { Positional, Named, Args, Kwargs, } #[derive(Error, Debug)] enum ArgumentDefinitionOrderError { #[error("positional argument after non positional")] PositionalThenNonPositional, #[error("named argument after *args or **kwargs")] NamedArgumentAfterStars, #[error("repeated named argument")] RepeatedNamed, #[error("Args array after another args or kwargs")] ArgsArrayAfterArgsOrKwargs, #[error("Multiple kwargs dictionary in arguments")] MultipleKwargs, } impl Expr { pub fn check_call( f: AstExpr, args: Vec<AstArgument>, codemap: &CodeMap, ) -> anyhow::Result<Expr> { let err = |span, msg| Err(Diagnostic::new(msg, span, codemap.dupe())); let mut stage = ArgsStage::Positional; let mut named_args = HashSet::new(); for arg in &args { match &arg.node { Argument::Positional(_) => { if stage != ArgsStage::Positional { return err( arg.span, ArgumentDefinitionOrderError::PositionalThenNonPositional, ); } } Argument::Named(n, _) => { if stage > ArgsStage::Named { return err( arg.span, ArgumentDefinitionOrderError::NamedArgumentAfterStars, ); } else if !named_args.insert(&n.node) { return err(n.span, ArgumentDefinitionOrderError::RepeatedNamed); } else { stage = ArgsStage::Named; } } Argument::Args(_) => { if stage > ArgsStage::Named { return err( arg.span, ArgumentDefinitionOrderError::ArgsArrayAfterArgsOrKwargs, ); } else { stage = ArgsStage::Args; } } Argument::KwArgs(_) => { if stage == ArgsStage::Kwargs { return err(arg.span, ArgumentDefinitionOrderError::MultipleKwargs); } else { stage = ArgsStage::Kwargs; } } } } Ok(Expr::Call(box f, args)) } } fn test_param_name<'a, T>( argset: &mut HashSet<&'a str>, n: &'a AstAssignIdent, arg: &Spanned<T>, codemap: &CodeMap, ) -> anyhow::Result<()> { if argset.contains(n.node.0.as_str()) { return Err(Diagnostic::new( ArgumentUseOrderError::DuplicateParameterName, 
arg.span, codemap.dupe(), )); } argset.insert(&n.node.0); Ok(()) } #[derive(Error, Debug)] enum ArgumentUseOrderError { #[error("duplicated parameter name")] DuplicateParameterName, #[error("positional parameter after non positional")] PositionalThenNonPositional, #[error("Default parameter after args array or kwargs dictionary")] DefaultParameterAfterStars, #[error("Args parameter after another args or kwargs parameter")] ArgsParameterAfterStars, #[error("Multiple kwargs dictionary in parameters")] MultipleKwargs, } fn check_parameters(parameters: &[AstParameter], codemap: &CodeMap) -> anyhow::Result<()> { let err = |span, msg| Err(Diagnostic::new(msg, span, codemap.dupe())); let mut argset = HashSet::new(); let mut seen_args = false; let mut seen_kwargs = false; let mut seen_optional = false; for arg in parameters.iter() { match &arg.node { Parameter::Normal(n, ..) => { if seen_kwargs || seen_optional { return err(arg.span, ArgumentUseOrderError::PositionalThenNonPositional); } test_param_name(&mut argset, n, arg, codemap)?; } Parameter::WithDefaultValue(n, ..) => { if seen_kwargs { return err(arg.span, ArgumentUseOrderError::DefaultParameterAfterStars); } seen_optional = true; test_param_name(&mut argset, n, arg, codemap)?; } Parameter::NoArgs => { if seen_args || seen_kwargs { return err(arg.span, ArgumentUseOrderError::ArgsParameterAfterStars); } seen_args = true; } Parameter::Args(n, ..) => { if seen_args || seen_kwargs { return err(arg.span, ArgumentUseOrderError::ArgsParameterAfterStars); } seen_args = true; test_param_name(&mut argset, n, arg, codemap)?; } Parameter::KwArgs(n, ..) => { if seen_kwargs { return err(arg.span, ArgumentUseOrderError::MultipleKwargs); } seen_kwargs = true; test_param_name(&mut argset, n, arg, codemap)?; } } } Ok(()) } impl Expr { pub fn check_lambda( parameters: Vec<AstParameter>, body: AstExpr, codemap: &CodeMap, ) -> anyhow::Result<Expr> { check_parameters(&parameters, codemap)?; Ok(Expr::Lambda(parameters, box body, ())) } } impl Stmt {
pub fn check_assign(codemap: &CodeMap, x: AstExpr) -> anyhow::Result<AstAssign> { Ok(Spanned { span: x.span, node: match x.node { Expr::Tuple(xs) | Expr::List(xs) => { Assign::Tuple(xs.into_try_map(|x| Self::check_assign(codemap, x))?) } Expr::Dot(a, b) => Assign::Dot(a, b), Expr::ArrayIndirection(box (a, b)) => Assign::ArrayIndirection(box (a, b)), Expr::Identifier(x, ()) => Assign::Identifier(x.into_map(|s| AssignIdentP(s, ()))), _ => { return Err(Diagnostic::new( ValidateError::InvalidLhs, x.span, codemap.dupe(), )); } }, }) } pub fn check_assignment( codemap: &CodeMap, lhs: AstExpr, op: Option<AssignOp>, rhs: AstExpr, ) -> anyhow::Result<Stmt> { if op.is_some() { match &lhs.node { Expr::Tuple(_) | Expr::List(_) => { return Err(Diagnostic::new( ValidateError::InvalidModifyLhs, lhs.span, codemap.dupe(), )); } _ => {} } } let lhs = Self::check_assign(codemap, lhs)?; Ok(match op { None => Stmt::Assign(lhs, box rhs), Some(op) => Stmt::AssignModify(lhs, op, box rhs), }) } pub fn validate(codemap: &CodeMap, stmt: &AstStmt, dialect: &Dialect) -> anyhow::Result<()> { fn f( codemap: &CodeMap, dialect: &Dialect, stmt: &AstStmt, top_level: bool, inside_for: bool, inside_def: bool, ) -> anyhow::Result<()> { let err = |x| Err(Diagnostic::new(x, stmt.span, codemap.dupe())); match &stmt.node { Stmt::Def(_, _, _, body, _payload) => f(codemap, dialect, body, false, false, true), Stmt::For(_, box (_, body)) => { if top_level && !dialect.enable_top_level_stmt { err(ValidateError::NoTopLevelFor) } else { f(codemap, dialect, body, false, true, inside_def) } } Stmt::If(..) | Stmt::IfElse(..) => { if top_level && !dialect.enable_top_level_stmt { err(ValidateError::NoTopLevelIf) } else { stmt.node.visit_stmt_result(|x| { f(codemap, dialect, x, false, inside_for, inside_def) }) } } Stmt::Break if !inside_for => err(ValidateError::BreakOutsideLoop), Stmt::Continue if !inside_for => err(ValidateError::ContinueOutsideLoop), Stmt::Return(_) if !inside_def => err(ValidateError::ReturnOutsideDef), Stmt::Load(..) if !top_level => err(ValidateError::LoadNotTop), _ => stmt.node.visit_stmt_result(|x| { f(codemap, dialect, x, top_level, inside_for, inside_def) }), } } f(codemap, dialect, stmt, true, false, false) } }
    pub fn check_def(
        name: AstString,
        parameters: Vec<AstParameter>,
        return_type: Option<Box<AstExpr>>,
        stmts: AstStmt,
        codemap: &CodeMap,
    ) -> anyhow::Result<Stmt> {
        check_parameters(&parameters, codemap)?;
        let name = name.into_map(|s| AssignIdentP(s, ()));
        Ok(Stmt::Def(name, parameters, return_type, box stmts, ()))
    }
function_block-full_function
[ { "content": "fn require_return_expression(ret_type: &Option<Box<AstExpr>>) -> Option<Span> {\n\n match ret_type {\n\n None => None,\n\n Some(x) => match &***x {\n\n Expr::Identifier(x, _) if x.node == \"None\" => None,\n\n _ => Some(x.span),\n\n },\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 0, "score": 396529.18977679894 }, { "content": "fn err<T>(codemap: &CodeMap, span: Span, err: DialectError) -> anyhow::Result<T> {\n\n Err(Diagnostic::new(err, span, codemap.dupe()))\n\n}\n\n\n\nimpl Dialect {\n\n pub(crate) fn check_lambda<T>(\n\n &self,\n\n codemap: &CodeMap,\n\n x: Spanned<T>,\n\n ) -> anyhow::Result<Spanned<T>> {\n\n if self.enable_lambda {\n\n Ok(x)\n\n } else {\n\n err(codemap, x.span, DialectError::Lambda)\n\n }\n\n }\n\n\n\n pub(crate) fn check_def<T>(\n\n &self,\n\n codemap: &CodeMap,\n", "file_path": "starlark/src/syntax/dialect.rs", "rank": 1, "score": 365894.03537722933 }, { "content": "fn returns(x: &AstStmt) -> Vec<(Span, Option<&AstExpr>)> {\n\n fn f<'a>(x: &'a AstStmt, res: &mut Vec<(Span, Option<&'a AstExpr>)>) {\n\n match &**x {\n\n Stmt::Return(ret) => res.push((x.span, ret.as_ref())),\n\n Stmt::Def(..) => {} // Do not descend\n\n _ => x.visit_stmt(|x| f(x, res)),\n\n }\n\n }\n\n\n\n let mut res = Vec::new();\n\n f(x, &mut res);\n\n res\n\n}\n\n\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 2, "score": 352777.9545277505 }, { "content": "fn go(x: &AstStmt, res: &mut Vec<Span>) {\n\n match &**x {\n\n Stmt::Statements(_) => {} // These are not interesting statements that come up\n\n _ => res.push(x.span),\n\n }\n\n x.visit_stmt(|x| go(x, res))\n\n}\n\n\n\nimpl AstModule {\n\n /// Locations where statements occur, likely to be passed as the positions\n\n /// to [`before_stmt`](crate::eval::Evaluator::before_stmt).\n\n pub fn stmt_locations(&self) -> Vec<Span> {\n\n let mut res = Vec::new();\n\n self.statement.visit_stmt(|x| go(x, &mut res));\n\n res\n\n }\n\n}\n", "file_path": "starlark/src/debug/breakpoint.rs", "rank": 3, "score": 345442.5877287179 }, { "content": "fn match_dict_copy(codemap: &CodeMap, x: &AstExpr, res: &mut Vec<LintT<Performance>>) {\n\n // If we see `dict(**x)` suggest `dict(x)`\n\n match &**x {\n\n Expr::Call(fun, args) if args.len() == 1 => match (&***fun, &*args[0]) {\n\n (Expr::Identifier(f, _), Argument::KwArgs(arg)) if f.node == \"dict\" => {\n\n res.push(LintT::new(\n\n codemap,\n\n x.span,\n\n Performance::DictWithoutStarStar(x.to_string(), format!(\"dict({})\", arg.node)),\n\n ))\n\n }\n\n _ => {}\n\n },\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/performance.rs", "rank": 4, "score": 337592.0638743926 }, { "content": "/// See [`Assert::all_true`].\n\npub fn all_true(expressions: &str) {\n\n Assert::new().all_true(expressions)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 5, "score": 312949.37297248776 }, { "content": "fn fmt_string_literal(f: &mut Formatter<'_>, s: &str) -> fmt::Result {\n\n f.write_str(\"\\\"\")?;\n\n for c in s.chars() {\n\n match c {\n\n '\\n' => f.write_str(\"\\\\n\")?,\n\n '\\t' => f.write_str(\"\\\\t\")?,\n\n '\\r' => f.write_str(\"\\\\r\")?,\n\n '\\0' => f.write_str(\"\\\\0\")?,\n\n '\"' => f.write_str(\"\\\\\\\"\")?,\n\n '\\\\' => f.write_str(\"\\\\\\\\\")?,\n\n x => f.write_str(&x.to_string())?,\n\n }\n\n }\n\n f.write_str(\"\\\"\")\n\n}\n\n\n\nimpl Display for AstLiteral {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n AstLiteral::Int(i) => write!(f, \"{}\", &i.node),\n", "file_path": 
"starlark/src/syntax/ast.rs", "rank": 6, "score": 311078.6589932129 }, { "content": "fn stmt(codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) {\n\n check_stmt(codemap, x, res);\n\n x.visit_stmt(|x| stmt(codemap, x, res));\n\n}\n\n\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 7, "score": 307769.7332927596 }, { "content": "/// See [`Assert::parse_ast`].\n\npub fn parse_ast(program: &str) -> AstModule {\n\n Assert::new().parse_ast(program)\n\n}\n\n\n\n/// Lex some text and return the tokens. Fails if the program does not parse.\n\n/// Only available inside the crate because the Token type is not exported.\n\n#[cfg(test)]\n\npub(crate) fn lex_tokens(program: &str) -> Vec<(usize, Token, usize)> {\n\n Assert::new().lex_tokens(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 8, "score": 302395.2888504132 }, { "content": "fn check_stmt(codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) {\n\n match &**x {\n\n Stmt::Def(name, _params, ret_type, body, _payload) => {\n\n let rets = returns(body);\n\n\n\n // Do I require my return statements to have an expression\n\n let require_expression = require_return_expression(ret_type)\n\n .or_else(|| rets.iter().find(|x| x.1.is_some()).map(|x| x.0));\n\n if let Some(reason) = require_expression {\n\n if !final_return(body) {\n\n res.push(LintT::new(\n\n codemap,\n\n x.span,\n\n FlowIssue::MissingReturn(\n\n // Statements often end with \\n, so remove that to fit nicely\n\n name.node.0.trim_end().to_owned(),\n\n codemap.file_span(reason).resolve(),\n\n ),\n\n ));\n\n }\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 9, "score": 282309.61588707037 }, { "content": "#[starlark_module]\n\npub fn debug(builder: &mut GlobalsBuilder) {\n\n /// Print the value with full debug formatting. 
The result may not be stable over time,\n\n /// mostly intended for debugging purposes.\n\n fn debug(ref val: Value) -> String {\n\n Ok(format!(\"{:?}\", val))\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 10, "score": 281882.0495604363 }, { "content": "/// See [`Assert::fail`].\n\npub fn fail(program: &str, msg: &str) -> anyhow::Error {\n\n Assert::new().fail(program, msg)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 11, "score": 278542.3639913678 }, { "content": "fn expr(x: &AstExpr, res: &mut Vec<Bind>) {\n\n match &**x {\n\n Expr::Identifier(x, _) => res.push(Bind::Get(x.clone())),\n\n Expr::Lambda(args, body, _) => {\n\n let mut inner = Vec::new();\n\n parameters(args, res, &mut inner);\n\n expr(body, &mut inner);\n\n res.push(Bind::Scope(Scope::new(inner)));\n\n }\n\n\n\n Expr::ListComprehension(x, for_, clauses) => {\n\n comprehension(for_, clauses, res, |res| expr(x, res))\n\n }\n\n Expr::DictComprehension(x, for_, clauses) => comprehension(for_, clauses, res, |res| {\n\n expr(&x.0, res);\n\n expr(&x.1, res)\n\n }),\n\n\n\n // Uninteresting - just recurse\n\n _ => x.visit_expr(|x| expr(x, res)),\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/bind.rs", "rank": 12, "score": 277315.7843519312 }, { "content": "fn stmt(x: &AstStmt, res: &mut Vec<Bind>) {\n\n match &**x {\n\n Stmt::Statements(xs) => {\n\n for x in xs {\n\n stmt(x, res)\n\n }\n\n }\n\n Stmt::Break | Stmt::Continue | Stmt::Return(None) => flow(res),\n\n Stmt::Pass => {}\n\n Stmt::Return(Some(x)) => {\n\n expr(x, res);\n\n flow(res)\n\n }\n\n Stmt::Expression(x) => expr(x, res),\n\n Stmt::If(a, box b) => {\n\n expr(a, res);\n\n flow(res);\n\n stmt(b, res);\n\n flow(res);\n\n }\n", "file_path": "starlark/src/analysis/bind.rs", "rank": 13, "score": 277314.85543336836 }, { "content": "fn expr_lvalue(x: &AstAssign, res: &mut Vec<Bind>) {\n\n x.visit_expr(|x| expr(x, res));\n\n x.visit_lvalue(|x| res.push(Bind::Set(Assigner::Assign, x.clone())))\n\n}\n\n\n", "file_path": "starlark/src/analysis/bind.rs", "rank": 14, "score": 274151.8319665175 }, { "content": "// If you have a definition which ends with return, or a loop which ends with continue\n\n// that is a useless statement that just\n\nfn redundant(codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) {\n\n fn check(is_loop: bool, codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) {\n\n match &**x {\n\n Stmt::Continue if is_loop => {\n\n res.push(LintT::new(codemap, x.span, FlowIssue::RedundantContinue))\n\n }\n\n Stmt::Return(None) if !is_loop => {\n\n res.push(LintT::new(codemap, x.span, FlowIssue::RedundantReturn))\n\n }\n\n Stmt::Statements(xs) if !xs.is_empty() => {\n\n check(is_loop, codemap, xs.last().unwrap(), res)\n\n }\n\n Stmt::If(_, box x) => check(is_loop, codemap, x, res),\n\n Stmt::IfElse(_, box (x, y)) => {\n\n check(is_loop, codemap, x, res);\n\n check(is_loop, codemap, y, res);\n\n }\n\n _ => {}\n\n }\n\n }\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 15, "score": 272619.15812232473 }, { "content": "// Go implementation of Starlark disallows duplicate top-level assignments,\n\n// it's likely that will become Starlark standard sooner or later, so check now.\n\n// The one place we allow it is to export something you grabbed with load.\n\nfn duplicate_top_level_assignment(module: &AstModule, res: &mut Vec<LintT<Incompatibility>>) {\n\n let mut defined = HashMap::new(); //(name, (location, is_load))\n\n let mut exported = HashSet::new(); // name's already exported by 
is_load\n\n\n\n fn ident<'a>(\n\n x: &'a AstAssignIdent,\n\n is_load: bool,\n\n codemap: &CodeMap,\n\n defined: &mut HashMap<&'a str, (Span, bool)>,\n\n res: &mut Vec<LintT<Incompatibility>>,\n\n ) {\n\n if let Some((old, _)) = defined.get(x.0.as_str()) {\n\n res.push(LintT::new(\n\n codemap,\n\n x.span,\n\n Incompatibility::DuplicateTopLevelAssign(x.0.clone(), codemap.file_span(*old)),\n\n ));\n\n } else {\n\n defined.insert(&x.0, (x.span, is_load));\n\n }\n", "file_path": "starlark/src/analysis/incompatible.rs", "rank": 16, "score": 272284.4366999708 }, { "content": "/// See [`Assert::is_true`].\n\npub fn is_true(program: &str) {\n\n Assert::new().is_true(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 18, "score": 268852.92121897324 }, { "content": "fn misplaced_load(codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) {\n\n // accumulate all statements at the top-level\n\n fn top_statements<'a>(x: &'a AstStmt, stmts: &mut Vec<&'a AstStmt>) {\n\n match &**x {\n\n Stmt::Statements(xs) => {\n\n for x in xs {\n\n top_statements(x, stmts);\n\n }\n\n }\n\n _ => stmts.push(x),\n\n }\n\n }\n\n\n\n let mut stmts = Vec::new();\n\n top_statements(x, &mut stmts);\n\n\n\n // We allow loads or documentation strings, but after that, no loads\n\n let mut allow_loads = true;\n\n for x in stmts {\n\n match &**x {\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 19, "score": 268706.8031185503 }, { "content": "fn unused_variable(codemap: &CodeMap, scope: &Scope, top: bool, res: &mut Vec<LintT<NameWarning>>) {\n\n let mut warnings = HashMap::new();\n\n for (x, (typ, span)) in &scope.bound {\n\n let exported = top && *typ == Assigner::Assign && !x.starts_with('_');\n\n let ignored = !top && x.starts_with('_');\n\n\n\n // We don't want to warn about exported things or ignored things\n\n if !exported && !ignored {\n\n warnings.insert(x, (*typ, *span));\n\n }\n\n }\n\n\n\n for x in &scope.inner {\n\n match x {\n\n Bind::Set(..) 
=> {}\n\n Bind::Get(x) => {\n\n warnings.remove(&x.node);\n\n }\n\n Bind::Scope(scope) => {\n\n unused_variable(codemap, scope, false, res);\n", "file_path": "starlark/src/analysis/names.rs", "rank": 20, "score": 267066.7491895868 }, { "content": "// Returns true if the code aborts this sequence early, due to return, fail, break or continue\n\nfn reachable(codemap: &CodeMap, x: &AstStmt, res: &mut Vec<LintT<FlowIssue>>) -> bool {\n\n match &**x {\n\n Stmt::Break | Stmt::Continue | Stmt::Return(_) => true,\n\n Stmt::Expression(x) => is_fail(x),\n\n Stmt::Statements(xs) => {\n\n let mut i = xs.iter();\n\n while let Some(x) = i.next() {\n\n let aborts = reachable(codemap, x, res);\n\n if aborts {\n\n if let Some(nxt) = i.next() {\n\n res.push(LintT::new(\n\n codemap,\n\n nxt.span,\n\n FlowIssue::Unreachable(nxt.node.to_string().trim().to_owned()),\n\n ))\n\n }\n\n // All the remaining statements are totally unreachable, but we declared that once\n\n // so don't even bother looking at them\n\n return aborts;\n\n }\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 21, "score": 264485.7362840314 }, { "content": "/// See [`Assert::pass_module`].\n\npub fn pass_module(program: &str) -> FrozenModule {\n\n Assert::new().pass_module(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 22, "score": 261814.6933655443 }, { "content": "/// Payload types attached to AST nodes.\n\npub trait AstPayload: Debug {\n\n type IdentPayload: Debug;\n\n type IdentAssignPayload: Debug;\n\n type DefPayload: Debug;\n\n}\n\n\n\n/// Default implementation of payload, which attaches `()` to nodes.\n\n/// This payload is returned with AST by parser.\n\n#[derive(Debug, Copy, Clone, Dupe)]\n\npub struct AstNoPayload;\n\nimpl AstPayload for AstNoPayload {\n\n type IdentPayload = ();\n\n type IdentAssignPayload = ();\n\n type DefPayload = ();\n\n}\n\n\n\npub type Expr = ExprP<AstNoPayload>;\n\npub type Assign = AssignP<AstNoPayload>;\n\npub type AssignIdent = AssignIdentP<AstNoPayload>;\n\npub type Clause = ClauseP<AstNoPayload>;\n", "file_path": "starlark/src/syntax/ast.rs", "rank": 24, "score": 259055.20829132968 }, { "content": "fn parameters(args: &[AstParameter], res: &mut Vec<Bind>, inner: &mut Vec<Bind>) {\n\n for a in args {\n\n let (name, typ, default) = a.split();\n\n opt_expr(typ, res);\n\n opt_expr(default, res);\n\n if let Some(name) = name {\n\n inner.push(Bind::Set(Assigner::Argument, name.clone()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/bind.rs", "rank": 25, "score": 258584.09736312265 }, { "content": "#[starlark_module]\n\npub fn abs(builder: &mut GlobalsBuilder) {\n\n fn abs(ref x: i32) -> i32 {\n\n Ok(x.abs())\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 26, "score": 252907.87504326372 }, { "content": "pub fn write_decimal<W: fmt::Write>(output: &mut W, f: f64) -> fmt::Result {\n\n if !f.is_finite() {\n\n write_non_finite(output, f)\n\n } else {\n\n write!(output, \"{:.prec$}\", f, prec = WRITE_PRECISION)\n\n }\n\n}\n\n\n", "file_path": "starlark/src/values/types/float.rs", "rank": 27, "score": 251744.62899437448 }, { "content": "/// Find the number of times a `needle` occurs within a string, non-overlapping.\n\npub fn count_matches(x: &str, needle: &str) -> usize {\n\n if needle.len() == 1 {\n\n // If we are searching for a 1-byte string, we can provide a much faster path.\n\n // Since it is one byte, given how UTF8 works, all the resultant slices must be UTF8 too.\n\n count_matches_byte(x, needle.as_bytes()[0])\n\n } else {\n\n 
x.matches(needle).count()\n\n }\n\n}\n\n\n\n/// Result of applying `start` and `end` to a string.\n\n#[derive(PartialEq, Debug)]\n\npub(crate) struct StrIndices<'a> {\n\n /// Computed start char index.\n\n pub(crate) start: CharIndex,\n\n /// Substring after applying the `start` and `end` arguments.\n\n pub(crate) haystack: &'a str,\n\n}\n\n\n", "file_path": "starlark/src/values/types/string/fast_string.rs", "rank": 28, "score": 251171.4107282001 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let g = Globals::extended();\n\n criterion_general_benchmark(c, &g);\n\n criterion_parsing_benchmark(c);\n\n criterion_eval_benchmark(c, &g);\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starlark/benches/main.rs", "rank": 29, "score": 247329.81961942467 }, { "content": "fn dict_copy(module: &AstModule, res: &mut Vec<LintT<Performance>>) {\n\n fn check(codemap: &CodeMap, x: &AstExpr, res: &mut Vec<LintT<Performance>>) {\n\n match_dict_copy(codemap, x, res);\n\n x.visit_expr(|x| check(codemap, x, res));\n\n }\n\n module\n\n .statement\n\n .visit_expr(|x| check(&module.codemap, x, res));\n\n}\n\n\n\npub(crate) fn performance(module: &AstModule) -> Vec<LintT<Performance>> {\n\n let mut res = Vec::new();\n\n dict_copy(module, &mut res);\n\n res\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use gazebo::prelude::*;\n\n\n", "file_path": "starlark/src/analysis/performance.rs", "rank": 30, "score": 245360.3052288455 }, { "content": "pub fn criterion_parsing_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"parse_long_buble_sort\", |b| {\n\n let long_code = &BUBBLE_SORT.repeat(100)[..];\n\n b.iter(|| benchmark_pure_parsing(long_code))\n\n });\n\n}\n\n\n", "file_path": "starlark/benches/main.rs", "rank": 31, "score": 243644.69662731967 }, { "content": "fn comma_separated_fmt<I, F>(\n\n f: &mut Formatter<'_>,\n\n v: &[I],\n\n converter: F,\n\n for_tuple: bool,\n\n) -> fmt::Result\n\nwhere\n\n F: Fn(&I, &mut Formatter<'_>) -> fmt::Result,\n\n{\n\n for (i, e) in v.iter().enumerate() {\n\n f.write_str(if i == 0 { \"\" } else { \", \" })?;\n\n converter(e, f)?;\n\n }\n\n if v.len() == 1 && for_tuple {\n\n f.write_str(\",\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "starlark/src/syntax/ast.rs", "rank": 32, "score": 242377.27458500597 }, { "content": "// Go implementation of Starlark disallows duplicate top-level assignments,\n\n// it's likely that will become Starlark standard sooner or later, so check now.\n\n// The one place we allow it is to export something you grabbed with load.\n\nfn duplicate_dictionary_key(module: &AstModule, res: &mut Vec<LintT<Dubious>>) {\n\n #[derive(PartialEq, Eq, Hash)]\n\n enum Key<'a> {\n\n Int(i32),\n\n Float(u64),\n\n String(&'a str),\n\n Identifier(&'a str),\n\n }\n\n\n\n fn to_key<'a>(x: &'a AstExpr) -> Option<(Key<'a>, Span)> {\n\n match &**x {\n\n Expr::Literal(x) => match &*x {\n\n AstLiteral::Int(x) => Some((Key::Int(x.node), x.span)),\n\n AstLiteral::Float(x) => {\n\n let n = Num::from(x.node);\n\n if let Some(i) = n.as_int() {\n\n // make an integer float always collide with other ints\n\n Some((Key::Int(i), x.span))\n\n } else {\n\n // use bits representation of float to be able to always compare them for equality\n", "file_path": "starlark/src/analysis/dubious.rs", "rank": 33, "score": 241152.70291663246 }, { "content": "fn opt_expr(x: Option<&AstExpr>, res: &mut Vec<Bind>) {\n\n if let Some(x) = x {\n\n expr(x, res)\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/bind.rs", 
"rank": 34, "score": 239670.25842380518 }, { "content": "pub fn scope(module: &AstModule) -> Scope {\n\n let mut res = Vec::new();\n\n stmt(&module.statement, &mut res);\n\n Scope::new(res)\n\n}\n", "file_path": "starlark/src/analysis/bind.rs", "rank": 35, "score": 237120.62694356486 }, { "content": "fn write_non_finite<W: fmt::Write>(output: &mut W, f: f64) -> fmt::Result {\n\n debug_assert!(f.is_nan() || f.is_infinite());\n\n if f.is_nan() {\n\n write!(output, \"nan\")\n\n } else {\n\n write!(\n\n output,\n\n \"{}inf\",\n\n if f.is_sign_positive() { \"+\" } else { \"-\" }\n\n )\n\n }\n\n}\n\n\n", "file_path": "starlark/src/values/types/float.rs", "rank": 36, "score": 236284.51740168966 }, { "content": "pub fn write_compact<W: fmt::Write>(output: &mut W, f: f64, exponent_char: char) -> fmt::Result {\n\n if !f.is_finite() {\n\n write_non_finite(output, f)\n\n } else {\n\n let abs = f.abs();\n\n let exponent = if f == 0.0 {\n\n 0\n\n } else {\n\n abs.log10().floor() as i32\n\n };\n\n\n\n if exponent.abs() >= WRITE_PRECISION as i32 {\n\n // use scientific notation if exponent is outside of our precision (but strip 0s)\n\n write_scientific(output, f, exponent_char, true)\n\n } else if f.fract() == 0.0 {\n\n // make sure there's a fractional part even if the number doesn't have it\n\n output.write_fmt(format_args!(\"{:.1}\", f))\n\n } else {\n\n // rely on the built-in formatting otherwise\n\n output.write_fmt(format_args!(\"{}\", f))\n", "file_path": "starlark/src/values/types/float.rs", "rank": 37, "score": 234683.48159301342 }, { "content": "fn final_return(x: &AstStmt) -> bool {\n\n match &**x {\n\n Stmt::Return(_) => true,\n\n Stmt::Expression(x) if is_fail(x) => true,\n\n Stmt::Statements(xs) => match xs.last() {\n\n None => false,\n\n Some(x) => final_return(x),\n\n },\n\n Stmt::IfElse(_, box (x, y)) => final_return(x) && final_return(y),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 38, "score": 233292.77446067758 }, { "content": "fn debug_value(typ: &str, v: Value, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_tuple(typ).field(v.get_ref().as_debug()).finish()\n\n}\n\n\n\nimpl Debug for Value<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n debug_value(\"Value\", *self, f)\n\n }\n\n}\n\n\n\nimpl Debug for FrozenValue {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n debug_value(\"FrozenValue\", Value::new_frozen(*self), f)\n\n }\n\n}\n\n\n\nimpl<'v> PartialEq for Value<'v> {\n\n fn eq(&self, other: &Value<'v>) -> bool {\n\n self.equals(*other).ok() == Some(true)\n\n }\n", "file_path": "starlark/src/values/mod.rs", "rank": 39, "score": 232761.47723261188 }, { "content": "/// See [`Assert::eq`].\n\npub fn eq(lhs: &str, rhs: &str) {\n\n Assert::new().eq(lhs, rhs)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 41, "score": 230019.19613737008 }, { "content": "fn unassigned_variable(codemap: &CodeMap, scope: &Scope, res: &mut Vec<LintT<NameWarning>>) {\n\n // We only look for variables that are assigned in this scope, but haven't yet been assigned\n\n let mut assigned: HashSet<&str> = HashSet::new();\n\n for x in &scope.inner {\n\n match x {\n\n Bind::Get(x)\n\n if scope.bound.get(&x.node).is_some() && !assigned.contains(x.as_str()) =>\n\n {\n\n res.push(LintT::new(\n\n codemap,\n\n x.span,\n\n NameWarning::UsingUnassigned(x.node.clone()),\n\n ))\n\n }\n\n Bind::Set(_, x) => {\n\n assigned.insert(x.0.as_str());\n\n }\n\n Bind::Scope(scope) => unassigned_variable(codemap, scope, res),\n\n 
_ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/names.rs", "rank": 42, "score": 226436.5378636525 }, { "content": "pub fn criterion_eval_benchmark(c: &mut Criterion, globals: &Globals) {\n\n c.bench_function(\"run_tight_loop\", |b| {\n\n let env = Module::new();\n\n let mut eval = Evaluator::new(&env);\n\n let ast =\n\n AstModule::parse(\"benchmark.sky\", TIGHT_LOOP.to_owned(), &Dialect::Standard).unwrap();\n\n let bench_function = eval.eval_module(ast, globals).unwrap();\n\n b.iter(move || eval.eval_function(bench_function, &[], &[]).unwrap())\n\n });\n\n}\n\n\n", "file_path": "starlark/benches/main.rs", "rank": 43, "score": 223212.7272579397 }, { "content": "pub fn criterion_general_benchmark(c: &mut Criterion, globals: &Globals) {\n\n c.bench_function(\"empty\", |b| b.iter(|| benchmark_run(globals, EMPTY)));\n\n c.bench_function(\"bubble_sort\", |b| {\n\n b.iter(|| benchmark_run(globals, BUBBLE_SORT))\n\n });\n\n}\n\n\n", "file_path": "starlark/benches/main.rs", "rank": 44, "score": 223212.7272579397 }, { "content": "/// Find the number of times a `needle` byte occurs within a string.\n\n/// If the needle represents a complete character, this will be equivalent to doing\n\n/// search for that character in the string.\n\npub fn count_matches_byte(x: &str, needle: u8) -> usize {\n\n x.as_bytes().iter().filter(|x| **x == needle).count()\n\n}\n\n\n", "file_path": "starlark/src/values/types/string/fast_string.rs", "rank": 45, "score": 223113.6595645527 }, { "content": "pub fn write_scientific<W: fmt::Write>(\n\n output: &mut W,\n\n f: f64,\n\n exponent_char: char,\n\n strip_trailing_zeros: bool,\n\n) -> fmt::Result {\n\n if !f.is_finite() {\n\n write_non_finite(output, f)\n\n } else {\n\n let abs = f.abs();\n\n let exponent = if f == 0.0 {\n\n 0\n\n } else {\n\n abs.log10().floor() as i32\n\n };\n\n let normal = if f == 0.0 {\n\n 0.0\n\n } else {\n\n abs / 10f64.powf(exponent as f64)\n\n };\n", "file_path": "starlark/src/values/types/float.rs", "rank": 46, "score": 221442.83234946095 }, { "content": "/// See [`Assert::parse`].\n\npub fn parse(program: &str) -> String {\n\n Assert::new().parse(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 48, "score": 214870.93066170398 }, { "content": "/// See [`Assert::lex`].\n\npub fn lex(program: &str) -> String {\n\n Assert::new().lex(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 49, "score": 214870.93066170398 }, { "content": "/// See [`Assert::fails`].\n\npub fn fails(program: &str, msgs: &[&str]) -> anyhow::Error {\n\n Assert::new().fails(program, msgs)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 50, "score": 214817.3419080282 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy, Dupe)]\n\nstruct FileId(*const crate::codemap::CodeMapData);\n\n\n\nimpl FileId {\n\n const EMPTY: FileId = FileId(ptr::null());\n\n\n\n fn new(codemap: &CodeMap) -> Self {\n\n Self(Arc::as_ptr(codemap.get_ptr()))\n\n }\n\n}\n\n\n\n// So we don't need a special case for the first time around,\n\n// we have a special FileId of empty that we ignore when printing\n", "file_path": "starlark/src/eval/runtime/stmt_profile.rs", "rank": 51, "score": 212180.04159093805 }, { "content": "#[starlark_module]\n\npub fn partial(builder: &mut GlobalsBuilder) {\n\n fn partial(ref func: Value, args: Value<'v>, kwargs: ARef<Dict>) -> Partial<'v> {\n\n debug_assert!(Tuple::from_value(args).is_some());\n\n let names = kwargs\n\n .keys()\n\n .map(|x| {\n\n let x = 
StringValue::new(x).unwrap();\n\n (\n\n // We duplicate string here.\n\n // If this becomes hot, we should do better.\n\n Symbol::new_hashed(x.unpack_starlark_str().as_str_hashed()),\n\n x,\n\n )\n\n })\n\n .collect();\n\n Ok(Partial {\n\n func,\n\n pos: args,\n\n named: kwargs.values().collect(),\n\n names,\n\n })\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 52, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn map(builder: &mut GlobalsBuilder) {\n\n fn map(ref func: Value, ref seq: Value) -> Value<'v> {\n\n let it = seq.iterate(heap)?;\n\n let mut res = Vec::with_capacity(it.size_hint().0);\n\n for v in it {\n\n res.push(func.invoke_pos(None, &[v], eval)?);\n\n }\n\n Ok(heap.alloc_list(&res))\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 53, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn pprint(builder: &mut GlobalsBuilder) {\n\n fn pprint(args: Vec<Value>) -> NoneType {\n\n // In practice most users may want to put the print somewhere else, but this does for now\n\n eval.print_handler\n\n .println(&format!(\"{:#}\", PrintWrapper(&args)))?;\n\n Ok(NoneType)\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 54, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn print(builder: &mut GlobalsBuilder) {\n\n fn print(args: Vec<Value>) -> NoneType {\n\n // In practice most users should want to put the print somewhere else, but this does for now\n\n // Unfortunately, we can't use PrintWrapper because strings to_str() and Display are different.\n\n eval.print_handler\n\n .println(&args.iter().map(|x| x.to_str()).join(\" \"))?;\n\n Ok(NoneType)\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 55, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn global(builder: &mut GlobalsBuilder) {\n\n fn record(kwargs: SmallMap<String, Value>) -> RecordType<'v> {\n\n // Every Value must either be a field or a value (the type)\n\n let mut mp = SmallMap::with_capacity(kwargs.len());\n\n for (k, v) in kwargs.into_iter_hashed() {\n\n let field = match Field::from_value(v) {\n\n None => Field::new(v, None),\n\n Some(v) => v.dupe(),\n\n };\n\n let compiled = TypeCompiled::new(field.typ, heap)?;\n\n mp.insert_hashed(k, (field, compiled));\n\n }\n\n Ok(RecordType::new(mp))\n\n }\n\n\n\n /// Creates a field record.\n\n ///\n\n /// Examples:\n\n ///\n\n /// ```\n", "file_path": "starlark/src/stdlib/record.rs", "rank": 56, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn global(builder: &mut GlobalsBuilder) {\n\n fn r#enum(args: Vec<Value>) -> Value<'v> {\n\n // Every Value must either be a field or a value (the type)\n\n EnumType::new(args, heap)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::assert::{self, Assert};\n\n\n\n #[test]\n\n fn test_enum() {\n\n assert::pass(\n\n r#\"\n\nenum_type = enum(\"option1\", \"option2\", True)\n\nx = enum_type(\"option1\")\n\nassert_eq(x.value, \"option1\")\n\nassert_eq(enum_type(True).value, True)\n\nassert_eq(enum_type.type, \"enum_type\")\n", "file_path": "starlark/src/stdlib/enumeration.rs", "rank": 57, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn filter(builder: &mut GlobalsBuilder) {\n\n fn filter(ref func: Value, ref seq: Value) -> Value<'v> {\n\n let mut res = Vec::new();\n\n\n\n for v in seq.iterate(heap)? 
{\n\n if func.is_none() {\n\n if !v.is_none() {\n\n res.push(v);\n\n }\n\n } else if func.invoke_pos(None, &[v], eval)?.to_bool() {\n\n res.push(v);\n\n }\n\n }\n\n Ok(heap.alloc_list(&res))\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 58, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn json(builder: &mut GlobalsBuilder) {\n\n fn json(ref x: Value) -> String {\n\n x.to_json()\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 59, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn dedupe(builder: &mut GlobalsBuilder) {\n\n /// Remove duplicates in a list. Uses identity of value (pointer),\n\n /// rather than by equality.\n\n fn dedupe(ref val: Value) -> Value<'v> {\n\n let mut seen = HashSet::new();\n\n let mut res = Vec::new();\n\n for v in val.iterate(heap)? {\n\n let p = v.ptr_value();\n\n if !seen.contains(&p) {\n\n seen.insert(p);\n\n res.push(v);\n\n }\n\n }\n\n Ok(heap.alloc_list(&res))\n\n }\n\n}\n\n\n", "file_path": "starlark/src/stdlib/extra.rs", "rank": 60, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn global(builder: &mut GlobalsBuilder) {\n\n #[starlark(type(Struct::TYPE))]\n\n fn r#struct(args: Arguments<'v, '_>) -> Struct<'v> {\n\n args.no_positional_args(heap)?;\n\n Ok(Struct::new(args.names_map()?))\n\n }\n\n}\n\n\n\n#[starlark_module]\n\npub(crate) fn struct_methods(builder: &mut MethodsBuilder) {\n\n #[starlark(speculative_exec_safe)]\n\n fn to_json(this: Value) -> String {\n\n this.to_json()\n\n }\n\n}\n", "file_path": "starlark/src/stdlib/structs.rs", "rank": 61, "score": 211148.99862631524 }, { "content": "#[starlark_module]\n\npub fn global(builder: &mut GlobalsBuilder) {\n\n fn breakpoint() -> NoneType {\n\n {\n\n let mut guard = BREAKPOINT_MUTEX.lock().unwrap();\n\n if *guard == State::Allow {\n\n let mut rl = match &mut eval.breakpoint_handler {\n\n Some(rl) => rl(),\n\n None => return Err(BreakpointError::NoHandler.into()),\n\n };\n\n rl.println(BREAKPOINT_HIT_MESSAGE);\n\n *guard = breakpoint_loop(eval, rl)?;\n\n }\n\n }\n\n Ok(NoneType)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::{cell::RefCell, env, rc::Rc};\n", "file_path": "starlark/src/stdlib/breakpoint.rs", "rank": 62, "score": 211148.99862631524 }, { "content": "#[test]\n\nfn test_top_level_assignation() {\n\n assert_eq!(\n\n assert::parse(\"\\n_ASSIGNATION = 'top-level'\\n\"),\n\n \"_ASSIGNATION = \\\"top-level\\\"\\n\"\n\n );\n\n}\n\n\n", "file_path": "starlark/src/syntax/grammar_tests.rs", "rank": 63, "score": 209885.6921411213 }, { "content": "#[test]\n\nfn test_top_level_def() {\n\n assert_eq!(\n\n assert::parse(\"def toto():\\n pass\\n\"),\n\n \"def toto():\\n pass\\n\"\n\n );\n\n let mut a = Assert::new();\n\n a.dialect_set(|x| x.enable_def = false);\n\n a.parse_fail(\"!def toto():\\n pass\\n!\");\n\n // no new line at end of file\n\n assert_eq!(\n\n assert::parse(\"def toto():\\n pass\"),\n\n \"def toto():\\n pass\\n\"\n\n );\n\n assert_eq!(\n\n assert::parse(\"def toto():\\n pass\\ndef titi(): return 1\"),\n\n \"def toto():\\n pass\\ndef titi():\\n return 1\\n\"\n\n );\n\n assert_eq!(\n\n assert::parse(\"def toto():\\n pass\\n\\ndef titi(): return 1\"),\n\n \"def toto():\\n pass\\ndef titi():\\n return 1\\n\"\n\n );\n\n assert_eq!(assert::parse(\"def t():\\n\\n pass\"), \"def t():\\n pass\\n\");\n\n}\n\n\n", "file_path": "starlark/src/syntax/grammar_tests.rs", "rank": 64, "score": 209429.63109399876 }, { "content": "/// See [`Assert::pass`].\n\npub fn 
pass(program: &str) -> OwnedFrozenValue {\n\n Assert::new().pass(program)\n\n}\n\n\n", "file_path": "starlark/src/assert/assert.rs", "rank": 65, "score": 208069.44213465776 }, { "content": "def f6(*args): return args\n\n\n", "file_path": "starlark/src/eval/tests/call.rs", "rank": 66, "score": 205503.25135794372 }, { "content": "fn bad_type_equality(module: &AstModule, res: &mut Vec<LintT<Incompatibility>>) {\n\n let types = Lazy::force(&TYPES);\n\n fn check(\n\n codemap: &CodeMap,\n\n x: &AstExpr,\n\n types: &HashMap<&str, &str>,\n\n res: &mut Vec<LintT<Incompatibility>>,\n\n ) {\n\n match_bad_type_equality(codemap, x, types, res);\n\n x.visit_expr(|x| check(codemap, x, types, res));\n\n }\n\n module\n\n .statement\n\n .visit_expr(|x| check(&module.codemap, x, types, res));\n\n}\n\n\n", "file_path": "starlark/src/analysis/incompatible.rs", "rank": 67, "score": 205113.25193212126 }, { "content": "def f(*args, **kwargs):\n\n return (args, kwargs)\n\n\n\ny = f(id(1), id(2), x=id(3), *[id(4)], **dict(z=id(5)))\n\nassert_eq(y, ((1, 2, 4), dict(x=3, z=5)))\n\nassert_eq(r, [1,2,3,4,5])\n\n\"#,\n\n );\n\n}\n\n\n", "file_path": "starlark/src/eval/tests/call.rs", "rank": 68, "score": 204850.41851148324 }, { "content": "#[test]\n\nfn test_top_level_def_with_docstring() {\n\n assert_eq!(\n\n assert::parse(\n\n \"\\\"\\\"\\\"Top-level docstring\\\"\\\"\\\"\n\n\n", "file_path": "starlark/src/syntax/grammar_tests.rs", "rank": 69, "score": 204820.24558114674 }, { "content": "/// See [`Assert::parse_fail`].\n\npub fn parse_fail(program: &str) -> anyhow::Error {\n\n Assert::new().parse_fail(program)\n\n}\n", "file_path": "starlark/src/assert/assert.rs", "rank": 70, "score": 202678.06048297792 }, { "content": "def f5(a, **kwargs): return kwargs\n", "file_path": "starlark/src/eval/tests/call.rs", "rank": 71, "score": 201389.97522253252 }, { "content": "def f(x, *args, **kwargs):\n\n assert_eq(args, ())\n\n assert_eq(kwargs, {})\n\nf(1)\n\n\"#,\n\n );\n\n assert::fail(\n\n r#\"\n", "file_path": "starlark/src/eval/tests/call.rs", "rank": 72, "score": 199985.13078534574 }, { "content": "// Deliberately qualify the GlobalsBuild type to test that we can\n\nfn assert_star(builder: &mut crate::environment::GlobalsBuilder) {\n\n fn eq(a: Value, b: Value) -> NoneType {\n\n assert_equals(a, b)\n\n }\n\n\n\n fn ne(a: Value, b: Value) -> NoneType {\n\n assert_different(a, b)\n\n }\n\n\n\n fn lt(a: Value, b: Value) -> NoneType {\n\n assert_less_than(a, b)\n\n }\n\n\n\n fn contains(xs: Value, x: Value) -> NoneType {\n\n if !xs.is_in(x)? {\n\n Err(anyhow!(\"assert.contains: expected {} to be in {}\", x, xs))\n\n } else {\n\n Ok(NoneType)\n\n }\n\n }\n", "file_path": "starlark/src/assert/assert.rs", "rank": 73, "score": 199854.43155371933 }, { "content": "#[derive(Error, Debug)]\n\nenum DialectError {\n\n #[error(\"`def` is not allowed in this dialect\")]\n\n Def,\n\n #[error(\"`lambda` is not allowed in this dialect\")]\n\n Lambda,\n\n #[error(\"`load` is not allowed in this dialect\")]\n\n Load,\n\n #[error(\"* keyword-only-arguments is not allowed in this dialect\")]\n\n KeywordOnlyArguments,\n\n #[error(\"type annotations are not allowed in this dialect\")]\n\n Types,\n\n}\n\n\n\n/// Starlark language features to enable, e.g. 
[`Standard`](Dialect::Standard) to follow the Starlark standard.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct Dialect {\n\n /// Are `def` statements permitted.\n\n /// Enabled in both [`Standard`](Dialect::Standard) and [`Extended`](Dialect::Extended).\n\n pub enable_def: bool,\n\n /// Are `lambda` expressions permitted.\n", "file_path": "starlark/src/syntax/dialect.rs", "rank": 74, "score": 195926.74974528933 }, { "content": "// fail is kind of like a return with error\n\nfn is_fail(x: &AstExpr) -> bool {\n\n match &**x {\n\n Expr::Call(x, _) => match &***x {\n\n Expr::Identifier(name, _) => name.node == \"fail\",\n\n _ => false,\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 75, "score": 194110.47552778508 }, { "content": "def _ok():\n", "file_path": "starlark/src/analysis/names.rs", "rank": 76, "score": 186117.2723837914 }, { "content": "def uses_h():\n\n _h\n\n_h = []\n\n\"#,\n\n );\n\n let mut res = Vec::new();\n\n let scope = bind::scope(&m);\n\n unassigned_variable(&m.codemap, &scope, &mut res);\n\n let mut res = res.map(|x| x.problem.about());\n\n res.sort();\n\n assert_eq!(res, &[\"no1\", \"no2\", \"no3\"]);\n\n }\n\n\n\n #[test]\n\n fn test_lint_undefined() {\n\n let m = module(\n\n r#\"\n\nload(\"test\", imported = \"more\")\n\na = True + imported + no1\n", "file_path": "starlark/src/analysis/names.rs", "rank": 77, "score": 186112.8771673206 }, { "content": "# Also test function is inlined if it is defined after the caller.\n\ndef returns_list():\n\n return [10, True]\n\n\"#,\n\n )\n\n}\n", "file_path": "starlark/src/eval/tests/opt/def_inline.rs", "rank": 78, "score": 182667.1745204687 }, { "content": "def function(x):\n", "file_path": "starlark/src/eval/tests/def.rs", "rank": 79, "score": 180454.4954409633 }, { "content": "#[starlark_module]\n\nfn enum_type_methods(builder: &mut MethodsBuilder) {\n\n #[starlark(attribute)]\n\n fn r#type(this: Value) -> Value<'v> {\n\n let this = EnumType::from_value(this).unwrap();\n\n match this {\n\n Either::Left(x) => Ok(heap.alloc(x.typ.borrow().as_deref().unwrap_or(EnumValue::TYPE))),\n\n Either::Right(x) => Ok(heap.alloc(x.typ.as_deref().unwrap_or(EnumValue::TYPE))),\n\n }\n\n }\n\n\n\n fn values(this: Value) -> Value<'v> {\n\n let this = EnumType::from_value(this).unwrap();\n\n match this {\n\n Either::Left(x) => Ok(heap.alloc_list_iter(x.elements.keys().copied())),\n\n Either::Right(x) => Ok(heap.alloc_list_iter(x.elements.keys().map(|x| x.to_value()))),\n\n }\n\n }\n\n}\n\n\n\nimpl<'v, V: ValueLike<'v>> StarlarkValue<'v> for EnumValueGen<V>\n", "file_path": "starlark/src/values/types/enumeration.rs", "rank": 80, "score": 180232.174790642 }, { "content": "#[inline(always)]\n\nfn load_local<'v, const N: usize>(\n\n eval: &mut Evaluator<'v, '_>,\n\n slots: &[LocalSlotId; N],\n\n spans: FrozenRef<BcInstrSpans>,\n\n) -> Result<[Value<'v>; N], EvalException> {\n\n #[cold]\n\n #[inline(never)]\n\n fn fail<'v>(\n\n eval: &mut Evaluator<'v, '_>,\n\n index: usize,\n\n slot: LocalSlotId,\n\n spans: FrozenRef<BcInstrSpans>,\n\n ) -> EvalException {\n\n let err = eval.local_var_referenced_before_assignment(slot);\n\n let span = spans[index];\n\n add_span_to_expr_error(err, span, eval)\n\n }\n\n\n\n let mut values = MaybeUninit::uninit();\n\n #[allow(clippy::needless_range_loop)]\n", "file_path": "starlark/src/eval/bc/instr_impl.rs", "rank": 81, "score": 179640.8739561207 }, { "content": "fn subwriter<T: Display>(indent: &'static str, f: &mut fmt::Formatter, v: &T) -> fmt::Result 
{\n\n if f.alternate() {\n\n write!(indenter::indented(f).with_str(indent), \"{:#}\", v)\n\n } else {\n\n Display::fmt(v, f)\n\n }\n\n}\n\n\n", "file_path": "starlark/src/values/display.rs", "rank": 82, "score": 179626.67106941954 }, { "content": "fn is_mut_something(x: &Type, smth: &str) -> bool {\n\n match x {\n\n Type::Reference(TypeReference {\n\n mutability: Some(_),\n\n elem: x,\n\n ..\n\n }) => is_type_name(x, smth),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "starlark_derive/src/parse.rs", "rank": 83, "score": 179259.26412171294 }, { "content": "// Don't want to use a variable that has been defined to be ignored\n\nfn use_ignored(\n\n codemap: &CodeMap,\n\n scope: &Scope,\n\n root: Option<&Scope>,\n\n res: &mut Vec<LintT<NameWarning>>,\n\n) {\n\n match root {\n\n None => {\n\n // Things at the top level can be ignored and used\n\n for x in &scope.inner {\n\n if let Bind::Scope(x) = x {\n\n use_ignored(codemap, x, Some(scope), res)\n\n }\n\n }\n\n }\n\n Some(root) => {\n\n // You can only use _ variables which are defined at the root,\n\n // and thus must be free in this scope.\n\n // If you use _foo, but foo is already in this scope, you may have been avoiding shadowing.\n\n for x in &scope.inner {\n", "file_path": "starlark/src/analysis/names.rs", "rank": 84, "score": 179072.79693222488 }, { "content": "fn duplicate_assign(\n\n codemap: &CodeMap,\n\n scope: &Scope,\n\n top: bool,\n\n res: &mut Vec<LintT<NameWarning>>,\n\n) {\n\n // If I see two set's, without any intervening flow or get, the first one was pointless\n\n let mut warnings: HashMap<&str, _> = HashMap::new();\n\n let mut captured: HashSet<&str> = HashSet::new(); // those captured by child scopes\n\n for x in &scope.inner {\n\n match x {\n\n Bind::Set(reason, x) => {\n\n let ignored = !top && x.0.starts_with('_');\n\n if !ignored && !captured.contains(x.0.as_str()) {\n\n if let Some((span, typ)) = warnings.insert(&x.node.0, (x.span, *reason)) {\n\n res.push(NameWarning::unused(typ, codemap, span, x.0.clone()))\n\n }\n\n }\n\n }\n\n Bind::Get(x) => {\n", "file_path": "starlark/src/analysis/names.rs", "rank": 85, "score": 179038.54621775483 }, { "content": "pub fn compare_small_map<E, K, K2: Ord + Hash, V1, V2>(\n\n x: &SmallMap<K, V1>,\n\n y: &SmallMap<K, V2>,\n\n key: impl Fn(&K) -> K2,\n\n f: impl Fn(&V1, &V2) -> Result<Ordering, E>,\n\n) -> Result<Ordering, E> {\n\n Ok(cmp_chain! {\n\n x.len().cmp(&y.len()),\n\n x.iter()\n\n .sorted_by_key(|(k, _)| key(k))\n\n .try_cmp_by(\n\n y.iter().sorted_by_key(|(k, _)| key(k)),\n\n |(xk, xv), (yk, yv)| Ok(cmp_chain! { key(xk).cmp(&key(yk)), f(xv, yv)? 
})\n\n )?\n\n })\n\n}\n", "file_path": "starlark/src/values/comparison.rs", "rank": 86, "score": 178506.22415979565 }, { "content": "def ok():\n", "file_path": "starlark/src/analysis/flow.rs", "rank": 87, "score": 178276.97684034402 }, { "content": "def loop():\n\n xs = [1, 2, 3]\n\n for x in xs:\n\n if len(xs) == 3:\n\n xs.append(4)\n\nloop()\"#,\n\n \"mutate an iterable\",\n\n );\n\n}\n\n\n", "file_path": "starlark/src/eval/tests/mod.rs", "rank": 89, "score": 175279.48912782673 }, { "content": "#[test]\n\nfn test_span() {\n\n let expected = vec![\n\n (0, Newline, 1),\n\n (1, Def, 4),\n\n (5, Identifier(\"test\".to_owned()), 9),\n\n (9, OpeningRound, 10),\n\n (10, Identifier(\"a\".to_owned()), 11),\n\n (11, ClosingRound, 12),\n\n (12, Colon, 13),\n\n (13, Newline, 14),\n\n (14, Indent, 16),\n\n (16, Identifier(\"fail\".to_owned()), 20),\n\n (20, OpeningRound, 21),\n\n (21, Identifier(\"a\".to_owned()), 22),\n\n (22, ClosingRound, 23),\n\n (23, Newline, 24),\n\n (24, Newline, 25),\n\n (25, Dedent, 25),\n\n (25, Identifier(\"test\".to_owned()), 29),\n\n (29, OpeningRound, 30),\n\n (30, String(\"abc\".to_owned()), 35),\n\n (35, ClosingRound, 36),\n\n (36, Newline, 37),\n\n (37, Newline, 37),\n\n ];\n\n\n\n let actual = assert::lex_tokens(\n\n r#\"\n", "file_path": "starlark/src/syntax/lexer_tests.rs", "rank": 90, "score": 174590.60446849174 }, { "content": "#[test]\n\nfn test_return() {\n\n assert_eq!(\n\n assert::parse(\"def fn(): return 1\"),\n\n \"def fn():\\n return 1\\n\"\n\n );\n\n assert_eq!(\n\n assert::parse(\"def fn(): return a()\"),\n\n \"def fn():\\n return a()\\n\"\n\n );\n\n assert_eq!(assert::parse(\"def fn(): return\"), \"def fn():\\n return\\n\");\n\n}\n\n\n\n// Regression test for https://github.com/google/starlark-rust/issues/44.\n", "file_path": "starlark/src/syntax/grammar_tests.rs", "rank": 91, "score": 174588.60878647288 }, { "content": "#[test]\n\nfn test_tuples() {\n\n assert_eq!(assert::parse(\"a = (-1)\"), \"a = -1\\n\"); // Not a tuple\n\n assert_eq!(assert::parse(\"a = (+1,)\"), \"a = (+1,)\\n\"); // But this is one\n\n assert_eq!(assert::parse(\"a = ()\"), \"a = ()\\n\");\n\n}\n\n\n", "file_path": "starlark/src/syntax/grammar_tests.rs", "rank": 92, "score": 174576.63573105296 }, { "content": "def g(): f()\n\ng()\n\n\"#,\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_debug_variables() {\n\n let mut a = assert::Assert::new();\n\n a.globals_add(debugger);\n\n a.pass(\n\n r#\"\n\nroot = 12\n\n_ignore = [x for x in [True]]\n", "file_path": "starlark/src/debug/inspect.rs", "rank": 93, "score": 174141.63470929113 }, { "content": "def _impl(ctx):\n\n # Print Hello, World!\n\n print('Hello, World!')\n\n\"\n\n ),\n\n \"\\\"A docstring.\\\" \\n \\n def _impl ( ctx ) : \\n \\t print ( \\\"Hello, World!\\\" ) \\n \\n #dedent\"\n\n );\n\n}\n\n\n", "file_path": "starlark/src/syntax/lexer_tests.rs", "rank": 94, "score": 173741.0876725649 }, { "content": "#[test]\n\nfn test_def_list_inlined() {\n\n test_instrs(\n\n &[BcOpcode::ListOfConsts, BcOpcode::Return],\n\n r#\"\n", "file_path": "starlark/src/eval/tests/opt/def_inline.rs", "rank": 95, "score": 173082.14627258843 }, { "content": "fn freeze_impl(name: &Ident, data: &Data) -> TokenStream {\n\n match data {\n\n Data::Struct(data) => freeze_struct(name, data),\n\n Data::Enum(data) => freeze_enum(name, data),\n\n Data::Union(_) => unimplemented!(\"Can't derive freeze for unions\"),\n\n }\n\n}\n", "file_path": "starlark_derive/src/freeze.rs", "rank": 96, "score": 172859.745632348 }, { "content": "/*\n\n * Copyright 2019 The Starlark in 
Rust Authors.\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::{\n\n codemap::Span,\n\n syntax::{\n\n ast::{AstStmt, Stmt},\n\n AstModule,\n\n },\n\n};\n\n\n", "file_path": "starlark/src/debug/breakpoint.rs", "rank": 97, "score": 148.0857182980679 }, { "content": "/*\n\n * Copyright 2019 The Starlark in Rust Authors.\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub use types::Lint;\n\n\n\nuse crate::{analysis::types::LintT, syntax::AstModule};\n", "file_path": "starlark/src/analysis/mod.rs", "rank": 98, "score": 138.8409695427693 }, { "content": "/*\n\n * Copyright 2019 The Starlark in Rust Authors.\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n//! Array type used in implementation of `List`.\n\n//!\n\n//! This object is used internally, and not visible outside of `starlark` crate.\n", "file_path": "starlark/src/values/types/array.rs", "rank": 99, "score": 137.96631948928442 } ]
Rust
src/utils.rs
rustbunker/youki
2f3d8062ec006057e04b13c6697ebabf7cd5f9bd
use std::env; use std::ffi::CString; use std::fs; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::time::Duration; use anyhow::Context; use anyhow::{bail, Result}; use nix::unistd; pub trait PathBufExt { fn as_in_container(&self) -> Result<PathBuf>; fn join_absolute_path(&self, p: &Path) -> Result<PathBuf>; } impl PathBufExt for PathBuf { fn as_in_container(&self) -> Result<PathBuf> { if self.is_relative() { bail!("Relative path cannot be converted to the path in the container.") } else { let path_string = self.to_string_lossy().into_owned(); Ok(PathBuf::from(path_string[1..].to_string())) } } fn join_absolute_path(&self, p: &Path) -> Result<PathBuf> { if !p.is_absolute() && !p.as_os_str().is_empty() { bail!( "cannot join {:?} because it is not the absolute path.", p.display() ) } Ok(PathBuf::from(format!("{}{}", self.display(), p.display()))) } } pub fn do_exec(path: impl AsRef<Path>, args: &[String], envs: &[String]) -> Result<()> { let p = CString::new(path.as_ref().to_string_lossy().to_string())?; let a: Vec<CString> = args .iter() .map(|s| CString::new(s.to_string()).unwrap_or_default()) .collect(); env::vars().for_each(|(key, _value)| std::env::remove_var(key)); envs.iter().for_each(|e| { let mut split = e.split('='); if let Some(key) = split.next() { let value: String = split.collect::<Vec<&str>>().join("="); env::set_var(key, value) }; }); unistd::execvp(&p, &a)?; Ok(()) } pub fn set_name(_name: &str) -> Result<()> { Ok(()) } pub fn get_cgroup_path(cgroups_path: &Option<PathBuf>, container_id: &str) -> PathBuf { match cgroups_path { Some(cpath) => cpath.clone(), None => PathBuf::from(format!("/youki/{}", container_id)), } } pub fn delete_with_retry<P: AsRef<Path>>(path: P) -> Result<()> { let mut attempts = 0; let mut delay = Duration::from_millis(10); let path = path.as_ref(); while attempts < 5 { if fs::remove_dir(path).is_ok() { return Ok(()); } std::thread::sleep(delay); attempts += attempts; delay *= attempts; } bail!("could not delete {:?}", path) } pub fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); fs::write(path, contents).with_context(|| format!("failed to write to {:?}", path))?; Ok(()) } pub struct TempDir { path: Option<PathBuf>, } impl TempDir { pub fn new<P: Into<PathBuf>>(path: P) -> Result<Self> { let p = path.into(); std::fs::create_dir_all(&p)?; Ok(Self { path: Some(p) }) } pub fn path(&self) -> &Path { self.path .as_ref() .expect("temp dir has already been removed") } pub fn remove(&mut self) { if let Some(p) = &self.path { let _ = fs::remove_dir_all(p); self.path = None; } } } impl Drop for TempDir { fn drop(&mut self) { self.remove(); } } impl AsRef<Path> for TempDir { fn as_ref(&self) -> &Path { self.path() } } impl Deref for TempDir { type Target = Path; fn deref(&self) -> &Self::Target { self.path() } } pub fn create_temp_dir(test_name: &str) -> Result<TempDir> { let dir = TempDir::new(std::env::temp_dir().join(test_name))?; Ok(dir) } #[cfg(test)] mod tests { use super::*; #[test] fn test_join_absolute_path() { assert_eq!( PathBuf::from("sample/a/") .join_absolute_path(&PathBuf::from("/b")) .unwrap(), PathBuf::from("sample/a/b") ); } #[test] fn test_join_absolute_path_error() { assert_eq!( PathBuf::from("sample/a/") .join_absolute_path(&PathBuf::from("b/c")) .is_err(), true ); } #[test] fn test_get_cgroup_path() { let cid = "sample_container_id"; assert_eq!( get_cgroup_path(&None, cid), PathBuf::from("/youki/sample_container_id") ); assert_eq!( 
get_cgroup_path(&Some(PathBuf::from("/youki")), cid), PathBuf::from("/youki") ); } }
use std::env; use std::ffi::CString; use std::fs; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::time::Duration; use anyhow::Context; use anyhow::{bail, Result}; use nix::unistd; pub trait PathBufExt { fn as_in_container(&self) -> Result<PathBuf>; fn join_absolute_path(&self, p: &Path) -> Result<PathBuf>; } impl PathBufExt for PathBuf { fn as_in_conta
.unwrap(), PathBuf::from("sample/a/b") ); } #[test] fn test_join_absolute_path_error() { assert_eq!( PathBuf::from("sample/a/") .join_absolute_path(&PathBuf::from("b/c")) .is_err(), true ); } #[test] fn test_get_cgroup_path() { let cid = "sample_container_id"; assert_eq!( get_cgroup_path(&None, cid), PathBuf::from("/youki/sample_container_id") ); assert_eq!( get_cgroup_path(&Some(PathBuf::from("/youki")), cid), PathBuf::from("/youki") ); } }
iner(&self) -> Result<PathBuf> { if self.is_relative() { bail!("Relative path cannot be converted to the path in the container.") } else { let path_string = self.to_string_lossy().into_owned(); Ok(PathBuf::from(path_string[1..].to_string())) } } fn join_absolute_path(&self, p: &Path) -> Result<PathBuf> { if !p.is_absolute() && !p.as_os_str().is_empty() { bail!( "cannot join {:?} because it is not the absolute path.", p.display() ) } Ok(PathBuf::from(format!("{}{}", self.display(), p.display()))) } } pub fn do_exec(path: impl AsRef<Path>, args: &[String], envs: &[String]) -> Result<()> { let p = CString::new(path.as_ref().to_string_lossy().to_string())?; let a: Vec<CString> = args .iter() .map(|s| CString::new(s.to_string()).unwrap_or_default()) .collect(); env::vars().for_each(|(key, _value)| std::env::remove_var(key)); envs.iter().for_each(|e| { let mut split = e.split('='); if let Some(key) = split.next() { let value: String = split.collect::<Vec<&str>>().join("="); env::set_var(key, value) }; }); unistd::execvp(&p, &a)?; Ok(()) } pub fn set_name(_name: &str) -> Result<()> { Ok(()) } pub fn get_cgroup_path(cgroups_path: &Option<PathBuf>, container_id: &str) -> PathBuf { match cgroups_path { Some(cpath) => cpath.clone(), None => PathBuf::from(format!("/youki/{}", container_id)), } } pub fn delete_with_retry<P: AsRef<Path>>(path: P) -> Result<()> { let mut attempts = 0; let mut delay = Duration::from_millis(10); let path = path.as_ref(); while attempts < 5 { if fs::remove_dir(path).is_ok() { return Ok(()); } std::thread::sleep(delay); attempts += attempts; delay *= attempts; } bail!("could not delete {:?}", path) } pub fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); fs::write(path, contents).with_context(|| format!("failed to write to {:?}", path))?; Ok(()) } pub struct TempDir { path: Option<PathBuf>, } impl TempDir { pub fn new<P: Into<PathBuf>>(path: P) -> Result<Self> { let p = path.into(); std::fs::create_dir_all(&p)?; Ok(Self { path: Some(p) }) } pub fn path(&self) -> &Path { self.path .as_ref() .expect("temp dir has already been removed") } pub fn remove(&mut self) { if let Some(p) = &self.path { let _ = fs::remove_dir_all(p); self.path = None; } } } impl Drop for TempDir { fn drop(&mut self) { self.remove(); } } impl AsRef<Path> for TempDir { fn as_ref(&self) -> &Path { self.path() } } impl Deref for TempDir { type Target = Path; fn deref(&self) -> &Self::Target { self.path() } } pub fn create_temp_dir(test_name: &str) -> Result<TempDir> { let dir = TempDir::new(std::env::temp_dir().join(test_name))?; Ok(dir) } #[cfg(test)] mod tests { use super::*; #[test] fn test_join_absolute_path() { assert_eq!( PathBuf::from("sample/a/") .join_absolute_path(&PathBuf::from("/b"))
random
[ { "content": "#[inline]\n\npub fn write_cgroup_file_str<P: AsRef<Path>>(path: P, data: &str) -> Result<()> {\n\n fs::OpenOptions::new()\n\n .create(false)\n\n .write(true)\n\n .truncate(false)\n\n .open(path.as_ref())\n\n .with_context(|| format!(\"failed to open {:?}\", path.as_ref()))?\n\n .write_all(data.as_bytes())\n\n .with_context(|| format!(\"failed to write to {:?}\", path.as_ref()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cgroups/common.rs", "rank": 1, "score": 201285.67837515107 }, { "content": "#[inline]\n\npub fn write_cgroup_file<P: AsRef<Path>, T: ToString>(path: P, data: T) -> Result<()> {\n\n fs::OpenOptions::new()\n\n .create(false)\n\n .write(true)\n\n .truncate(false)\n\n .open(path.as_ref())\n\n .with_context(|| format!(\"failed to open {:?}\", path.as_ref()))?\n\n .write_all(data.to_string().as_bytes())\n\n .with_context(|| format!(\"failed to write to {:?}\", path.as_ref()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cgroups/common.rs", "rank": 3, "score": 192467.25384128944 }, { "content": "pub fn create_cgroup_manager<P: Into<PathBuf>>(cgroup_path: P) -> Result<Box<dyn CgroupManager>> {\n\n let cgroup_mount = Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup\");\n\n\n\n let cgroup2_mount = Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup2\");\n\n\n\n match (cgroup_mount, cgroup2_mount) {\n\n (Some(_), None) => {\n\n log::info!(\"cgroup manager V1 will be used\");\n\n Ok(Box::new(v1::manager::Manager::new(cgroup_path.into())?))\n\n }\n\n (None, Some(cgroup2)) => {\n\n log::info!(\"cgroup manager V2 will be used\");\n\n Ok(Box::new(v2::manager::Manager::new(\n\n cgroup2.mount_point,\n", "file_path": "src/cgroups/common.rs", "rank": 5, "score": 185657.08295525383 }, { "content": "/// Function to perform the first fork for in order to run the container process\n\npub fn fork_first<P: AsRef<Path>>(\n\n pid_file: Option<P>,\n\n rootless: Option<Rootless>,\n\n linux: &oci_spec::Linux,\n\n container: &Container,\n\n cmanager: Box<dyn CgroupManager>,\n\n) -> Result<Process> {\n\n // create new parent process structure\n\n let (mut parent, parent_channel) = parent::ParentProcess::new(rootless.clone())?;\n\n // create a new child process structure with sending end of parent process\n\n let mut child = child::ChildProcess::new(parent_channel)?;\n\n\n\n // fork the process\n\n match unsafe { unistd::fork()? 
} {\n\n // in the child process\n\n unistd::ForkResult::Child => {\n\n // if Out-of-memory score adjustment is set in specification.\n\n // set the score value for the current process\n\n // check https://dev.to/rrampage/surviving-the-linux-oom-killer-2ki9 for some more information\n\n if let Some(ref r) = linux.resources {\n", "file_path": "src/process/fork.rs", "rank": 6, "score": 171264.13167414692 }, { "content": "pub fn reset_effective(command: &impl Command) -> Result<()> {\n\n log::debug!(\"reset all caps\");\n\n command.set_capability(CapSet::Effective, &caps::all())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/capabilities.rs", "rank": 7, "score": 165620.05094624968 }, { "content": "pub fn get_unified_mount_point() -> Result<PathBuf> {\n\n Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup2\")\n\n .map(|m| m.mount_point)\n\n .ok_or_else(|| anyhow!(\"could not find mountpoint for unified\"))\n\n}\n", "file_path": "src/cgroups/v2/util.rs", "rank": 8, "score": 162174.3739435258 }, { "content": "pub fn init(log_file: Option<PathBuf>) -> Result<()> {\n\n let _log_file = LOG_FILE.get_or_init(|| -> Option<File> {\n\n let level_filter = if let Ok(log_level_str) = env::var(\"YOUKI_LOG_LEVEL\") {\n\n LevelFilter::from_str(&log_level_str).unwrap_or(LevelFilter::Warn)\n\n } else {\n\n LevelFilter::Warn\n\n };\n\n\n\n let logger = YOUKI_LOGGER.get_or_init(|| YoukiLogger::new(level_filter.to_level()));\n\n log::set_logger(logger)\n\n .map(|()| log::set_max_level(level_filter))\n\n .expect(\"set logger failed\");\n\n log_file.as_ref().map(|log_file_path| {\n\n OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .truncate(false)\n\n .open(log_file_path)\n\n .expect(\"failed opening log file \")\n\n })\n", "file_path": "src/logger.rs", "rank": 9, "score": 161057.3918230246 }, { "content": "/// Looks up the location of the newuidmap and newgidmap binaries which\n\n/// are required to write multiple user/group mappings\n\npub fn lookup_map_binaries(spec: &Linux) -> Result<Option<(PathBuf, PathBuf)>> {\n\n if spec.uid_mappings.len() == 1 && spec.uid_mappings.len() == 1 {\n\n return Ok(None);\n\n }\n\n\n\n let uidmap = lookup_map_binary(\"newuidmap\")?;\n\n let gidmap = lookup_map_binary(\"newgidmap\")?;\n\n\n\n match (uidmap, gidmap) {\n\n (Some(newuidmap), Some(newgidmap)) => Ok(Some((newuidmap, newgidmap))),\n\n _ => bail!(\"newuidmap/newgidmap binaries could not be found in path. 
This is required if multiple id mappings are specified\"),\n\n }\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 10, "score": 158954.1701945304 }, { "content": "pub fn set_fixture(temp_dir: &Path, filename: &str, val: &str) -> Result<PathBuf> {\n\n let full_path = temp_dir.join(filename);\n\n\n\n std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .truncate(true)\n\n .open(&full_path)?\n\n .write_all(val.as_bytes())?;\n\n\n\n Ok(full_path)\n\n}\n\n\n\npub struct LinuxCpuBuilder {\n\n resource: LinuxCpu,\n\n}\n\n\n\nimpl LinuxCpuBuilder {\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "src/cgroups/test.rs", "rank": 12, "score": 156413.37100211254 }, { "content": "pub fn drop_privileges(cs: &LinuxCapabilities, command: &impl Command) -> Result<()> {\n\n let all = caps::all();\n\n log::debug!(\"dropping bounding capabilities to {:?}\", cs.bounding);\n\n for c in all.difference(&to_set(&cs.bounding)) {\n\n match c {\n\n Capability::CAP_PERFMON | Capability::CAP_CHECKPOINT_RESTORE | Capability::CAP_BPF => {\n\n log::warn!(\"{:?} doesn't support.\", c);\n\n continue;\n\n }\n\n _ => caps::drop(None, CapSet::Bounding, *c)?,\n\n }\n\n }\n\n\n\n command.set_capability(CapSet::Effective, &to_set(&cs.effective))?;\n\n command.set_capability(CapSet::Permitted, &to_set(&cs.permitted))?;\n\n command.set_capability(CapSet::Inheritable, &to_set(&cs.inheritable))?;\n\n\n\n if let Err(e) = command.set_capability(CapSet::Ambient, &to_set(&cs.ambient)) {\n\n log::error!(\"failed to set ambient capabilities: {}\", e);\n\n }\n", "file_path": "src/capabilities.rs", "rank": 13, "score": 148734.0642358727 }, { "content": "pub fn get_subsystem_mount_points(subsystem: &str) -> Result<PathBuf> {\n\n Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| {\n\n if m.fs_type == \"cgroup\" {\n\n // Some systems mount net_prio and net_cls in the same directory\n\n // other systems mount them in their own diretories. 
This\n\n // should handle both cases.\n\n if subsystem == \"net_cls\" {\n\n return m.mount_point.ends_with(\"net_cls,net_prio\")\n\n || m.mount_point.ends_with(\"net_prio,net_cls\")\n\n || m.mount_point.ends_with(\"net_cls\");\n\n } else if subsystem == \"net_prio\" {\n\n return m.mount_point.ends_with(\"net_cls,net_prio\")\n\n || m.mount_point.ends_with(\"net_prio,net_cls\")\n\n || m.mount_point.ends_with(\"net_prio\");\n\n }\n\n\n\n if subsystem == \"cpu\" {\n", "file_path": "src/cgroups/v1/util.rs", "rank": 14, "score": 148345.64535080825 }, { "content": "fn try_read_os_from<P: AsRef<Path>>(path: P) -> Option<String> {\n\n let os_release = path.as_ref();\n\n if !os_release.exists() {\n\n return None;\n\n }\n\n\n\n if let Ok(release_content) = fs::read_to_string(path) {\n\n let pretty = find_parameter(&release_content, \"PRETTY_NAME\");\n\n\n\n if let Some(pretty) = pretty {\n\n return Some(pretty.trim_matches('\"').to_owned());\n\n }\n\n\n\n let name = find_parameter(&release_content, \"NAME\");\n\n let version = find_parameter(&release_content, \"VERSION\");\n\n\n\n if let (Some(name), Some(version)) = (name, version) {\n\n return Some(format!(\n\n \"{} {}\",\n\n name.trim_matches('\"'),\n\n version.trim_matches('\"')\n\n ));\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 15, "score": 147195.1355234487 }, { "content": "pub fn list_subsystem_mount_points() -> Result<HashMap<String, PathBuf>> {\n\n let mut mount_paths = HashMap::with_capacity(CONTROLLERS.len());\n\n\n\n for controller in CONTROLLERS {\n\n if let Ok(mount_point) = get_subsystem_mount_points(&controller.to_string()) {\n\n mount_paths.insert(controller.to_string(), mount_point);\n\n }\n\n }\n\n\n\n Ok(mount_paths)\n\n}\n\n\n", "file_path": "src/cgroups/v1/util.rs", "rank": 16, "score": 145651.90425607373 }, { "content": "pub fn prepare_rootfs(spec: &Spec, rootfs: &Path, bind_devices: bool) -> Result<()> {\n\n let mut flags = MsFlags::MS_REC;\n\n match spec.linux {\n\n Some(ref linux) => match linux.rootfs_propagation.as_ref() {\n\n \"shared\" => flags |= MsFlags::MS_SHARED,\n\n \"private\" => flags |= MsFlags::MS_PRIVATE,\n\n \"slave\" | \"\" => flags |= MsFlags::MS_SLAVE,\n\n _ => panic!(),\n\n },\n\n None => flags |= MsFlags::MS_SLAVE,\n\n };\n\n nix_mount(None::<&str>, \"/\", None::<&str>, flags, None::<&str>)?;\n\n\n\n log::debug!(\"mount root fs {:?}\", rootfs);\n\n nix_mount::<Path, Path, str, str>(\n\n Some(&rootfs),\n\n &rootfs,\n\n None::<&str>,\n\n MsFlags::MS_BIND | MsFlags::MS_REC,\n\n None::<&str>,\n", "file_path": "src/rootfs.rs", "rank": 17, "score": 142159.9948265282 }, { "content": "/// Fork the process and actually start the container process\n\nfn run_container<P: AsRef<Path>>(\n\n pid_file: Option<P>,\n\n notify_socket: &mut NotifyListener,\n\n rootfs: PathBuf,\n\n spec: oci_spec::Spec,\n\n csocketfd: Option<FileDescriptor>,\n\n container: Container,\n\n command: impl Command,\n\n) -> Result<Process> {\n\n // disable core dump for the process, check https://man7.org/linux/man-pages/man2/prctl.2.html for more information\n\n prctl::set_dumpable(false).unwrap();\n\n\n\n // get Linux specific section of OCI spec,\n\n // refer https://github.com/opencontainers/runtime-spec/blob/master/config-linux.md for more information\n\n let linux = spec.linux.as_ref().unwrap();\n\n let namespaces: Namespaces = linux.namespaces.clone().into();\n\n\n\n let rootless = if should_use_rootless() {\n\n log::debug!(\"rootless container should be created\");\n\n log::warn!(\n", "file_path": 
"src/create.rs", "rank": 18, "score": 138781.26907665064 }, { "content": "fn setup_ptmx(rootfs: &Path) -> Result<()> {\n\n if let Err(e) = remove_file(rootfs.join(\"dev/ptmx\")) {\n\n if e.kind() != ::std::io::ErrorKind::NotFound {\n\n bail!(\"could not delete /dev/ptmx\")\n\n }\n\n }\n\n symlink(\"pts/ptmx\", rootfs.join(\"dev/ptmx\"))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 19, "score": 135677.6663337898 }, { "content": "fn setup_default_symlinks(rootfs: &Path) -> Result<()> {\n\n if Path::new(\"/proc/kcore\").exists() {\n\n symlink(\"/proc/kcore\", \"dev/kcore\")?;\n\n }\n\n\n\n let defaults = [\n\n (\"/proc/self/fd\", \"dev/fd\"),\n\n (\"/proc/self/fd/0\", \"dev/stdin\"),\n\n (\"/proc/self/fd/1\", \"dev/stdout\"),\n\n (\"/proc/self/fd/2\", \"dev/stderr\"),\n\n ];\n\n for &(src, dst) in defaults.iter() {\n\n symlink(src, rootfs.join(dst))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 20, "score": 132384.3005233528 }, { "content": "/// Validates that the spec contains the required information for\n\n/// running in rootless mode\n\npub fn validate(spec: &Spec) -> Result<()> {\n\n let linux = spec.linux.as_ref().unwrap();\n\n\n\n if linux.uid_mappings.is_empty() {\n\n bail!(\"rootless containers require at least one uid mapping\");\n\n }\n\n\n\n if linux.gid_mappings.is_empty() {\n\n bail!(\"rootless containers require at least one gid mapping\")\n\n }\n\n\n\n let namespaces: Namespaces = linux.namespaces.clone().into();\n\n if !namespaces.clone_flags.contains(CloneFlags::CLONE_NEWUSER) {\n\n bail!(\"rootless containers require the specification of a user namespace\");\n\n }\n\n\n\n validate_mounts(&spec.mounts, &linux.uid_mappings, &linux.gid_mappings)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 21, "score": 128938.49798053606 }, { "content": "pub fn from_str(signal: &str) -> Result<Signal> {\n\n use Signal::*;\n\n Ok(match signal.to_ascii_uppercase().as_str() {\n\n \"1\" | \"HUP\" | \"SIGHUP\" => Signal::SIGHUP,\n\n \"2\" | \"INT\" | \"SIGINT\" => Signal::SIGINT,\n\n \"3\" | \"QUIT\" | \"SIGQUIT\" => Signal::SIGQUIT,\n\n \"4\" | \"ILL\" | \"SIGILL\" => Signal::SIGILL,\n\n \"5\" | \"BUS\" | \"SIGBUS\" => Signal::SIGBUS,\n\n \"6\" | \"ABRT\" | \"IOT\" | \"SIGABRT\" | \"SIGIOT\" => Signal::SIGABRT,\n\n \"7\" | \"TRAP\" | \"SIGTRAP\" => Signal::SIGTRAP,\n\n \"8\" | \"FPE\" | \"SIGFPE\" => Signal::SIGFPE,\n\n \"9\" | \"KILL\" | \"SIGKILL\" => Signal::SIGKILL,\n\n \"10\" | \"USR1\" | \"SIGUSR1\" => Signal::SIGUSR1,\n\n \"11\" | \"SEGV\" | \"SIGSEGV\" => SIGSEGV,\n\n \"12\" | \"USR2\" | \"SIGUSR2\" => SIGUSR2,\n\n \"13\" | \"PIPE\" | \"SIGPIPE\" => SIGPIPE,\n\n \"14\" | \"ALRM\" | \"SIGALRM\" => SIGALRM,\n\n \"15\" | \"TERM\" | \"SIGTERM\" => SIGTERM,\n\n \"16\" | \"STKFLT\" | \"SIGSTKFLT\" => SIGSTKFLT,\n\n \"17\" | \"CHLD\" | \"SIGCHLD\" => SIGCHLD,\n", "file_path": "src/signal.rs", "rank": 24, "score": 123083.72885599242 }, { "content": "pub fn setup_console(console_fd: FileDescriptor) -> Result<()> {\n\n // You can also access pty master, but it is better to use the API.\n\n // ref. 
https://github.com/containerd/containerd/blob/261c107ffc4ff681bc73988f64e3f60c32233b37/vendor/github.com/containerd/go-runc/console.go#L139-L154\n\n let openpty_result = nix::pty::openpty(None, None)?;\n\n let pty_name: &[u8] = b\"/dev/ptmx\";\n\n let iov = [uio::IoVec::from_slice(pty_name)];\n\n let fds = [openpty_result.master];\n\n let cmsg = socket::ControlMessage::ScmRights(&fds);\n\n socket::sendmsg(\n\n console_fd.as_raw_fd(),\n\n &iov,\n\n &[cmsg],\n\n socket::MsgFlags::empty(),\n\n None,\n\n )?;\n\n\n\n setsid()?;\n\n if unsafe { libc::ioctl(openpty_result.slave, libc::TIOCSCTTY) } < 0 {\n\n log::warn!(\"could not TIOCSCTTY\");\n\n };\n", "file_path": "src/tty.rs", "rank": 25, "score": 119795.98996494043 }, { "content": "/// Checks if rootless mode should be used\n\npub fn should_use_rootless() -> bool {\n\n if !nix::unistd::geteuid().is_root() {\n\n return true;\n\n }\n\n\n\n if let Ok(\"true\") = std::env::var(\"YOUKI_USE_ROOTLESS\").as_deref() {\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 26, "score": 119721.368323625 }, { "content": "fn lookup_map_binary(binary: &str) -> Result<Option<PathBuf>> {\n\n let paths = env::var(\"PATH\")?;\n\n Ok(paths\n\n .split_terminator(':')\n\n .find(|p| PathBuf::from(p).join(binary).exists())\n\n .map(PathBuf::from))\n\n}\n", "file_path": "src/rootless.rs", "rank": 27, "score": 118144.28337483323 }, { "content": "pub fn get_supported_cgroup_fs() -> Result<Vec<Cgroup>> {\n\n let cgroup_mount = Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup\");\n\n\n\n let cgroup2_mount = Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup2\");\n\n\n\n let mut cgroups = vec![];\n\n if cgroup_mount.is_some() {\n\n cgroups.push(Cgroup::V1);\n\n }\n\n\n\n if cgroup2_mount.is_some() {\n\n cgroups.push(Cgroup::V2);\n\n }\n\n\n\n Ok(cgroups)\n\n}\n\n\n", "file_path": "src/cgroups/common.rs", "rank": 28, "score": 117171.50675012998 }, { "content": "/// Function to perform the second fork, which will spawn the actual container process\n\npub fn fork_init(mut child_process: ChildProcess) -> Result<Process> {\n\n // setup sockets for init process\n\n let sender_for_child = child_process.setup_pipe()?;\n\n // for the process into current process (C1) (which is child of first_fork) and init process\n\n match unsafe { unistd::fork()? } {\n\n // if it is child process, create new InitProcess structure and return\n\n unistd::ForkResult::Child => Ok(Process::Init(InitProcess::new(sender_for_child))),\n\n // in the forking process C1\n\n unistd::ForkResult::Parent { child } => {\n\n // wait for init process to be ready\n\n child_process.wait_for_init_ready()?;\n\n // notify the parent process (original youki process) that init process is forked and ready\n\n child_process.notify_parent(child)?;\n\n\n\n // wait for the init process, which is container process, to change state\n\n // check https://man7.org/linux/man-pages/man3/wait.3p.html for more information\n\n match waitpid(child, None)? 
{\n\n // if normally exited\n\n WaitStatus::Exited(pid, status) => {\n\n log::debug!(\"exited pid: {:?}, status: {:?}\", pid, status);\n", "file_path": "src/process/fork.rs", "rank": 30, "score": 107574.0623765694 }, { "content": "/// This specifies various kernel/other functionalities required for\n\n/// container management\n\npub trait Command {\n\n fn as_any(&self) -> &dyn Any;\n\n fn pivot_rootfs(&self, path: &Path) -> Result<()>;\n\n fn set_ns(&self, rawfd: i32, nstype: CloneFlags) -> Result<()>;\n\n fn set_id(&self, uid: Uid, gid: Gid) -> Result<()>;\n\n fn unshare(&self, flags: CloneFlags) -> Result<()>;\n\n fn set_capability(&self, cset: CapSet, value: &CapsHashSet) -> Result<(), CapsError>;\n\n fn set_hostname(&self, hostname: &str) -> Result<()>;\n\n fn set_rlimit(&self, rlimit: &LinuxRlimit) -> Result<()>;\n\n}\n", "file_path": "src/command/command.rs", "rank": 31, "score": 106136.49635047872 }, { "content": "pub fn setup(testname: &str, cgroup_file: &str) -> (TempDir, PathBuf) {\n\n let tmp = create_temp_dir(testname).expect(\"create temp directory for test\");\n\n let cgroup_file = set_fixture(&tmp, cgroup_file, \"\")\n\n .unwrap_or_else(|_| panic!(\"set test fixture for {}\", cgroup_file));\n\n\n\n (tmp, cgroup_file)\n\n}\n\n\n", "file_path": "src/cgroups/test.rs", "rank": 32, "score": 105532.05431436814 }, { "content": "pub trait Controller {\n\n fn apply(linux_resources: &LinuxResources, cgroup_root: &Path, pid: Pid) -> Result<()>;\n\n}\n", "file_path": "src/cgroups/v1/controller.rs", "rank": 33, "score": 103528.20535149402 }, { "content": "pub trait CgroupManager {\n\n fn apply(&self, linux_resources: &LinuxResources, pid: Pid) -> Result<()>;\n\n fn remove(&self) -> Result<()>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Cgroup {\n\n V1,\n\n V2,\n\n}\n\n\n\nimpl Display for Cgroup {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let print = match *self {\n\n Cgroup::V1 => \"v1\",\n\n Cgroup::V2 => \"v2\",\n\n };\n\n\n\n write!(f, \"{}\", print)\n\n }\n\n}\n\n\n", "file_path": "src/cgroups/common.rs", "rank": 34, "score": 103528.20535149402 }, { "content": "pub trait Controller {\n\n fn apply(linux_resources: &LinuxResources, cgroup_path: &Path) -> Result<()>;\n\n}\n", "file_path": "src/cgroups/v2/controller.rs", "rank": 35, "score": 103528.20535149402 }, { "content": "/// This is the entry point in the container runtime. The binary is run by a high-level container runtime,\n\n/// with various flags passed. This parses the flags, creates and manages appropriate resources.\n\nfn main() -> Result<()> {\n\n let opts = Opts::parse();\n\n\n\n if let Err(e) = youki::logger::init(opts.log) {\n\n eprintln!(\"log init failed: {:?}\", e);\n\n }\n\n\n\n let root_path = if should_use_rootless() && opts.root.eq(&PathBuf::from(\"/run/youki\")) {\n\n PathBuf::from(\"/tmp/rootless\")\n\n } else {\n\n PathBuf::from(&opts.root)\n\n };\n\n fs::create_dir_all(&root_path)?;\n\n\n\n match opts.subcmd {\n\n SubCommand::Create(create) => create.exec(root_path, LinuxCommand),\n\n SubCommand::Start(start) => start.exec(root_path),\n\n SubCommand::Kill(kill) => {\n\n // resolves relative paths, symbolic links etc. 
and get complete path\n\n let root_path = fs::canonicalize(root_path)?;\n", "file_path": "src/main.rs", "rank": 36, "score": 98862.97488920056 }, { "content": "pub fn print_kernel() {\n\n let uname = nix::sys::utsname::uname();\n\n println!(\"{:<18}{}\", \"Kernel-Release\", uname.release());\n\n println!(\"{:<18}{}\", \"Kernel-Version\", uname.version());\n\n println!(\"{:<18}{}\", \"Architecture\", uname.machine());\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 37, "score": 94563.97498060776 }, { "content": "pub fn print_hardware() {\n\n if let Ok(cpu_info) = CpuInfo::new() {\n\n println!(\"{:<18}{}\", \"Cores\", cpu_info.num_cores());\n\n }\n\n\n\n if let Ok(mem_info) = Meminfo::new() {\n\n println!(\n\n \"{:<18}{}\",\n\n \"Total Memory\",\n\n mem_info.mem_total / u64::pow(1024, 2)\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 38, "score": 94563.97498060776 }, { "content": "pub fn print_youki() {\n\n println!(\"{:<18}{}\", \"Version\", env!(\"CARGO_PKG_VERSION\"));\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 39, "score": 94563.97498060776 }, { "content": "// see https://www.freedesktop.org/software/systemd/man/os-release.html\n\npub fn print_os() {\n\n if let Some(os) = try_read_os_from(\"/etc/os-release\") {\n\n println!(\"{:<18}{}\", \"Operating System\", os);\n\n } else if let Some(os) = try_read_os_from(\"/usr/lib/os-release\") {\n\n println!(\"{:<18}{}\", \"Operating System\", os);\n\n }\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 40, "score": 94563.97498060776 }, { "content": "pub fn connect_stdio(\n\n stdin: &FileDescriptor,\n\n stdout: &FileDescriptor,\n\n stderr: &FileDescriptor,\n\n) -> Result<()> {\n\n dup2(stdin.as_raw_fd(), STDIN)?;\n\n dup2(stdout.as_raw_fd(), STDOUT)?;\n\n // FIXME: Rarely does it fail.\n\n // error message: `Error: Resource temporarily unavailable (os error 11)`\n\n dup2(stderr.as_raw_fd(), STDERR)?;\n\n Ok(())\n\n}\n", "file_path": "src/stdio.rs", "rank": 41, "score": 94563.97498060776 }, { "content": "pub fn print_cgroups() {\n\n if let Ok(cgroup_fs) = cgroups::common::get_supported_cgroup_fs() {\n\n let cgroup_fs: Vec<String> = cgroup_fs.into_iter().map(|c| c.to_string()).collect();\n\n println!(\"{:<18}{}\", \"cgroup version\", cgroup_fs.join(\" and \"));\n\n }\n\n\n\n println!(\"cgroup mounts\");\n\n if let Ok(v1_mounts) = cgroups::v1::util::list_subsystem_mount_points() {\n\n let mut v1_mounts: Vec<String> = v1_mounts\n\n .iter()\n\n .map(|kv| format!(\" {:<16}{}\", kv.0, kv.1.display()))\n\n .collect();\n\n\n\n v1_mounts.sort();\n\n for cgroup_mount in v1_mounts {\n\n println!(\"{}\", cgroup_mount);\n\n }\n\n }\n\n\n\n let unified = cgroups::v2::util::get_unified_mount_point();\n\n if let Ok(mount_point) = unified {\n\n println!(\" {:<16}{}\", \"unified\", mount_point.display());\n\n }\n\n}\n", "file_path": "src/info.rs", "rank": 42, "score": 94563.97498060776 }, { "content": "pub fn setup_console_socket(\n\n container_dir: &Path,\n\n console_socket_path: &Path,\n\n) -> Result<FileDescriptor> {\n\n let csocket = \"console-socket\";\n\n symlink(console_socket_path, container_dir.join(csocket))?;\n\n\n\n let mut csocketfd = socket::socket(\n\n socket::AddressFamily::Unix,\n\n socket::SockType::Stream,\n\n socket::SockFlag::empty(),\n\n None,\n\n )?;\n\n csocketfd = match socket::connect(\n\n csocketfd,\n\n &socket::SockAddr::Unix(socket::UnixAddr::new(&*csocket)?),\n\n ) {\n\n Err(e) => {\n\n if e != ::nix::Error::Sys(Errno::ENOENT) {\n\n bail!(\"failed to open {}\", csocket);\n\n }\n\n -1\n\n }\n\n Ok(()) => 
csocketfd,\n\n };\n\n Ok(csocketfd.into())\n\n}\n\n\n", "file_path": "src/tty.rs", "rank": 43, "score": 91990.35258739762 }, { "content": "fn bind_dev(dev: &LinuxDevice) -> Result<()> {\n\n let fd = open(\n\n &dev.path.as_in_container()?,\n\n OFlag::O_RDWR | OFlag::O_CREAT,\n\n Mode::from_bits_truncate(0o644),\n\n )?;\n\n close(fd)?;\n\n nix_mount(\n\n Some(&*dev.path.as_in_container()?),\n\n &dev.path,\n\n None::<&str>,\n\n MsFlags::MS_BIND,\n\n None::<&str>,\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 44, "score": 84347.7459656437 }, { "content": "fn mknod_dev(dev: &LinuxDevice) -> Result<()> {\n\n fn makedev(major: u64, minor: u64) -> u64 {\n\n (minor & 0xff) | ((major & 0xfff) << 8) | ((minor & !0xff) << 12) | ((major & !0xfff) << 32)\n\n }\n\n\n\n mknod(\n\n &dev.path.as_in_container()?,\n\n dev.typ.to_sflag()?,\n\n Mode::from_bits_truncate(dev.file_mode.unwrap_or(0)),\n\n makedev(dev.major, dev.minor),\n\n )?;\n\n chown(\n\n &dev.path.as_in_container()?,\n\n dev.uid.map(Uid::from_raw),\n\n dev.gid.map(Gid::from_raw),\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 45, "score": 84347.7459656437 }, { "content": "pub fn default_devices() -> Vec<LinuxDevice> {\n\n vec![\n\n LinuxDevice {\n\n path: PathBuf::from(\"/dev/null\"),\n\n typ: LinuxDeviceType::C,\n\n major: 1,\n\n minor: 3,\n\n file_mode: Some(0o066),\n\n uid: None,\n\n gid: None,\n\n },\n\n LinuxDevice {\n\n path: PathBuf::from(\"/dev/zero\"),\n\n typ: LinuxDeviceType::C,\n\n major: 1,\n\n minor: 5,\n\n file_mode: Some(0o066),\n\n uid: None,\n\n gid: None,\n\n },\n", "file_path": "src/rootfs.rs", "rank": 46, "score": 83057.34637567514 }, { "content": "fn create_devices(devices: &[LinuxDevice], bind: bool) -> Result<()> {\n\n let old_mode = umask(Mode::from_bits_truncate(0o000));\n\n if bind {\n\n let _ = default_devices()\n\n .iter()\n\n .chain(devices)\n\n .map(|dev| {\n\n if !dev.path.starts_with(\"/dev\") {\n\n panic!(\"{} is not a valid device path\", dev.path.display());\n\n }\n\n bind_dev(dev)\n\n })\n\n .collect::<Result<Vec<_>>>()?;\n\n } else {\n\n default_devices()\n\n .iter()\n\n .chain(devices)\n\n .map(|dev| {\n\n if !dev.path.starts_with(\"/dev\") {\n\n panic!(\"{} is not a valid device path\", dev.path.display());\n\n }\n\n mknod_dev(dev)\n\n })\n\n .collect::<Result<Vec<_>>>()?;\n\n }\n\n umask(old_mode);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 47, "score": 76943.99927790985 }, { "content": "fn is_id_mapped(id: &str, mappings: &[LinuxIdMapping]) -> Result<bool> {\n\n let id = id.parse::<u32>()?;\n\n Ok(mappings\n\n .iter()\n\n .any(|m| id >= m.container_id && id <= m.container_id + m.size))\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 48, "score": 72103.43894219614 }, { "content": "fn deserialize_caps<'de, D>(desirializer: D) -> Result<Option<LinuxCapabilities>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let r: serde_json::Value = serde::Deserialize::deserialize(desirializer)?;\n\n match r {\n\n serde_json::Value::Null => Ok(None),\n\n serde_json::Value::Array(a) => {\n\n let caps = cap_from_array::<D>(&a)?;\n\n let capabilities = LinuxCapabilities {\n\n bounding: caps.clone(),\n\n effective: caps.clone(),\n\n inheritable: caps.clone(),\n\n permitted: caps.clone(),\n\n ambient: caps,\n\n };\n\n\n\n Ok(Some(capabilities))\n\n }\n\n serde_json::Value::Object(o) => {\n", "file_path": "oci_spec/src/lib.rs", "rank": 49, "score": 62987.779895472224 }, { "content": "fn cap_from_array<'de, D>(a: &[serde_json::Value]) -> 
Result<Vec<LinuxCapabilityType>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let mut caps = Vec::new();\n\n for c in a {\n\n match LinuxCapabilityType::deserialize(c) {\n\n Ok(val) => caps.push(val),\n\n Err(_) => {\n\n let msg = format!(\"Capability '{}' is not valid\", c);\n\n return Err(serde::de::Error::custom(msg));\n\n }\n\n }\n\n }\n\n Ok(caps)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Root {\n\n #[serde(default)]\n", "file_path": "oci_spec/src/lib.rs", "rank": 50, "score": 59702.242135375505 }, { "content": "#[test]\n\nfn main() {\n\n let current_dir_path_result = env::current_dir();\n\n let current_dir_path = match current_dir_path_result {\n\n Ok(path_buf) => path_buf,\n\n Err(_) => panic!(\"directory is not found\"),\n\n };\n\n let youki_path = current_dir_path.join(PathBuf::from(\"youki\"));\n\n let status = Command::new(youki_path)\n\n .stdout(Stdio::null())\n\n .arg(\"-h\")\n\n .status()\n\n .expect(\"failed to execute process\");\n\n assert!(status.success());\n\n}\n", "file_path": "tests/integration.rs", "rank": 51, "score": 48242.48010276707 }, { "content": "fn mount_to_container(\n\n m: &Mount,\n\n rootfs: &Path,\n\n flags: MsFlags,\n\n data: &str,\n\n label: &str,\n\n) -> Result<()> {\n\n let d = if !label.is_empty() && m.typ != \"proc\" && m.typ != \"sysfs\" {\n\n if data.is_empty() {\n\n format!(\"context=\\\"{}\\\"\", label)\n\n } else {\n\n format!(\"{},context=\\\"{}\\\"\", data, label)\n\n }\n\n } else {\n\n data.to_string()\n\n };\n\n\n\n let dest_for_host = format!(\n\n \"{}{}\",\n\n rootfs.to_string_lossy().into_owned(),\n", "file_path": "src/rootfs.rs", "rank": 52, "score": 46710.00323264479 }, { "content": "/// setup hostname, rootfs for the container process\n\nfn init_process(\n\n spec: oci_spec::Spec,\n\n command: impl Command,\n\n rootfs: PathBuf,\n\n namespaces: Namespaces,\n\n) -> Result<()> {\n\n let proc = spec.process.clone();\n\n\n\n command.set_hostname(&spec.hostname.as_str())?;\n\n if spec.process.no_new_privileges {\n\n let _ = prctl::set_no_new_privileges(true);\n\n }\n\n\n\n rootfs::prepare_rootfs(\n\n &spec,\n\n &rootfs,\n\n namespaces\n\n .clone_flags\n\n .contains(sched::CloneFlags::CLONE_NEWUSER),\n\n )?;\n", "file_path": "src/create.rs", "rank": 53, "score": 46710.00323264479 }, { "content": "fn validate_mounts(\n\n mounts: &[Mount],\n\n uid_mappings: &[LinuxIdMapping],\n\n gid_mappings: &[LinuxIdMapping],\n\n) -> Result<()> {\n\n for mount in mounts {\n\n for opt in &mount.options {\n\n if opt.starts_with(\"uid=\") && !is_id_mapped(&opt[4..], uid_mappings)? {\n\n bail!(\"Mount {:?} specifies option {} which is not mapped inside the rootless container\", mount, opt);\n\n }\n\n\n\n if opt.starts_with(\"gid=\") && !is_id_mapped(&opt[4..], gid_mappings)? 
{\n\n bail!(\"Mount {:?} specifies option {} which is not mapped inside the rootless container\", mount, opt);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 54, "score": 46710.00323264479 }, { "content": "fn write_id_mapping(\n\n map_file: &str,\n\n mappings: &[LinuxIdMapping],\n\n map_binary: Option<&Path>,\n\n) -> Result<()> {\n\n let mappings: Vec<String> = mappings\n\n .iter()\n\n .map(|m| format!(\"{} {} {}\", m.container_id, m.host_id, m.size))\n\n .collect();\n\n if mappings.len() == 1 {\n\n utils::write_file(map_file, mappings.first().unwrap())?;\n\n } else {\n\n Command::new(map_binary.unwrap())\n\n .args(mappings)\n\n .output()\n\n .with_context(|| format!(\"failed to execute {:?}\", map_binary))?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/process/parent.rs", "rank": 55, "score": 44031.117843165826 }, { "content": "fn cap_from_object<'de, D>(\n\n o: &serde_json::Map<String, serde_json::Value>,\n\n key: &str,\n\n) -> Result<Vec<LinuxCapabilityType>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n if let Some(v) = o.get(key) {\n\n match *v {\n\n serde_json::Value::Null => Ok(Vec::new()),\n\n serde_json::Value::Array(ref a) => cap_from_array::<D>(a),\n\n _ => Err(serde::de::Error::custom(\n\n \"Unexpected value in capability set\",\n\n )),\n\n }\n\n } else {\n\n Ok(Vec::new())\n\n }\n\n}\n\n\n", "file_path": "oci_spec/src/lib.rs", "rank": 56, "score": 39678.25953944718 }, { "content": "fn parse_mount(m: &Mount) -> (MsFlags, String) {\n\n let mut flags = MsFlags::empty();\n\n let mut data = Vec::new();\n\n for s in &m.options {\n\n if let Some((is_clear, flag)) = match s.as_str() {\n\n \"defaults\" => Some((false, MsFlags::empty())),\n\n \"ro\" => Some((false, MsFlags::MS_RDONLY)),\n\n \"rw\" => Some((true, MsFlags::MS_RDONLY)),\n\n \"suid\" => Some((true, MsFlags::MS_NOSUID)),\n\n \"nosuid\" => Some((false, MsFlags::MS_NOSUID)),\n\n \"dev\" => Some((true, MsFlags::MS_NODEV)),\n\n \"nodev\" => Some((false, MsFlags::MS_NODEV)),\n\n \"exec\" => Some((true, MsFlags::MS_NOEXEC)),\n\n \"noexec\" => Some((false, MsFlags::MS_NOEXEC)),\n\n \"sync\" => Some((false, MsFlags::MS_SYNCHRONOUS)),\n\n \"async\" => Some((true, MsFlags::MS_SYNCHRONOUS)),\n\n \"dirsync\" => Some((false, MsFlags::MS_DIRSYNC)),\n\n \"remount\" => Some((false, MsFlags::MS_REMOUNT)),\n\n \"mand\" => Some((false, MsFlags::MS_MANDLOCK)),\n\n \"nomand\" => Some((true, MsFlags::MS_MANDLOCK)),\n", "file_path": "src/rootfs.rs", "rank": 57, "score": 38105.743539562056 }, { "content": "fn to_set(caps: &[LinuxCapabilityType]) -> CapsHashSet {\n\n let mut capabilities = CapsHashSet::new();\n\n for c in caps {\n\n capabilities.insert(c.cap);\n\n }\n\n capabilities\n\n}\n\n\n", "file_path": "src/capabilities.rs", "rank": 58, "score": 36901.91053814845 }, { "content": "fn find_parameter<'a>(content: &'a str, param_name: &str) -> Option<&'a str> {\n\n let param_value = content\n\n .lines()\n\n .find(|l| l.starts_with(param_name))\n\n .map(|l| l.split_terminator('=').last());\n\n\n\n if let Some(Some(value)) = param_value {\n\n return Some(value);\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 59, "score": 32667.34552960575 }, { "content": "#[cfg(feature = \"proptests\")]\n\nfn some_none_generator_util<T: Arbitrary>(g: &mut Gen) -> Option<T> {\n\n let choice = g.choose(&[true, false]).unwrap();\n\n match choice {\n\n false => None,\n\n true => Some(T::arbitrary(g)),\n\n }\n\n}\n\n\n\n#[cfg(feature = \"proptests\")]\n\nimpl Arbitrary for LinuxDeviceCgroup 
{\n\n fn arbitrary(g: &mut Gen) -> LinuxDeviceCgroup {\n\n let typ_choices = [\"b\", \"c\", \"u\", \"p\", \"a\"];\n\n\n\n let typ_chosen = g.choose(&typ_choices).unwrap();\n\n\n\n let typ = match typ_chosen.to_string().as_str() {\n\n \"b\" => LinuxDeviceType::B,\n\n \"c\" => LinuxDeviceType::C,\n\n \"u\" => LinuxDeviceType::U,\n\n \"p\" => LinuxDeviceType::P,\n", "file_path": "oci_spec/src/lib.rs", "rank": 60, "score": 31987.95635754693 }, { "content": "use anyhow::Result;\n\n\n\nuse super::controller::Controller;\n\nuse oci_spec::LinuxResources;\n\n\n\npub struct Io {}\n\n\n\nimpl Controller for Io {\n\n fn apply(_: &LinuxResources, _: &std::path::Path) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cgroups/v2/io.rs", "rank": 61, "score": 17.424648433043664 }, { "content": "use anyhow::Result;\n\n\n\nuse super::controller::Controller;\n\nuse oci_spec::LinuxResources;\n\n\n\npub struct Pids {}\n\n\n\nimpl Controller for Pids {\n\n fn apply(_: &LinuxResources, _: &std::path::Path) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cgroups/v2/pids.rs", "rank": 62, "score": 17.424648433043664 }, { "content": "use anyhow::Result;\n\nuse std::path::Path;\n\n\n\nuse oci_spec::{LinuxMemory, LinuxResources};\n\n\n\nuse super::controller::Controller;\n\n\n\npub struct Memory {}\n\n\n\nimpl Controller for Memory {\n\n fn apply(linux_resources: &LinuxResources, cgroup_path: &Path) -> Result<()> {\n\n if let Some(memory) = &linux_resources.memory {\n\n Self::apply(cgroup_path, memory)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Memory {\n\n fn apply(_: &Path, _: &LinuxMemory) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cgroups/v2/memory.rs", "rank": 63, "score": 17.277856813938893 }, { "content": "use anyhow::Result;\n\n\n\nuse super::controller::Controller;\n\nuse oci_spec::LinuxResources;\n\n\n\npub struct HugeTlb {}\n\n\n\nimpl Controller for HugeTlb {\n\n fn apply(_: &LinuxResources, _: &std::path::Path) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cgroups/v2/hugetlb.rs", "rank": 64, "score": 17.127047214151922 }, { "content": "//! 
Starts execution of the container\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::{bail, Result};\n\nuse clap::Clap;\n\nuse nix::unistd;\n\n\n\nuse crate::container::{Container, ContainerStatus};\n\nuse crate::notify_socket::NotifySocket;\n\n\n\n#[derive(Clap, Debug)]\n\npub struct Start {\n\n pub container_id: String,\n\n}\n\n\n\nimpl Start {\n\n pub fn exec(&self, root_path: PathBuf) -> Result<()> {\n\n let container_root = root_path.join(&self.container_id);\n\n if !container_root.exists() {\n", "file_path": "src/start.rs", "rank": 65, "score": 16.821392195378852 }, { "content": "use std::fs;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\nuse procfs::process::Process;\n\n\n\nuse crate::container::{ContainerStatus, State};\n\n\n\n/// Structure representing the container data\n\n#[derive(Debug)]\n\npub struct Container {\n\n // State of the container\n\n pub state: State,\n\n // indicated the directory for the root path in the container\n\n pub root: PathBuf,\n\n}\n\n\n\nimpl Container {\n\n pub fn new(\n", "file_path": "src/container/container.rs", "rank": 66, "score": 16.729533032464577 }, { "content": "use std::io::prelude::*;\n\nuse std::os::unix::io::AsRawFd;\n\nuse std::os::unix::net::{UnixListener, UnixStream};\n\nuse std::path::Path;\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::close;\n\n\n\npub const NOTIFY_FILE: &str = \"notify.sock\";\n\n\n\npub struct NotifyListener {\n\n socket: UnixListener,\n\n}\n\n\n\nimpl NotifyListener {\n\n pub fn new(root: &Path) -> Result<Self> {\n\n let _notify_file_path = root.join(NOTIFY_FILE);\n\n let stream = UnixListener::bind(\"notify.sock\")?;\n\n Ok(Self { socket: stream })\n\n }\n", "file_path": "src/notify_socket.rs", "rank": 67, "score": 15.75626349595329 }, { "content": "use std::{\n\n fs::{self},\n\n path::Path,\n\n};\n\n\n\nuse anyhow::Result;\n\n\n\nuse crate::cgroups::{\n\n common::{self, CGROUP_PROCS},\n\n v1::Controller,\n\n};\n\nuse oci_spec::{LinuxPids, LinuxResources};\n\n\n\npub struct Pids {}\n\n\n\nimpl Controller for Pids {\n\n fn apply(\n\n linux_resources: &LinuxResources,\n\n cgroup_root: &std::path::Path,\n\n pid: nix::unistd::Pid,\n", "file_path": "src/cgroups/v1/pids.rs", "rank": 68, "score": 15.36297624537637 }, { "content": "use oci_spec::LinuxRlimit;\n\n\n\nuse super::Command;\n\nuse crate::capabilities;\n\n\n\n/// Empty structure to implement Command trait for\n\n#[derive(Clone)]\n\npub struct LinuxCommand;\n\n\n\nimpl Command for LinuxCommand {\n\n /// To enable dynamic typing,\n\n /// see https://doc.rust-lang.org/std/any/index.html for more information\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n /// Function to set given path as root path inside process\n\n fn pivot_rootfs(&self, path: &Path) -> Result<()> {\n\n // open the path as directory and read only\n\n let newroot = open(path, OFlag::O_DIRECTORY | OFlag::O_RDONLY, Mode::empty())?;\n", "file_path": "src/command/linux.rs", "rank": 69, "score": 15.184404797458319 }, { "content": "use anyhow::Result;\n\nuse std::path::Path;\n\n\n\nuse crate::cgroups::common;\n\nuse oci_spec::{LinuxCpu, LinuxResources};\n\n\n\nuse super::controller::Controller;\n\n\n\nconst CGROUP_CPUSET_CPUS: &str = \"cpuset.cpus\";\n\nconst CGROUP_CPUSET_MEMS: &str = \"cpuset.mems\";\n\n\n\npub struct CpuSet {}\n\n\n\nimpl Controller for CpuSet {\n\n fn apply(linux_resources: &LinuxResources, cgroup_path: &Path) -> Result<()> {\n\n if let Some(cpuset) = &linux_resources.cpu {\n\n Self::apply(cgroup_path, cpuset)?;\n\n }\n\n\n\n 
Ok(())\n", "file_path": "src/cgroups/v2/cpuset.rs", "rank": 70, "score": 15.053462389734685 }, { "content": "use std::{fs, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\nuse oci_spec::LinuxResources;\n\n\n\nuse crate::cgroups::common::{self, CGROUP_PROCS};\n\n\n\nuse super::Controller;\n\n\n\npub struct CpuAcct {}\n\n\n\nimpl Controller for CpuAcct {\n\n fn apply(_linux_resources: &LinuxResources, cgroup_path: &Path, pid: Pid) -> Result<()> {\n\n log::debug!(\"Apply cpuacct cgroup config\");\n\n fs::create_dir_all(cgroup_path)?;\n\n\n\n common::write_cgroup_file(cgroup_path.join(CGROUP_PROCS), pid)?;\n\n Ok(())\n\n }\n", "file_path": "src/cgroups/v1/cpuacct.rs", "rank": 71, "score": 14.95093478250667 }, { "content": "use anyhow::{bail, Result};\n\nuse std::path::Path;\n\n\n\nuse crate::cgroups::common;\n\nuse oci_spec::{LinuxCpu, LinuxResources};\n\n\n\nuse super::controller::Controller;\n\n\n\nconst CGROUP_CPU_WEIGHT: &str = \"cpu.weight\";\n\nconst CGROUP_CPU_MAX: &str = \"cpu.max\";\n\nconst DEFAULT_PERIOD: &str = \"100000\";\n\nconst UNRESTRICTED_QUOTA: &str = \"max\";\n\n\n\npub struct Cpu {}\n\n\n\nimpl Controller for Cpu {\n\n fn apply(linux_resources: &LinuxResources, path: &Path) -> Result<()> {\n\n if let Some(cpu) = &linux_resources.cpu {\n\n Self::apply(path, cpu)?;\n\n }\n", "file_path": "src/cgroups/v2/cpu.rs", "rank": 72, "score": 14.819502815622084 }, { "content": "use std::{fs, path::Path};\n\n\n\nuse anyhow::bail;\n\nuse regex::Regex;\n\n\n\nuse crate::cgroups::{\n\n common::{self, CGROUP_PROCS},\n\n v1::Controller,\n\n};\n\nuse oci_spec::{LinuxHugepageLimit, LinuxResources};\n\n\n\npub struct Hugetlb {}\n\n\n\nimpl Controller for Hugetlb {\n\n fn apply(\n\n linux_resources: &LinuxResources,\n\n cgroup_root: &std::path::Path,\n\n pid: nix::unistd::Pid,\n\n ) -> anyhow::Result<()> {\n\n log::debug!(\"Apply Hugetlb cgroup config\");\n", "file_path": "src/cgroups/v1/hugetlb.rs", "rank": 73, "score": 14.423234012893232 }, { "content": "use std::{fs, path::Path};\n\n\n\nuse anyhow::{bail, Result};\n\nuse nix::unistd::Pid;\n\nuse oci_spec::{LinuxCpu, LinuxResources};\n\n\n\nuse crate::cgroups::common::{self, CGROUP_PROCS};\n\n\n\nuse super::{util, Controller, ControllerType};\n\n\n\nconst CGROUP_CPUSET_CPUS: &str = \"cpuset.cpus\";\n\nconst CGROUP_CPUSET_MEMS: &str = \"cpuset.mems\";\n\n\n\npub struct CpuSet {}\n\n\n\nimpl Controller for CpuSet {\n\n fn apply(linux_resources: &LinuxResources, cgroup_path: &Path, pid: Pid) -> Result<()> {\n\n log::debug!(\"Apply CpuSet cgroup config\");\n\n fs::create_dir_all(cgroup_path)?;\n\n\n", "file_path": "src/cgroups/v1/cpuset.rs", "rank": 74, "score": 14.218507287538788 }, { "content": "use std::{fs::create_dir_all, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\n\n\nuse crate::cgroups::common;\n\nuse crate::cgroups::common::CGROUP_PROCS;\n\nuse crate::cgroups::v1::Controller;\n\nuse oci_spec::{LinuxNetwork, LinuxResources};\n\n\n\npub struct NetworkClassifier {}\n\n\n\nimpl Controller for NetworkClassifier {\n\n fn apply(linux_resources: &LinuxResources, cgroup_root: &Path, pid: Pid) -> Result<()> {\n\n log::debug!(\"Apply NetworkClassifier cgroup config\");\n\n create_dir_all(&cgroup_root)?;\n\n\n\n if let Some(network) = linux_resources.network.as_ref() {\n\n Self::apply(cgroup_root, network)?;\n\n }\n", "file_path": "src/cgroups/v1/network_classifier.rs", "rank": 75, "score": 13.702813941630778 }, { "content": "use std::{fs::create_dir_all, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse 
nix::unistd::Pid;\n\n\n\nuse crate::cgroups::common;\n\nuse crate::cgroups::common::CGROUP_PROCS;\n\nuse crate::cgroups::v1::Controller;\n\nuse oci_spec::{LinuxNetwork, LinuxResources};\n\n\n\npub struct NetworkPriority {}\n\n\n\nimpl Controller for NetworkPriority {\n\n fn apply(linux_resources: &LinuxResources, cgroup_root: &Path, pid: Pid) -> Result<()> {\n\n log::debug!(\"Apply NetworkPriority cgroup config\");\n\n create_dir_all(&cgroup_root)?;\n\n\n\n if let Some(network) = linux_resources.network.as_ref() {\n\n Self::apply(cgroup_root, network)?;\n\n }\n", "file_path": "src/cgroups/v1/network_priority.rs", "rank": 76, "score": 13.702813941630778 }, { "content": "use std::{fs::create_dir_all, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\n\n\nuse crate::cgroups::common::{self, CGROUP_PROCS};\n\nuse crate::{cgroups::v1::Controller, rootfs::default_devices};\n\nuse oci_spec::{LinuxDeviceCgroup, LinuxDeviceType, LinuxResources};\n\n\n\npub struct Devices {}\n\n\n\nimpl Controller for Devices {\n\n fn apply(linux_resources: &LinuxResources, cgroup_root: &Path, pid: Pid) -> Result<()> {\n\n log::debug!(\"Apply Devices cgroup config\");\n\n create_dir_all(&cgroup_root)?;\n\n\n\n for d in &linux_resources.devices {\n\n Self::apply_device(d, cgroup_root)?;\n\n }\n\n\n", "file_path": "src/cgroups/v1/devices.rs", "rank": 77, "score": 13.692298059623386 }, { "content": "\n\nimpl Spec {\n\n pub fn load(path: &str) -> Result<Self> {\n\n let file = File::open(path)?;\n\n let mut spec: Spec = serde_json::from_reader(&file)?;\n\n // FIME: It is fail if the caller isn't in the correct directory.\n\n spec.root.path = std::fs::canonicalize(spec.root.path)?;\n\n Ok(spec)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"proptests\")]\n\nuse quickcheck::{Arbitrary, Gen};\n\n\n", "file_path": "oci_spec/src/lib.rs", "rank": 78, "score": 13.551262002778925 }, { "content": "use std::{env, path::PathBuf};\n\n\n\nuse anyhow::{bail, Result};\n\nuse nix::sched::CloneFlags;\n\nuse oci_spec::{Linux, LinuxIdMapping, Mount, Spec};\n\n\n\nuse crate::namespaces::Namespaces;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Rootless {\n\n /// Location of the newuidmap binary\n\n pub newuidmap: Option<PathBuf>,\n\n /// Location of the newgidmap binary\n\n pub newgidmap: Option<PathBuf>,\n\n /// Mappings for user ids\n\n pub uid_mappings: Vec<LinuxIdMapping>,\n\n /// Mappings for group ids\n\n pub gid_mappings: Vec<LinuxIdMapping>,\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 79, "score": 13.332789274662085 }, { "content": "use std::{\n\n env,\n\n fmt::{Debug, Display},\n\n fs,\n\n io::Write,\n\n path::{Path, PathBuf},\n\n};\n\n\n\n\n\nuse anyhow::{bail, Context, Result};\n\nuse nix::unistd::Pid;\n\nuse oci_spec::LinuxResources;\n\nuse procfs::process::Process;\n\n\n\nuse crate::cgroups::v1;\n\nuse crate::cgroups::v2;\n\n\n\npub const CGROUP_PROCS: &str = \"cgroup.procs\";\n\npub const DEFAULT_CGROUP_ROOT: &str = \"/sys/fs/cgroup\";\n\n\n", "file_path": "src/cgroups/common.rs", "rank": 80, "score": 13.180737183231397 }, { "content": "use nix::sys::stat::SFlag;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::{bail, Result};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Platform {\n\n #[serde(default)]\n\n pub os: String,\n\n #[serde(default)]\n\n pub arch: String,\n\n}\n\n\n\n#[derive(Default, PartialEq, Serialize, Deserialize, Debug, Clone)]\n\npub struct Box {\n\n #[serde(default)]\n\n pub 
height: u64,\n", "file_path": "oci_spec/src/lib.rs", "rank": 81, "score": 13.173248950819943 }, { "content": "use std::{fs, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\nuse oci_spec::{LinuxCpu, LinuxResources};\n\n\n\nuse crate::cgroups::common::{self, CGROUP_PROCS};\n\n\n\nuse super::Controller;\n\n\n\nconst CGROUP_CPU_SHARES: &str = \"cpu.shares\";\n\nconst CGROUP_CPU_QUOTA: &str = \"cpu.cfs_quota_us\";\n\nconst CGROUP_CPU_PERIOD: &str = \"cpu.cfs_period_us\";\n\nconst CGROUP_CPU_RT_RUNTIME: &str = \"cpu.rt_runtime_us\";\n\nconst CGROUP_CPU_RT_PERIOD: &str = \"cpu.rt_period_us\";\n\n\n\npub struct Cpu {}\n\n\n\nimpl Controller for Cpu {\n\n fn apply(linux_resources: &LinuxResources, cgroup_root: &Path, pid: Pid) -> Result<()> {\n", "file_path": "src/cgroups/v1/cpu.rs", "rank": 82, "score": 12.973886019828104 }, { "content": "//! Default Youki Logger\n\n\n\nuse std::env;\n\nuse std::io::{stderr, Write};\n\nuse std::path::PathBuf;\n\nuse std::{\n\n fs::{File, OpenOptions},\n\n str::FromStr,\n\n};\n\n\n\nuse anyhow::Result;\n\nuse log::{LevelFilter, Log, Metadata, Record};\n\nuse once_cell::sync::OnceCell;\n\n\n\npub static YOUKI_LOGGER: OnceCell<YoukiLogger> = OnceCell::new();\n\npub static LOG_FILE: OnceCell<Option<File>> = OnceCell::new();\n\n\n", "file_path": "src/logger.rs", "rank": 83, "score": 12.476677328551798 }, { "content": "//! Unix pipe wrapper\n\n\n\nuse std::os::unix::io::RawFd;\n\n\n\nuse anyhow::Result;\n\nuse nix::fcntl::OFlag;\n\nuse nix::unistd::{close, pipe2, read};\n\n\n\npub struct Pipe {\n\n rfd: RawFd,\n\n wfd: RawFd,\n\n}\n\n\n\nimpl Pipe {\n\n pub fn new() -> Result<Self> {\n\n // Sets as close-on-execution\n\n let (rfd, wfd) = pipe2(OFlag::O_CLOEXEC)?;\n\n Ok(Pipe { rfd, wfd })\n\n }\n\n\n", "file_path": "src/pipe.rs", "rank": 84, "score": 12.348634730965259 }, { "content": "//! An interface trait so that rest of Youki can call\n\n//! necessary functions without having to worry about their\n\n//! 
implementation details\n\nuse std::{any::Any, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse caps::{errors::CapsError, CapSet, CapsHashSet};\n\nuse nix::{\n\n sched::CloneFlags,\n\n unistd::{Gid, Uid},\n\n};\n\n\n\nuse oci_spec::LinuxRlimit;\n\n\n\n/// This specifies various kernel/other functionalities required for\n\n/// container management\n", "file_path": "src/command/command.rs", "rank": 85, "score": 12.002053699443657 }, { "content": "use anyhow::Result;\n\nuse std::path::Path;\n\n\n\nuse oci_spec::LinuxResources;\n\n\n", "file_path": "src/cgroups/v2/controller.rs", "rank": 86, "score": 11.985057462322386 }, { "content": "pub struct Manager {\n\n subsystems: HashMap<String, PathBuf>,\n\n}\n\n\n\nimpl Manager {\n\n pub fn new(cgroup_path: PathBuf) -> Result<Self> {\n\n let mut subsystems = HashMap::<String, PathBuf>::new();\n\n for subsystem in CONTROLLERS.iter().map(|c| c.to_string()) {\n\n subsystems.insert(\n\n subsystem.to_owned(),\n\n Self::get_subsystem_path(&cgroup_path, &subsystem)?,\n\n );\n\n }\n\n\n\n Ok(Manager { subsystems })\n\n }\n\n\n\n fn get_subsystem_path(cgroup_path: &Path, subsystem: &str) -> anyhow::Result<PathBuf> {\n\n log::debug!(\"Get path for subsystem: {}\", subsystem);\n\n let mount_point = util::get_subsystem_mount_points(subsystem)?;\n", "file_path": "src/cgroups/v1/manager.rs", "rank": 87, "score": 11.901587661014233 }, { "content": "use std::path::PathBuf;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse procfs::process::Process;\n\n\n", "file_path": "src/cgroups/v2/util.rs", "rank": 88, "score": 11.87491709128099 }, { "content": "\n\nimpl NotifySocket {\n\n pub fn new(_root: &Path) -> Result<Self> {\n\n Ok(Self {})\n\n }\n\n\n\n pub fn notify_container_start(&mut self) -> Result<()> {\n\n log::debug!(\"connection start\");\n\n let mut stream = UnixStream::connect(\"notify.sock\")?;\n\n stream.write_all(b\"start container\")?;\n\n log::debug!(\"write finish\");\n\n Ok(())\n\n }\n\n\n\n pub fn notify_container_finish(&mut self) -> Result<()> {\n\n // self.socket.write_all(b\"finish container\")?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/notify_socket.rs", "rank": 89, "score": 11.869087850329471 }, { "content": "use std::path::Path;\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::Pid;\n\n\n\nuse oci_spec::LinuxResources;\n\n\n", "file_path": "src/cgroups/v1/controller.rs", "rank": 90, "score": 11.764397936823453 }, { "content": "//! 
Implements Command trait for Linux systems\n\nuse std::{any::Any, path::Path};\n\n\n\nuse anyhow::{bail, Result};\n\nuse caps::{errors::CapsError, CapSet, CapsHashSet};\n\nuse nix::{\n\n errno::Errno,\n\n unistd::{fchdir, pivot_root, sethostname},\n\n};\n\nuse nix::{fcntl::open, sched::CloneFlags};\n\nuse nix::{\n\n fcntl::OFlag,\n\n unistd::{Gid, Uid},\n\n};\n\nuse nix::{\n\n mount::{umount2, MntFlags},\n\n unistd,\n\n};\n\nuse nix::{sched::unshare, sys::stat::Mode};\n\n\n", "file_path": "src/command/linux.rs", "rank": 91, "score": 11.480785339523472 }, { "content": "use std::io::Write;\n\n\n\nuse anyhow::Result;\n\nuse mio::unix::pipe::Sender;\n\n\n\nuse crate::process::message::Message;\n\n\n\n/// Contains sending end for pipe for the child process\n\npub struct InitProcess {\n\n sender_for_child: Sender,\n\n}\n\n\n\nimpl InitProcess {\n\n /// create a new Init process structure\n\n pub fn new(sender_for_child: Sender) -> Self {\n\n Self { sender_for_child }\n\n }\n\n\n\n /// Notify that this process is ready\n\n // The child here is in perspective of overall hierarchy\n", "file_path": "src/process/init.rs", "rank": 92, "score": 11.443218232137967 }, { "content": "\n\nconst CGROUP_CONTROLLERS: &str = \"cgroup.controllers\";\n\nconst CGROUP_SUBTREE_CONTROL: &str = \"cgroup.subtree_control\";\n\n\n\nconst CONTROLLER_TYPES: &[ControllerType] = &[\n\n ControllerType::Cpu,\n\n ControllerType::CpuSet,\n\n ControllerType::HugeTlb,\n\n ControllerType::Io,\n\n ControllerType::Memory,\n\n ControllerType::Pids,\n\n];\n\n\n\npub struct Manager {\n\n root_path: PathBuf,\n\n cgroup_path: PathBuf,\n\n}\n\n\n\nimpl Manager {\n\n pub fn new(root_path: PathBuf, cgroup_path: PathBuf) -> Result<Self> {\n", "file_path": "src/cgroups/v2/manager.rs", "rank": 93, "score": 11.422750903758988 }, { "content": "//! 
Information about status and state of the container\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::{fs::File, path::Path};\n\n\n\nuse anyhow::Result;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nconst STATE_FILE_PATH: &str = \"state.json\";\n\n\n\n/// Indicates status of the container\n\n#[derive(Serialize, Deserialize, Debug, Copy, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub enum ContainerStatus {\n\n // The container is being created\n\n Creating,\n\n // The runtime has finished the create operation\n\n Created,\n\n // The container process has executed the user-specified program but has not exited\n\n Running,\n", "file_path": "src/container/state.rs", "rank": 94, "score": 11.31330680395806 }, { "content": "use std::os::unix::io::{AsRawFd, RawFd};\n\n\n\nuse anyhow::Result;\n\nuse nix::unistd::dup2;\n\n\n\n#[derive(Debug)]\n\npub struct FileDescriptor(RawFd);\n\n\n\nconst STDIN: i32 = 0;\n\nconst STDOUT: i32 = 1;\n\nconst STDERR: i32 = 2;\n\n\n\n// impl Drop for FileDescriptor {\n\n// fn drop(&mut self) {\n\n// close(self.0).expect(\"FileDescriptor close failed.\")\n\n// }\n\n// }\n\n\n\nimpl AsRawFd for FileDescriptor {\n\n fn as_raw_fd(&self) -> RawFd {\n", "file_path": "src/stdio.rs", "rank": 95, "score": 11.30270765631637 }, { "content": "use std::{collections::HashMap, path::PathBuf};\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse procfs::process::Process;\n\n\n\nuse super::controller_type::CONTROLLERS;\n\n\n", "file_path": "src/cgroups/v1/util.rs", "rank": 96, "score": 11.297732916304048 }, { "content": "#![cfg(test)]\n\n\n\nuse anyhow::Result;\n\nuse std::{\n\n io::Write, \n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse oci_spec::LinuxCpu;\n\n\n\nuse crate::utils::{create_temp_dir, TempDir}; \n\n\n\n\n", "file_path": "src/cgroups/test.rs", "rank": 97, "score": 11.285265320020153 }, { "content": "\n\nimpl State {\n\n pub fn new(\n\n container_id: &str,\n\n status: ContainerStatus,\n\n pid: Option<i32>,\n\n bundle: &str,\n\n ) -> Self {\n\n Self {\n\n oci_version: \"v1.0.2\".to_string(),\n\n id: container_id.to_string(),\n\n status,\n\n pid,\n\n bundle: bundle.to_string(),\n\n annotations: HashMap::default(),\n\n }\n\n }\n\n\n\n pub fn save(&self, container_root: &Path) -> Result<()> {\n\n let state_file_path = container_root.join(STATE_FILE_PATH);\n", "file_path": "src/container/state.rs", "rank": 98, "score": 11.282948730099239 }, { "content": "use std::io::prelude::*;\n\nuse std::{\n\n fs::{create_dir_all, OpenOptions},\n\n path::Path,\n\n thread, time,\n\n};\n\n\n\nuse anyhow::{Result, *};\n\nuse nix::unistd::Pid;\n\n\n\nuse crate::cgroups::common::{self, CGROUP_PROCS};\n\nuse crate::cgroups::v1::Controller;\n\nuse oci_spec::{FreezerState, LinuxResources};\n\n\n\nconst CGROUP_FREEZER_STATE: &str = \"freezer.state\";\n\nconst FREEZER_STATE_THAWED: &str = \"THAWED\";\n\nconst FREEZER_STATE_FROZEN: &str = \"FROZEN\";\n\nconst FREEZER_STATE_FREEZING: &str = \"FREEZING\";\n\n\n\npub struct Freezer {}\n", "file_path": "src/cgroups/v1/freezer.rs", "rank": 99, "score": 11.112517770786944 } ]
Rust
cli/src/command/export/export.rs
bennyboer/worklog
bce3c3954c3a14cca7c029caf2641ec1f5af4478
use crate::command::command::Command; use crate::command::list; use cmd_args::{arg, option, Group}; use persistence::calc::event::EventType; use persistence::calc::WorkItem; use std::collections::HashMap; use std::fs; pub struct ExportCommand {} impl Command for ExportCommand { fn build(&self) -> Group { Group::new( Box::new(|args, options| execute(args, options)), "Export work log entries", ) .add_argument(arg::Descriptor::new( arg::Type::Str, "Export type (e. g. 'markdown')", )) .add_option(option::Descriptor::new( "path", option::Type::Str { default: String::from("log_export.md"), }, "Path of the file to export to", )) .add_option(option::Descriptor::new( "filter", option::Type::Str { default: String::from("today"), }, "Filter by a date ('today' (default), 'yesterday', '2020-02-20' (yyyy-MM-dd))", )) } fn aliases(&self) -> Option<Vec<&str>> { None } fn name(&self) -> &str { "export" } } fn execute(args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) { let export_type = args[0].str().unwrap(); if export_type.trim().to_lowercase() == "markdown" { export_to_markdown( options.get("path").unwrap().str().unwrap(), options .get("filter") .unwrap() .str() .map_or(String::from("today"), |v| v.to_owned()), ); } else { panic!("Export type '{}' currently not supported", export_type); } } fn export_to_markdown(file_path: &str, filter: String) { let (from_timestamp, to_timestamp) = list::filter_keyword_to_time_range(&filter[..]); let items = persistence::find_items_by_timerange(from_timestamp, to_timestamp).unwrap(); let first_item = items.first().unwrap(); let date_time = shared::time::get_local_date_time(first_item.created_timestamp()); let mut data = String::new(); data.push_str(&format!( "# Report for {} the {}\n\n", date_time.format("%A").to_string(), date_time.format("%Y-%m-%d").to_string() )); data.push_str("## Statistics\n\n"); let total_work_time = { let item_refs: Vec<&WorkItem> = items.iter().collect(); let total_work_time_ms = list::calculate_total_work_time(&item_refs); shared::time::format_duration((total_work_time_ms / 1000) as u32) }; let start_time = shared::time::get_local_date_time(find_earliest_work_item(&items).created_timestamp()) .format("%H:%M") .to_string(); let end_time = shared::time::get_local_date_time(find_latest_work_item(&items).created_timestamp()) .format("%H:%M") .to_string(); data.push_str(&format!( "\ | Total time worked | Started working | Finished working | | ----------------- | --------------- | ---------------- | | {} | {} | {} |\n\n", total_work_time, start_time, end_time )); data.push_str("## Work items\n\n"); for item in &items { data.push_str(&format!( "- {}. Took `{}` ({}). 
Tags: *{}*.\n", item.description(), shared::time::format_duration((item.time_taken() / 1000) as u32), format_event_timeline(item), item.tags().join(", ") )); } fs::write(file_path, data).expect("Unable to write export file"); } fn format_event_timeline(item: &WorkItem) -> String { let mut result = Vec::new(); let mut start_time = None; for event in item.events() { match event.event_type() { EventType::Started | EventType::Continued => { start_time = Some(shared::time::get_local_date_time(event.timestamp())) } EventType::Finished | EventType::Paused => result.push(format!( "{} - {}", start_time.unwrap().format("%H:%M"), shared::time::get_local_date_time(event.timestamp()).format("%H:%M") )), }; } result.join(", ") } fn find_latest_work_item(items: &[WorkItem]) -> &WorkItem { let mut latest_ts: i64 = items.first().unwrap().events().last().unwrap().timestamp(); let mut latest_item = items.first().unwrap(); for item in &items[1..] { if item.events().last().unwrap().timestamp() > latest_ts { latest_ts = item.events().last().unwrap().timestamp(); latest_item = item; } } latest_item } fn find_earliest_work_item(items: &[WorkItem]) -> &WorkItem { let mut earliest_ts: i64 = items.first().unwrap().created_timestamp(); let mut earliest_item = items.first().unwrap(); for item in &items[1..] { if item.created_timestamp() < earliest_ts { earliest_ts = item.created_timestamp(); earliest_item = item; } } earliest_item }
use crate::command::command::Command; use crate::command::list; use cmd_args::{arg, option, Group}; use persistence::calc::event::EventType; use persistence::calc::WorkItem; use std::collections::HashMap; use std::fs; pub struct ExportCommand {} impl Command for ExportCommand { fn build(&self) -> Group { Group::new( Box::new(|args, options| execute(args, options)), "Export work log entries", ) .add_argument(arg::Descriptor::new( arg::Type::Str, "Export type (e. g. 'markdown')", )) .add_option(option::Descriptor::new( "path", option::Type::Str { default: String::from("log_export.md"), }, "Path of the file to export to", )) .add_option(option::Descriptor::new( "filter", option::Type::Str { default: String::from("today"), }, "Filter by a date ('today' (default), 'yesterday', '2020-02-20' (yyyy-MM-dd))", )) } fn aliases(&self
::time::format_duration((total_work_time_ms / 1000) as u32) }; let start_time = shared::time::get_local_date_time(find_earliest_work_item(&items).created_timestamp()) .format("%H:%M") .to_string(); let end_time = shared::time::get_local_date_time(find_latest_work_item(&items).created_timestamp()) .format("%H:%M") .to_string(); data.push_str(&format!( "\ | Total time worked | Started working | Finished working | | ----------------- | --------------- | ---------------- | | {} | {} | {} |\n\n", total_work_time, start_time, end_time )); data.push_str("## Work items\n\n"); for item in &items { data.push_str(&format!( "- {}. Took `{}` ({}). Tags: *{}*.\n", item.description(), shared::time::format_duration((item.time_taken() / 1000) as u32), format_event_timeline(item), item.tags().join(", ") )); } fs::write(file_path, data).expect("Unable to write export file"); } fn format_event_timeline(item: &WorkItem) -> String { let mut result = Vec::new(); let mut start_time = None; for event in item.events() { match event.event_type() { EventType::Started | EventType::Continued => { start_time = Some(shared::time::get_local_date_time(event.timestamp())) } EventType::Finished | EventType::Paused => result.push(format!( "{} - {}", start_time.unwrap().format("%H:%M"), shared::time::get_local_date_time(event.timestamp()).format("%H:%M") )), }; } result.join(", ") } fn find_latest_work_item(items: &[WorkItem]) -> &WorkItem { let mut latest_ts: i64 = items.first().unwrap().events().last().unwrap().timestamp(); let mut latest_item = items.first().unwrap(); for item in &items[1..] { if item.events().last().unwrap().timestamp() > latest_ts { latest_ts = item.events().last().unwrap().timestamp(); latest_item = item; } } latest_item } fn find_earliest_work_item(items: &[WorkItem]) -> &WorkItem { let mut earliest_ts: i64 = items.first().unwrap().created_timestamp(); let mut earliest_item = items.first().unwrap(); for item in &items[1..] { if item.created_timestamp() < earliest_ts { earliest_ts = item.created_timestamp(); earliest_item = item; } } earliest_item }
) -> Option<Vec<&str>> { None } fn name(&self) -> &str { "export" } } fn execute(args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) { let export_type = args[0].str().unwrap(); if export_type.trim().to_lowercase() == "markdown" { export_to_markdown( options.get("path").unwrap().str().unwrap(), options .get("filter") .unwrap() .str() .map_or(String::from("today"), |v| v.to_owned()), ); } else { panic!("Export type '{}' currently not supported", export_type); } } fn export_to_markdown(file_path: &str, filter: String) { let (from_timestamp, to_timestamp) = list::filter_keyword_to_time_range(&filter[..]); let items = persistence::find_items_by_timerange(from_timestamp, to_timestamp).unwrap(); let first_item = items.first().unwrap(); let date_time = shared::time::get_local_date_time(first_item.created_timestamp()); let mut data = String::new(); data.push_str(&format!( "# Report for {} the {}\n\n", date_time.format("%A").to_string(), date_time.format("%Y-%m-%d").to_string() )); data.push_str("## Statistics\n\n"); let total_work_time = { let item_refs: Vec<&WorkItem> = items.iter().collect(); let total_work_time_ms = list::calculate_total_work_time(&item_refs); shared
random
[ { "content": "/// Execute the log command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let description = args[0].str().unwrap();\n\n let tags_str = args[1].str().unwrap();\n\n let time_taken_str = args[2].str().unwrap();\n\n\n\n let tags: Vec<String> = tags_str.split(\",\").map(|s| s.trim().to_owned()).collect();\n\n let time_taken_ms = shared::time::parse_duration(time_taken_str).unwrap() as i64 * 1000;\n\n\n\n let current_timestamp_ms = chrono::Utc::now().timestamp_millis();\n\n let item = persistence::calc::WorkItem::new_internal(\n\n -1,\n\n description.to_owned(),\n\n Status::Done,\n\n HashSet::from_iter(tags.into_iter()),\n\n vec![\n\n Event::new(EventType::Started, current_timestamp_ms - time_taken_ms),\n\n Event::new(EventType::Finished, current_timestamp_ms),\n\n ],\n\n );\n\n\n\n let new_id = persistence::log_item(item).unwrap();\n\n\n\n println!(\n\n \"Create work item with ID {}.\",\n\n format!(\"#{}\", new_id).color(colorful::Color::DodgerBlue3)\n\n );\n\n}\n", "file_path": "cli/src/command/log/log.rs", "rank": 1, "score": 147431.5408097652 }, { "content": "pub fn delete_item(id: i32) -> Result<Option<WorkItem>, Box<dyn Error>> {\n\n let mut data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.delete_item(id)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 3, "score": 146667.47518226592 }, { "content": "pub fn find_item_by_id(id: i32) -> Result<Option<WorkItem>, Box<dyn Error>> {\n\n let data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.find_item_by_id(id)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 4, "score": 144305.59587040448 }, { "content": "/// Build the detail view of a work item (if one is selected).\n\nfn build_detail_view_wrapper() -> impl Widget<Option<Rc<RefCell<UiWorkItem>>>> {\n\n Maybe::new(\n\n || {\n\n SideBar::new(\n\n build_detail_view().lens(SelectedWorkItemLens),\n\n false,\n\n true,\n\n |ctx, _, _| {\n\n ctx.submit_command(\n\n controller::SELECT_ITEM\n\n .with(-1)\n\n .to(controller::DAY_VIEW_WIDGET_ID),\n\n );\n\n },\n\n )\n\n },\n\n || SizedBox::empty().lens(lens::Unit),\n\n )\n\n .with_id(SIDEBAR_WIDGET_ID)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 5, "score": 143052.59846349034 }, { "content": "/// Get the primary monitor from the given list of monitors.\n\npub fn get_primary_monitor(monitors: &[Monitor]) -> Option<&Monitor> {\n\n for monitor in monitors {\n\n if monitor.is_primary() {\n\n return Some(monitor);\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "ui/src/util/ui/ui.rs", "rank": 8, "score": 134333.64229620327 }, { "content": "/// Log a work calc.\n\n/// Will return the ID of the new item.\n\npub fn log_item(item: WorkItem) -> Result<i32, Box<dyn Error>> {\n\n let mut data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.log_item(item)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 9, "score": 133858.02482638846 }, { "content": "/// Print the header for a new date.\n\nfn print_date_header(items: &[&WorkItem]) {\n\n let first = *items.first().unwrap();\n\n let date_time = shared::time::get_local_date_time(first.created_timestamp());\n\n\n\n println!();\n\n println!(\n\n \"{}\",\n\n format!(\n\n \"# {} ({})\",\n\n date_time.format(\"%A - %d. 
%B %Y\"),\n\n shared::time::format_duration((calculate_total_work_time(items) / 1000) as u32)\n\n )\n\n .underlined()\n\n );\n\n println!();\n\n}\n\n\n\n/// Calculate the total work time of the passed items.\n\npub(crate) fn calculate_total_work_time(items: &[&WorkItem]) -> i64 {\n\n let mut time_events: Vec<shared::calc::TimeEvent> = items\n", "file_path": "cli/src/command/list/list.rs", "rank": 11, "score": 128073.92365360238 }, { "content": "/// Build the header of the day view.\n\nfn build_header() -> impl Widget<Rc<chrono::Date<chrono::Local>>> {\n\n let arrow_left_svg = icon::get_icon(icon::ARROW_LEFT);\n\n let arrow_right_svg = icon::get_icon(icon::ARROW_RIGHT);\n\n\n\n Flex::row()\n\n .main_axis_alignment(MainAxisAlignment::SpaceBetween)\n\n .with_spacer(10.0)\n\n .with_child(\n\n UiButton::new(Svg::new(arrow_left_svg).fix_width(18.0).padding(8.0))\n\n .with_corner_radius(100.0)\n\n .on_click(|ctx, _, _| {\n\n ctx.submit_command(controller::PREV_DAY.to(controller::DAY_VIEW_WIDGET_ID))\n\n }),\n\n )\n\n .with_flex_spacer(1.0)\n\n .with_child(build_header_date_label())\n\n .with_flex_spacer(1.0)\n\n .with_child(\n\n UiButton::new(Svg::new(arrow_right_svg).fix_width(18.0).padding(8.0))\n\n .with_corner_radius(100.0)\n\n .on_click(|ctx, _, _| {\n\n ctx.submit_command(controller::NEXT_DAY.to(controller::DAY_VIEW_WIDGET_ID))\n\n }),\n\n )\n\n .with_spacer(10.0)\n\n .padding((0.0, 10.0))\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 12, "score": 124825.146497379 }, { "content": "/// Build the status panel that is part of the work item list item widget.\n\nfn build_status_panel() -> impl Widget<UiWorkItem> {\n\n Painter::new(|ctx, item: &UiWorkItem, _: &_| {\n\n let size = ctx.size().to_rounded_rect(2.0);\n\n\n\n let color = match item.status {\n\n UiWorkItemStatus::InProgress => Color::rgb8(130, 200, 50),\n\n UiWorkItemStatus::Paused => Color::rgb8(216, 139, 100),\n\n UiWorkItemStatus::Finished => Color::rgb8(100, 177, 216),\n\n };\n\n\n\n ctx.fill(size, &color)\n\n })\n\n .fix_width(4.0)\n\n}\n\n\n\nimpl Widget<UiWorkItem> for WorkItemListItemWidget {\n\n fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut UiWorkItem, env: &Env) {\n\n match event {\n\n Event::Command(cmd) => {\n\n if cmd.is(ITEM_CHANGED) {\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 13, "score": 121881.43014290417 }, { "content": "fn build_work_item_widget() -> impl Widget<UiWorkItem> {\n\n WorkItemListItemWidget::new()\n\n .on_click(|ctx, item, _| {\n\n ctx.submit_command(\n\n controller::SELECT_ITEM\n\n .with(item.id)\n\n .to(controller::DAY_VIEW_WIDGET_ID),\n\n );\n\n ctx.submit_command(OPEN_SIDEBAR.to(SIDEBAR_WIDGET_ID));\n\n })\n\n .background(Color::WHITE)\n\n .rounded(2.0)\n\n .padding((10.0, 4.0))\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 14, "score": 121539.56405201412 }, { "content": "/// Build a widget representing a tag.\n\nfn build_tag_widget() -> impl Widget<String> {\n\n let tag_color = rand_color(); // TODO: Use fixed color per tag instead\n\n\n\n Label::new(|text: &String, _: &Env| format!(\"#{}\", text))\n\n .with_text_color(invert_color(&tag_color))\n\n .with_text_size(11.0)\n\n .padding((3.0, 1.0))\n\n .background(tag_color)\n\n .rounded(100.0)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 15, "score": 119881.94096521134 }, { "content": "fn build_day_view_work_items() -> impl Widget<state::DayViewWorkItems> {\n\n Scroll::new(LensWrap::new(\n\n 
List::new(|| build_work_item_widget().lens(SelectedWorkItemLens)),\n\n state::DayViewWorkItems::items,\n\n ))\n\n .vertical()\n\n .with_id(ITEM_LIST_WIDGET_ID)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 16, "score": 114624.80948407942 }, { "content": "/// Build the tags list widget.\n\nfn build_tags() -> impl Widget<im::Vector<String>> {\n\n List::new(|| build_tag_widget())\n\n .horizontal()\n\n .with_spacing(2.0)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 17, "score": 114257.72385786253 }, { "content": "fn build_detail_view() -> impl Widget<UiWorkItem> {\n\n Scroll::new(Padding::new(\n\n (15.0, 10.0),\n\n Flex::column()\n\n .main_axis_alignment(MainAxisAlignment::Start)\n\n .cross_axis_alignment(CrossAxisAlignment::Start)\n\n .with_child(build_detail_view_title())\n\n .with_child(\n\n HorizontalSeparator::new(4.0, Color::rgb(0.9, 0.9, 0.9))\n\n .lens(lens::Unit)\n\n .padding((100.0, 10.0)),\n\n )\n\n .with_child(build_detail_view_status())\n\n .with_child(\n\n HorizontalSeparator::new(4.0, Color::rgb(0.9, 0.9, 0.9))\n\n .lens(lens::Unit)\n\n .padding((100.0, 10.0)),\n\n )\n\n .with_child(\n\n Label::new(\"Tags\")\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 18, "score": 113888.14059431934 }, { "content": "/// Get local date time for the passed timestamp.\n\npub fn get_local_date_time(timestamp_millis: i64) -> chrono::DateTime<chrono::Local> {\n\n let date_time: chrono::DateTime<chrono::Utc> = chrono::DateTime::from_utc(\n\n chrono::NaiveDateTime::from_timestamp(timestamp_millis / 1000, 0),\n\n chrono::Utc,\n\n );\n\n\n\n // Adjust UTC date time to the local timezone\n\n chrono::DateTime::from(date_time)\n\n}\n", "file_path": "shared/src/time/mod.rs", "rank": 19, "score": 113422.04358349845 }, { "content": "fn build_detail_view_title() -> impl Widget<UiWorkItem> {\n\n let title_edit_id = WidgetId::next();\n\n\n\n let non_editing_widget = Label::new(|data: &UiWorkItem, _: &Env| data.description.to_owned())\n\n .with_line_break_mode(LineBreaking::WordWrap)\n\n .with_text_size(20.0)\n\n .padding(2.0)\n\n .expand_width();\n\n\n\n let editing_widget = TextBox::multiline()\n\n .with_text_size(20.0)\n\n .lens(UiWorkItem::description)\n\n .expand_width();\n\n\n\n EditableFieldWidget::new(\n\n non_editing_widget,\n\n editing_widget,\n\n |ctx, data: &mut UiWorkItem, _| {\n\n // Update work item in backend\n\n let mut work_item = data.work_item.borrow_mut();\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 20, "score": 112125.71998166101 }, { "content": "fn build_detail_view_tags() -> impl Widget<UiWorkItem> {\n\n TagCloud::new(|ctx, data| {\n\n ctx.submit_command(ITEM_CHANGED.with(data.id).to(ITEM_LIST_WIDGET_ID))\n\n })\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 21, "score": 112125.71998166101 }, { "content": "fn build_detail_view_status() -> impl Widget<UiWorkItem> {\n\n Flex::row()\n\n .cross_axis_alignment(CrossAxisAlignment::Center)\n\n .with_child(\n\n Label::new(\"Status: \")\n\n .with_text_size(18.0)\n\n .with_text_color(Color::rgb8(120, 120, 120)),\n\n )\n\n .with_child(\n\n Label::new(|data: &UiWorkItem, _: &Env| {\n\n format!(\n\n \"{}\",\n\n match data.status {\n\n UiWorkItemStatus::InProgress => \"In progress\",\n\n UiWorkItemStatus::Paused => \"Paused\",\n\n UiWorkItemStatus::Finished => \"Done\",\n\n }\n\n )\n\n })\n\n .with_text_size(18.0),\n\n )\n\n .with_flex_spacer(1.0)\n\n .with_child(build_detail_view_status_buttons())\n\n}\n\n\n", 
"file_path": "ui/src/widget/day_view/day_view.rs", "rank": 22, "score": 112125.71998166101 }, { "content": "/// Execute the delete command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let id = args[0].int().expect(\"Expected to have an ID supplied\");\n\n\n\n match persistence::delete_item(id) {\n\n Ok(item) => match item {\n\n Some(_) => println!(\"Work item with ID {} has been deleted.\", id),\n\n None => println!(\"There is no work item with ID {}.\", id),\n\n },\n\n Err(e) => println!(\n\n \"An error occurred trying to delete work item with ID {}. Error: '{}'.\",\n\n id, e\n\n ),\n\n };\n\n}\n", "file_path": "cli/src/command/delete/delete.rs", "rank": 23, "score": 111713.64364737246 }, { "content": "/// Execute the finish command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let id = args[0]\n\n .int()\n\n .expect(\"Expected first argument to be a work item ID\");\n\n\n\n match persistence::find_item_by_id(id).unwrap() {\n\n Some(mut item) => {\n\n if let Status::Done = item.status() {\n\n println!(\"Work item with ID {} is already finished.\", id);\n\n } else {\n\n item.finish_working(None).unwrap();\n\n\n\n match persistence::update_items(vec![&item]) {\n\n Ok(_) => println!(\"Finished work item with ID {}.\", id),\n\n Err(e) => println!(\n\n \"Failed to update work item with ID {}. Error: '{}'\",\n\n item.id().unwrap(),\n\n e\n\n ),\n\n };\n", "file_path": "cli/src/command/finish/finish.rs", "rank": 24, "score": 111713.64364737246 }, { "content": "/// Execute the list command.\n\nfn execute(_args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) {\n\n let all: bool = options.get(\"all\").map_or(false, |v| v.bool().unwrap());\n\n\n\n let mut entries = match all {\n\n true => persistence::list_items().unwrap(),\n\n false => {\n\n let filter: &str = options.get(\"filter\").map_or(\"today\", |v| v.str().unwrap());\n\n\n\n // Check if filter string is a work item ID\n\n match filter.parse::<i32>() {\n\n Ok(id) => persistence::find_item_by_id(id)\n\n .unwrap()\n\n .map_or(Vec::new(), |v| vec![v]),\n\n Err(_) => {\n\n // Filter string is not an work item ID but a date!\n\n let (from_timestamp, to_timestamp) = filter_keyword_to_time_range(filter);\n\n\n\n persistence::find_items_by_timerange(from_timestamp, to_timestamp).unwrap()\n\n }\n\n }\n", "file_path": "cli/src/command/list/list.rs", "rank": 25, "score": 111713.64364737246 }, { "content": "/// Execute the clear command.\n\nfn execute(_args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) {\n\n let acknowlegement = options.get(\"ack\").unwrap().bool().unwrap();\n\n\n\n if acknowlegement {\n\n match persistence::clear() {\n\n Ok(_) => println!(\"Cleared the database (Removed all work items).\"),\n\n Err(e) => println!(\"Could not clear the database. 
Error: '{}'.\", e),\n\n }\n\n } else {\n\n println!(\"Do you really want to clear the database (Remove all work items)?\");\n\n println!(\"Please acknowledge the operation by re-entering the clear command followed by the --ack flag\");\n\n }\n\n}\n", "file_path": "cli/src/command/clear/clear.rs", "rank": 26, "score": 111713.64364737246 }, { "content": "/// Execute the start command.\n\nfn execute(args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) {\n\n let description = args[0].str().unwrap();\n\n let tags_str = args[1].str().unwrap();\n\n\n\n let tags: Vec<String> = tags_str.split(\",\").map(|s| s.trim().to_owned()).collect();\n\n\n\n let pause_work_items_in_progress = options.get(\"pause\").unwrap().bool().unwrap();\n\n let finish_work_items_in_progress = options.get(\"finish\").unwrap().bool().unwrap();\n\n\n\n // If both --pause and --finish are specified we are finishing all items!\n\n\n\n // Stopping in progress work items first\n\n if finish_work_items_in_progress || pause_work_items_in_progress {\n\n pause::pause_all_work_items_in_progress();\n\n }\n\n\n\n // When --finish specified -> Finish all paused work items\n\n if finish_work_items_in_progress {\n\n finish::finish_all_paused_work_items();\n\n }\n", "file_path": "cli/src/command/start/start.rs", "rank": 27, "score": 111713.64364737246 }, { "content": "/// Execute the pause command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let value = args[0].str().expect(\"Expected to have one argument\");\n\n\n\n match value.parse::<i32>() {\n\n Ok(id) => {\n\n pause_work_item_by_id(id);\n\n }\n\n Err(_) => {\n\n // Check if value is \"all\" to pause all work items in progress\n\n if value == \"all\" {\n\n pause_all_work_items_in_progress();\n\n\n\n println!(\"Paused all work items in progress.\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/command/pause/pause.rs", "rank": 28, "score": 111713.64364737246 }, { "content": "/// Execute the show command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let id = args[0].int().unwrap();\n\n\n\n match persistence::find_item_by_id(id) {\n\n Ok(optional_item) => match optional_item {\n\n Some(item) => print_item(item),\n\n None => println!(\"Could not find work item with ID {}.\", id),\n\n },\n\n Err(e) => println!(\n\n \"Failed to show details for work item with ID {}. 
Error: '{}'.\",\n\n id, e\n\n ),\n\n }\n\n}\n\n\n", "file_path": "cli/src/command/show/show.rs", "rank": 29, "score": 111713.64364737246 }, { "content": "/// Execute the edit command.\n\nfn execute(args: &Vec<arg::Value>, options: &HashMap<&str, option::Value>) {\n\n let id = args[0]\n\n .int()\n\n .expect(\"Expected to have an ID supplied as first argument\");\n\n\n\n let description = options\n\n .get(\"description\")\n\n .unwrap()\n\n .str()\n\n .map(|v| {\n\n if v.is_empty() {\n\n None\n\n } else {\n\n Some(v.to_owned())\n\n }\n\n })\n\n .unwrap();\n\n\n\n let tags: Option<Vec<String>> = options\n\n .get(\"tags\")\n", "file_path": "cli/src/command/edit/edit.rs", "rank": 30, "score": 111713.64364737246 }, { "content": "fn build_detail_view_status_buttons() -> impl Widget<UiWorkItem> {\n\n Flex::row()\n\n .with_child(When::new(\n\n |data: &UiWorkItem| data.status == UiWorkItemStatus::InProgress,\n\n UiButton::new(Label::new(\"Pause\").padding((4.0, 2.0)))\n\n .with_color(Color::rgb8(255, 179, 102))\n\n .on_click(|ctx, data: &mut UiWorkItem, _| {\n\n // Update UI work item\n\n data.status = UiWorkItemStatus::Paused;\n\n\n\n // Update work item in backend\n\n let mut work_item = data.work_item.borrow_mut();\n\n work_item.pause_working().unwrap();\n\n persistence::update_items(vec![&work_item]).unwrap();\n\n\n\n // Notify list item that it needs to update as well\n\n ctx.submit_command(ITEM_CHANGED.with(data.id).to(ITEM_LIST_WIDGET_ID));\n\n\n\n ctx.request_update();\n\n }),\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 31, "score": 110441.80859499042 }, { "content": "pub fn list_items() -> Result<Vec<WorkItem>, Box<dyn Error>> {\n\n let data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.list_items()?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 32, "score": 109954.49977201765 }, { "content": "/// Execute the continue command.\n\nfn execute(args: &Vec<arg::Value>, _options: &HashMap<&str, option::Value>) {\n\n let id = args[0]\n\n .int()\n\n .expect(\"Expected an ID of a work item as first argument\");\n\n\n\n match persistence::find_item_by_id(id) {\n\n Ok(option) => match option {\n\n Some(mut item) => {\n\n if let Status::Paused = item.status() {\n\n item.continue_working().unwrap();\n\n\n\n match persistence::update_items(vec![&item]) {\n\n Ok(_) => println!(\"Continued work item with ID {}.\", id),\n\n Err(e) => println!(\n\n \"Failed to continue work item with ID {}. 
Error: '{}'\",\n\n id, e\n\n ),\n\n };\n\n } else {\n\n println!(\"Work item with ID {} is currently not paused and thus cannot be continued working on.\", id);\n", "file_path": "cli/src/command/continue_cmd/continue_cmd.rs", "rank": 33, "score": 108473.84662028338 }, { "content": "/// Update the passed work item with the given optional changes.\n\nfn update_work_item(\n\n item: &mut WorkItem,\n\n description: Option<String>,\n\n tags: Option<Vec<String>>,\n\n) -> Result<(), Box<dyn Error>> {\n\n if description.is_some() {\n\n item.set_description(description.unwrap());\n\n }\n\n\n\n if tags.is_some() {\n\n item.set_tags(HashSet::from_iter(tags.unwrap().into_iter()));\n\n }\n\n\n\n // Persist changes\n\n persistence::update_items(vec![item])\n\n}\n", "file_path": "cli/src/command/edit/edit.rs", "rank": 34, "score": 108015.35959673095 }, { "content": "/// Update a bunch of work items.\n\npub fn update_items(items: Vec<&WorkItem>) -> Result<(), Box<dyn Error>> {\n\n let mut data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.update_items(items)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 35, "score": 106654.85457109503 }, { "content": "pub fn find_items_by_timerange(\n\n from_timestamp: i64,\n\n to_timestamp: i64,\n\n) -> Result<Vec<WorkItem>, Box<dyn Error>> {\n\n let data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.filter_items(from_timestamp, to_timestamp)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 36, "score": 104655.58815245888 }, { "content": "pub fn find_items_by_status(status: Status) -> Result<Vec<WorkItem>, Box<dyn Error>> {\n\n let data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.find_items_by_status(status)?)\n\n}\n\n\n", "file_path": "persistence/src/lib.rs", "rank": 37, "score": 101801.64283446182 }, { "content": "/// Print the work item.\n\nfn print_item(item: WorkItem) {\n\n print_header(&format!(\n\n \"Details for work item with ID {}\",\n\n item.id().unwrap()\n\n ));\n\n println!();\n\n\n\n println!(\"{}\", \"# Description\".underlined());\n\n\n\n println!(\"{}\", item.description());\n\n\n\n println!();\n\n\n\n println!(\"{}\", \"# Status\".underlined());\n\n\n\n println!(\n\n \"{}\",\n\n format!(\"{}\", item.status()).color(colorful::Color::OrangeRed1)\n\n );\n\n\n", "file_path": "cli/src/command/show/show.rs", "rank": 38, "score": 100981.84984259323 }, { "content": "fn pause_work_item_by_id(id: i32) {\n\n match persistence::find_item_by_id(id).unwrap() {\n\n Some(mut item) => {\n\n if let Status::InProgress = item.status() {\n\n item.pause_working().unwrap();\n\n\n\n match persistence::update_items(vec![&item]) {\n\n Ok(_) => println!(\"Paused work item with ID {}.\", id),\n\n Err(e) => println!(\"Failed to update work item with ID {}. 
Error: '{}'\", id, e),\n\n };\n\n } else {\n\n println!(\n\n \"Work item with ID {} is currently not in progress and thus cannot be paused.\",\n\n id\n\n );\n\n }\n\n }\n\n None => {\n\n println!(\"Could not find work item with ID {}.\", id);\n\n }\n", "file_path": "cli/src/command/pause/pause.rs", "rank": 39, "score": 98755.20050009128 }, { "content": "/// Determine the path to the logs database.\n\nfn determine_database_path() -> Result<path::PathBuf, &'static str> {\n\n Ok(match home::home_dir() {\n\n Some(path) => Ok(path),\n\n None => Err(\"Could not determine the current users HOME directory\"),\n\n }?\n\n .join(SUB_HOME_DIRECTORY)\n\n .join(FILE_NAME))\n\n}\n\n\n", "file_path": "persistence/src/data_access/sqlite/sqlite_data_access.rs", "rank": 40, "score": 97925.2924904109 }, { "content": "/// Format a work item.\n\nfn format_item(item: &WorkItem) -> String {\n\n let id_str = format!(\n\n \"#{}\",\n\n item.id().expect(\"Work item must have an ID at this point!\")\n\n )\n\n .color(colorful::Color::DodgerBlue3);\n\n\n\n let time_str = shared::time::get_local_date_time(item.created_timestamp())\n\n .format(\"%H:%M\")\n\n .to_string()\n\n .color(colorful::Color::DeepPink1a);\n\n\n\n let description = item.description();\n\n\n\n let duration_str = shared::time::format_duration((item.time_taken() / 1000) as u32)\n\n .color(colorful::Color::Orange1);\n\n let status_str = match item.status() {\n\n Status::Done => duration_str,\n\n Status::InProgress => {\n\n format!(\"IN PROGRESS ({})\", duration_str).color(colorful::Color::GreenYellow)\n", "file_path": "cli/src/command/list/list.rs", "rank": 41, "score": 97858.49028601675 }, { "content": "/// Build placeholder for no items.\n\nfn build_placeholder() -> impl Widget<()> {\n\n Flex::column()\n\n .main_axis_alignment(MainAxisAlignment::Center)\n\n .with_child(Svg::new(icon::get_icon(icon::SLOTH)).fix_height(150.0))\n\n .with_spacer(30.0)\n\n .with_child(\n\n Label::new(\"No work items for the day!\")\n\n .with_text_size(24.0)\n\n .with_line_break_mode(LineBreaking::WordWrap)\n\n .with_text_alignment(TextAlignment::Center)\n\n .with_text_color(Color::rgb8(100, 100, 100))\n\n .fix_width(300.0),\n\n )\n\n .expand_height()\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 42, "score": 95982.05345837408 }, { "content": "pub fn clear() -> Result<(), Box<dyn Error>> {\n\n let mut data_access = data_access::get_data_access()?;\n\n\n\n Ok(data_access.clear()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "persistence/src/lib.rs", "rank": 43, "score": 92121.17661339477 }, { "content": "struct ItemHeaderWidget {\n\n left: WidgetPod<UiWorkItem, Box<dyn Widget<UiWorkItem>>>,\n\n right: WidgetPod<UiWorkItem, Box<dyn Widget<UiWorkItem>>>,\n\n hovered: bool,\n\n}\n\n\n\nimpl Widget<UiWorkItem> for ItemHeaderWidget {\n\n fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut UiWorkItem, env: &Env) {\n\n self.left.event(ctx, event, data, env);\n\n self.right.event(ctx, event, data, env);\n\n }\n\n\n\n fn lifecycle(\n\n &mut self,\n\n ctx: &mut LifeCycleCtx,\n\n event: &LifeCycle,\n\n data: &UiWorkItem,\n\n env: &Env,\n\n ) {\n\n self.left.lifecycle(ctx, event, data, env);\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 44, "score": 91756.59428771073 }, { "content": "/// Build the root widget.\n\nfn build_root_widget() -> impl Widget<state::UiState> {\n\n Flex::row()\n\n 
.cross_axis_alignment(CrossAxisAlignment::Start)\n\n .with_flex_child(\n\n LensWrap::new(day_view::DayViewWidget::new(), state::UiState::day),\n\n 1.0,\n\n )\n\n .background(LinearGradient::new(\n\n UnitPoint::TOP,\n\n UnitPoint::BOTTOM,\n\n (Color::rgb8(255, 255, 255), Color::rgb8(220, 230, 240)),\n\n ))\n\n .env_scope(|env, _| {\n\n util::ui::env::configure_environment(env);\n\n })\n\n}\n", "file_path": "ui/src/main.rs", "rank": 45, "score": 91432.4202347183 }, { "content": "/// Configure the druid environment.\n\npub fn configure_environment(env: &mut Env) {\n\n theme::configure_theme(env);\n\n}\n", "file_path": "ui/src/util/ui/env.rs", "rank": 46, "score": 90909.995512291 }, { "content": "/// Configure the theme for druid.\n\npub fn configure_theme(env: &mut Env) {\n\n env.set(theme::BACKGROUND_LIGHT, Color::rgb8(245, 245, 245));\n\n env.set(theme::BORDER_LIGHT, Color::rgb8(140, 150, 160));\n\n env.set(theme::BORDER_DARK, Color::rgb8(120, 130, 140));\n\n env.set(theme::SELECTION_COLOR, Color::rgb8(51, 152, 255));\n\n env.set(theme::CURSOR_COLOR, Color::rgb8(40, 40, 40));\n\n\n\n configure_button_theme(env);\n\n configure_label_theme(env);\n\n configure_scrollbar_theme(env);\n\n}\n\n\n", "file_path": "ui/src/util/ui/theme.rs", "rank": 47, "score": 90909.995512291 }, { "content": "/// Build the date label for the day view header.\n\nfn build_header_date_label() -> Label<Rc<chrono::Date<chrono::Local>>> {\n\n Label::dynamic(|date_ref: &Rc<chrono::Date<chrono::Local>>, _| {\n\n date_ref.as_ref().format(\"%A, %d. %B\").to_string()\n\n })\n\n .with_text_size(32.0)\n\n}\n\n\n\nimpl Widget<state::DayViewState> for DayViewWidget {\n\n fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut DayViewState, env: &Env) {\n\n self.child.event(ctx, event, data, env);\n\n }\n\n\n\n fn lifecycle(\n\n &mut self,\n\n ctx: &mut LifeCycleCtx,\n\n event: &LifeCycle,\n\n data: &DayViewState,\n\n env: &Env,\n\n ) {\n\n self.child.lifecycle(ctx, event, data, env);\n", "file_path": "ui/src/widget/day_view/day_view.rs", "rank": 48, "score": 90888.45182177662 }, { "content": "struct ItemHeaderWidgetController;\n\n\n\nimpl Controller<UiWorkItem, ItemHeaderWidget> for ItemHeaderWidgetController {\n\n fn event(\n\n &mut self,\n\n child: &mut ItemHeaderWidget,\n\n ctx: &mut EventCtx,\n\n event: &Event,\n\n data: &mut UiWorkItem,\n\n env: &Env,\n\n ) {\n\n match event {\n\n Event::Command(cmd) => {\n\n if cmd.is(HOVER_CHANGED) {\n\n child.hovered = !child.hovered;\n\n ctx.request_paint();\n\n }\n\n }\n\n _ => child.event(ctx, event, data, env),\n\n }\n\n }\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 49, "score": 90556.02876156347 }, { "content": "/// Configure the scrollbar theme.\n\npub fn configure_scrollbar_theme(env: &mut Env) {\n\n env.set(theme::SCROLLBAR_BORDER_COLOR, Color::rgb8(40, 40, 40));\n\n env.set(theme::SCROLLBAR_COLOR, Color::rgb8(40, 40, 40));\n\n}\n", "file_path": "ui/src/util/ui/theme.rs", "rank": 50, "score": 89482.88068915461 }, { "content": "/// Configure the button theme.\n\npub fn configure_button_theme(env: &mut Env) {\n\n env.set(theme::BUTTON_DARK, Color::rgb8(180, 190, 200));\n\n env.set(theme::BUTTON_LIGHT, Color::rgb8(190, 200, 210));\n\n}\n\n\n", "file_path": "ui/src/util/ui/theme.rs", "rank": 51, "score": 89482.88068915461 }, { "content": "/// Configure the label theme.\n\npub fn configure_label_theme(env: &mut Env) {\n\n env.set(theme::LABEL_COLOR, Color::rgb8(20, 20, 20));\n\n}\n\n\n", "file_path": "ui/src/util/ui/theme.rs", 
"rank": 52, "score": 89482.88068915461 }, { "content": "/// Format a duration given in seconds in the form \"Xh Xm Xs\".\n\npub fn format_duration(mut seconds: u32) -> String {\n\n let hours = seconds / 60 / 60;\n\n seconds -= hours * 60 * 60;\n\n\n\n let minutes = seconds / 60;\n\n seconds -= minutes * 60;\n\n\n\n let mut result = Vec::new();\n\n if hours > 0 {\n\n result.push(format!(\"{}h\", hours));\n\n }\n\n if minutes > 0 {\n\n result.push(format!(\"{}m\", minutes));\n\n }\n\n if seconds > 0 {\n\n result.push(format!(\"{}s\", seconds));\n\n }\n\n\n\n result.join(\" \")\n\n}\n", "file_path": "shared/src/time/duration_parser.rs", "rank": 53, "score": 87607.76308765775 }, { "content": "/// Work item to be filled with more data.\n\nstruct TmpWorkItem {\n\n id: i32,\n\n description: String,\n\n status: Status,\n\n}\n\n\n\nimpl SQLiteDataAccess {\n\n /// Create a new SQLite data access.\n\n pub fn new() -> Result<SQLiteDataAccess, Box<dyn Error>> {\n\n let db_path = determine_database_path()?;\n\n\n\n // Create directories if they do not exist\n\n fs::create_dir_all(&db_path.parent().unwrap())?;\n\n\n\n let mut data_access = SQLiteDataAccess {\n\n connection: Connection::open(&db_path)?,\n\n };\n\n\n\n data_access.prepare_database()?;\n\n\n", "file_path": "persistence/src/data_access/sqlite/sqlite_data_access.rs", "rank": 54, "score": 86549.91756906899 }, { "content": "/// Parse the time taken in seconds from the given string in the format\n\n/// \"Xh Xm Xs\" where X must be a number and h, m or s are optional.\n\npub fn parse_duration(src: &str) -> Result<i32, String> {\n\n let mut hours = 0;\n\n let mut minutes = 0;\n\n let mut seconds = 0;\n\n\n\n let mut number_buffer: String = String::new();\n\n for char in src.chars() {\n\n let is_hours = char == 'h';\n\n let is_minutes = char == 'm';\n\n let is_seconds = char == 's';\n\n\n\n let consume_buffer = is_hours || is_minutes || is_seconds;\n\n if consume_buffer {\n\n // Parse number from buffer and clear it\n\n let num: i32 = match number_buffer.trim().parse() {\n\n Ok(res) => Ok(res),\n\n Err(_) => Err(format!(\n\n \"Could not parse time taken from given string '{}'\",\n\n src\n\n )),\n", "file_path": "shared/src/time/duration_parser.rs", "rank": 55, "score": 84574.71630311233 }, { "content": "/// Calculate the total work time without counting parallel work time ranges\n\n/// multiple times.\n\n/// For example when you have parallel work items, the time is only counted once!\n\npub fn calculate_unique_total_time(events: &mut [TimeEvent]) -> i64 {\n\n // Sort events by their timestamp\n\n events.sort_by_key(|v| v.timestamp);\n\n\n\n // Iterate over events and sum up the total time\n\n let mut total_time = 0;\n\n let mut in_progress_count = 0;\n\n let mut start_timestamp: Option<i64> = None;\n\n for event in events {\n\n if event.is_start {\n\n in_progress_count += 1;\n\n\n\n if in_progress_count == 1 {\n\n start_timestamp = Some(event.timestamp); // Memorize as start timestamp\n\n }\n\n } else {\n\n in_progress_count -= 1;\n\n\n\n if in_progress_count == 0 {\n\n // No time range currently in progress -> save to time taken\n", "file_path": "shared/src/calc/time_calculator.rs", "rank": 56, "score": 83729.68958321991 }, { "content": "fn build_tag_widget(widget_id: WidgetId) -> impl Widget<String> {\n\n TagWidget::new(move |ctx: &mut EventCtx, data: &String| {\n\n ctx.submit_command(DELETE_TAG.with(data.to_owned()).to(widget_id));\n\n })\n\n}\n\n\n", "file_path": "ui/src/widget/tag_cloud/tag_cloud.rs", "rank": 57, "score": 81911.10043446887 }, 
{ "content": "/// Load work items for the given date.\n\nfn load_work_items(\n\n date: &chrono::Date<chrono::Local>,\n\n) -> Result<Option<DayViewWorkItems>, Box<dyn Error>> {\n\n let from_timestamp = date.and_hms(0, 0, 0).timestamp_millis();\n\n let to_timestamp = date.succ().and_hms(0, 0, 0).timestamp_millis();\n\n\n\n let items = persistence::find_items_by_timerange(from_timestamp, to_timestamp)?;\n\n\n\n if items.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut ui_work_items = im::Vector::new();\n\n for item in items {\n\n ui_work_items.push_back(Rc::new(RefCell::new(work_item::UiWorkItem {\n\n id: item.id().unwrap(),\n\n description: item.description().to_owned(),\n\n status: match item.status() {\n\n Status::Done => work_item::UiWorkItemStatus::Finished,\n\n Status::InProgress => work_item::UiWorkItemStatus::InProgress,\n", "file_path": "ui/src/state/state.rs", "rank": 58, "score": 79369.15059000047 }, { "content": "fn rand_color() -> Color {\n\n Color::rgb(\n\n rand::random::<f64>(),\n\n rand::random::<f64>(),\n\n rand::random::<f64>(),\n\n )\n\n .with_alpha(0.4)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 59, "score": 77044.91378598246 }, { "content": "/// Build the status label of the work item.\n\nfn build_status_label() -> Label<UiWorkItem> {\n\n Label::new(|item: &UiWorkItem, _env: &_| {\n\n format!(\n\n \"{}\",\n\n match item.status {\n\n UiWorkItemStatus::InProgress => \"In progress\",\n\n UiWorkItemStatus::Paused => \"Paused\",\n\n UiWorkItemStatus::Finished => \"Done\",\n\n }\n\n )\n\n })\n\n .with_text_size(12.0)\n\n .with_text_color(Color::rgb8(100, 100, 100))\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 60, "score": 76662.11397189897 }, { "content": "/// Build the work item timing label.\n\nfn build_timing_label() -> Label<UiWorkItem> {\n\n Label::new(|item: &UiWorkItem, _: &Env| {\n\n let work_item = item.work_item.as_ref().borrow();\n\n\n\n let time_str = shared::time::get_local_date_time(work_item.created_timestamp())\n\n .format(\"%H:%M\")\n\n .to_string();\n\n let duration_str = shared::time::format_duration((work_item.time_taken() / 1000) as u32);\n\n\n\n format!(\"{} ({})\", duration_str, time_str)\n\n })\n\n .with_text_size(12.0)\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 61, "score": 76662.11397189897 }, { "content": "use crate::command::command::Command;\n\nuse cmd_args::{arg, option, Group};\n\nuse colorful::Colorful;\n\nuse persistence::calc::event::{Event, EventType};\n\nuse persistence::calc::Status;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::iter::FromIterator;\n\n\n\n/// Command used to log work items directly.\n\npub struct LogCommand {}\n\n\n\nimpl Command for LogCommand {\n\n fn build(&self) -> Group {\n\n Group::new(\n\n Box::new(|args, options| execute(args, options)),\n\n \"Log an already done work item\",\n\n )\n\n .add_argument(arg::Descriptor::new(\n\n arg::Type::Str,\n\n \"Description of the work done\",\n", "file_path": "cli/src/command/log/log.rs", "rank": 62, "score": 76340.48048292975 }, { "content": " ))\n\n .add_argument(arg::Descriptor::new(arg::Type::Str, \"Tags\"))\n\n .add_argument(arg::Descriptor::new(\n\n arg::Type::Str,\n\n \"Time spent on the task (Format like '2h 3m 12s', '45m' or '1h 15m')\",\n\n ))\n\n }\n\n\n\n fn aliases(&self) -> Option<Vec<&str>> {\n\n Some(vec![\"l\"])\n\n }\n\n\n\n fn name(&self) -> &str {\n\n \"log\"\n\n }\n\n}\n\n\n\n/// Execute the log 
command.\n", "file_path": "cli/src/command/log/log.rs", "rank": 63, "score": 76326.33139935827 }, { "content": "/// Get the current UTC+0 timestamp in milliseconds.\n\nfn get_current_timestamp() -> i64 {\n\n chrono::Utc::now().timestamp_millis()\n\n}\n", "file_path": "persistence/src/calc/work_item.rs", "rank": 68, "score": 74781.78772682502 }, { "content": "/// Get the data access to use.\n\npub fn get_data_access() -> Result<Box<dyn DataAccess>, Box<dyn Error>> {\n\n Ok(Box::new(SQLiteDataAccess::new()?))\n\n}\n", "file_path": "persistence/src/data_access/data_access_factory.rs", "rank": 69, "score": 74766.00557655616 }, { "content": "/// Print a header string to the console.\n\nfn print_header(str: &str) {\n\n println!(\" {} \", \"-\".repeat(str.len() + 2));\n\n println!(\"| {} |\", str);\n\n println!(\" {} \", \"-\".repeat(str.len() + 2));\n\n}\n", "file_path": "cli/src/command/show/show.rs", "rank": 70, "score": 73510.82813624857 }, { "content": "/// Create final work items from the passed caches.\n\nfn create_work_items(\n\n item_lookup: HashMap<i32, TmpWorkItem>,\n\n mut tags_lookup: HashMap<i32, HashSet<String>>,\n\n mut events_lookup: HashMap<i32, Vec<Event>>,\n\n) -> Vec<WorkItem> {\n\n item_lookup\n\n .into_iter()\n\n .map(|(id, tmp_item)| {\n\n // Retrieve cached tags\n\n let tags = tags_lookup.remove(&id).unwrap_or(HashSet::new());\n\n\n\n // Retrieve cached events and sort them by their timestamp\n\n let mut events = events_lookup\n\n .remove(&id)\n\n .expect(\"Found no events for a work item, which must not be possible\");\n\n events.sort_by_key(|e| e.timestamp());\n\n\n\n WorkItem::new_internal(\n\n tmp_item.id,\n\n tmp_item.description,\n\n tmp_item.status,\n\n tags,\n\n events,\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "persistence/src/data_access/sqlite/sqlite_data_access.rs", "rank": 71, "score": 72923.98294958856 }, { "content": "fn invert_color(color: &Color) -> Color {\n\n let (red, green, blue, _) = color.as_rgba();\n\n let sum = red + green + blue;\n\n\n\n if sum < 1.5 {\n\n Color::WHITE\n\n } else {\n\n Color::BLACK\n\n }\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 72, "score": 72820.38641359325 }, { "content": "fn get_item_background_color(is_hot: bool) -> Color {\n\n if is_hot {\n\n Color::rgb8(245, 245, 245)\n\n } else {\n\n Color::WHITE\n\n }\n\n}\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 73, "score": 70694.97947147759 }, { "content": "mod log;\n\n\n\npub use log::LogCommand;\n", "file_path": "cli/src/command/log/mod.rs", "rank": 74, "score": 67819.42780069103 }, { "content": "mod export;\n\n\n\npub use export::ExportCommand;\n", "file_path": "cli/src/command/export/mod.rs", "rank": 75, "score": 67697.79594958237 }, { "content": "/// Widget displaying a tag\n\nstruct TagWidget {\n\n inner: WidgetPod<String, Box<dyn Widget<String>>>,\n\n hover: WidgetPod<String, Box<dyn Widget<String>>>,\n\n is_hovered: bool,\n\n}\n\n\n\nimpl TagWidget {\n\n pub fn new(on_delete: impl Fn(&mut EventCtx, &String) + 'static) -> TagWidget {\n\n let tag_color = rand_color(); // TODO: Use fixed color per tag instead\n\n\n\n let tag_label = Label::new(|text: &String, _: &Env| format!(\"#{}\", text))\n\n .with_text_color(invert_color(&tag_color))\n\n .with_text_size(13.0)\n\n .padding((6.0, 4.0))\n\n .background(tag_color)\n\n .rounded(100.0)\n\n .padding((4.0, 2.0));\n\n\n\n let hover_widget = Flex::row()\n\n .main_axis_alignment(MainAxisAlignment::Center)\n", "file_path": 
"ui/src/widget/tag_cloud/tag_cloud.rs", "rank": 76, "score": 59856.42170240773 }, { "content": "/// Child widget of a stack.\n\nstruct StackChild<T> {\n\n /// Whether the child will let mouse events through.\n\n pod: WidgetPod<T, Box<dyn Widget<T>>>,\n\n}\n\n\n\nimpl<T> StackChild<T>\n\nwhere\n\n T: Data,\n\n{\n\n pub fn new(inner: impl Widget<T> + 'static) -> StackChild<T> {\n\n StackChild {\n\n pod: WidgetPod::new(inner.boxed()),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Widget<T> for StackChild<T>\n\nwhere\n\n T: Data,\n\n{\n", "file_path": "ui/src/widget/stack/stack.rs", "rank": 77, "score": 58664.6852650724 }, { "content": "/// Common data access interface.\n\npub trait DataAccess {\n\n /// Log a work calc.\n\n /// Will return the ID of the new work item.\n\n fn log_item(&mut self, item: WorkItem) -> Result<i32, Box<dyn Error>>;\n\n\n\n /// Update a bunch of work items.\n\n fn update_items(&mut self, items: Vec<&WorkItem>) -> Result<(), Box<dyn Error>>;\n\n\n\n /// List all available work items.\n\n fn list_items(&self) -> Result<Vec<WorkItem>, Box<dyn Error>>;\n\n\n\n /// List available work items in the given time range.\n\n fn filter_items(\n\n &self,\n\n from_timestamp: i64,\n\n to_timestamp: i64,\n\n ) -> Result<Vec<WorkItem>, Box<dyn Error>>;\n\n\n\n /// Find a work item by its ID.\n\n fn find_item_by_id(&self, id: i32) -> Result<Option<WorkItem>, Box<dyn Error>>;\n", "file_path": "persistence/src/data_access/data_access.rs", "rank": 78, "score": 56244.461173986434 }, { "content": "/// Patch that will patch the database to another version.\n\npub trait Patch {\n\n /// Get the version the patch will patch the database to.\n\n fn version(&self) -> i32;\n\n\n\n /// Try to patch the database to the patches version.\n\n fn patch(&self, con: &mut Connection) -> Result<(), Box<dyn Error>>;\n\n}\n", "file_path": "persistence/src/data_access/sqlite/patch/patch.rs", "rank": 79, "score": 56244.461173986434 }, { "content": "/// Create a tmp work item lookup from the given logs table rows.\n\nfn tmp_item_lookup_from_rows(mut rows: Rows) -> Result<HashMap<i32, TmpWorkItem>, Box<dyn Error>> {\n\n let mut item_lookup = HashMap::new();\n\n while let Some(row) = rows.next()? 
{\n\n let id: i32 = row.get(0)?;\n\n\n\n let description = row.get(1)?;\n\n\n\n let status_str: String = row.get(2)?;\n\n let status = Status::from_str(&status_str)\n\n .expect(\"Could not interpret Status string stored in the database\");\n\n\n\n item_lookup.insert(\n\n id,\n\n TmpWorkItem {\n\n id,\n\n description,\n\n status,\n\n },\n\n );\n\n }\n\n\n\n Ok(item_lookup)\n\n}\n\n\n", "file_path": "persistence/src/data_access/sqlite/sqlite_data_access.rs", "rank": 80, "score": 50819.488087177175 }, { "content": "fn main() {\n\n let mut group = Group::new(\n\n Box::new(|_args, _options| {\n\n println!(\"### Incorrect usage ###\");\n\n println!(\"Pass '--help' to see all available options.\");\n\n process::exit(1);\n\n }),\n\n \"Tool to log your work\",\n\n );\n\n\n\n // Add all sub-commands\n\n for command in &command::COMMANDS {\n\n group = group.add_child(command.name(), command.aliases(), command.build());\n\n }\n\n\n\n // Start the command line parser\n\n parser::parse(group, None).unwrap();\n\n}\n", "file_path": "cli/src/main.rs", "rank": 81, "score": 50769.14252964522 }, { "content": "/// Insert all the given events for the work item with the passed ID.\n\nfn insert_events(\n\n transaction: &Transaction,\n\n id: i32,\n\n events: &[Event],\n\n) -> Result<(), Box<dyn Error>> {\n\n for event in events {\n\n transaction.execute(\n\n \"INSERT INTO log_events (log_id, timestamp, event) VALUES (?1, ?2, ?3)\",\n\n params![id, event.timestamp(), format!(\"{}\", event.event_type())],\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "persistence/src/data_access/sqlite/sqlite_data_access.rs", "rank": 82, "score": 45382.65068539779 }, { "content": "fn rand_color() -> Color {\n\n Color::rgb(\n\n rand::random::<f64>(),\n\n rand::random::<f64>(),\n\n rand::random::<f64>(),\n\n )\n\n .with_alpha(0.4)\n\n}\n\n\n\nimpl Widget<UiWorkItem> for TagCloud {\n\n fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut UiWorkItem, env: &Env) {\n\n match event {\n\n Event::Command(cmd) => {\n\n if cmd.is(DELETE_TAG) {\n\n if let Some(tag) = cmd.get(DELETE_TAG) {\n\n if let Some(index) = data.tags.index_of(tag) {\n\n data.tags.remove(index);\n\n }\n\n\n\n // Remove from backend work item and update it\n", "file_path": "ui/src/widget/tag_cloud/tag_cloud.rs", "rank": 83, "score": 44194.48103600931 }, { "content": "/// Entry point of the application.\n\nfn main() -> Result<(), Box<dyn Error>> {\n\n let state = state::UiState {\n\n day: state::DayViewState::new(chrono::Local::today()),\n\n };\n\n\n\n // Create and configure main window\n\n let window_description = configure_window(WindowDesc::new(build_root_widget()));\n\n\n\n // Launch app\n\n AppLauncher::with_window(window_description).launch(state)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ui/src/main.rs", "rank": 84, "score": 43071.330232589855 }, { "content": "/// Command usable using the worklog CLI.\n\npub(crate) trait Command {\n\n /// Build the command group for the command line parser.\n\n fn build(&self) -> cmd_args::Group;\n\n\n\n /// Get all available command aliases.\n\n fn aliases(&self) -> Option<Vec<&str>>;\n\n\n\n /// Command name.\n\n fn name(&self) -> &str;\n\n}\n", "file_path": "cli/src/command/command.rs", "rank": 85, "score": 42122.86654708663 }, { "content": "use crate::command::clear::ClearCommand;\n\nuse crate::command::command::Command;\n\nuse crate::command::continue_cmd::ContinueCommand;\n\nuse crate::command::delete::DeleteCommand;\n\nuse crate::command::edit::EditCommand;\n\nuse 
crate::command::export::ExportCommand;\n\nuse crate::command::finish::FinishCommand;\n\nuse crate::command::list::ListCommand;\n\nuse crate::command::log::LogCommand;\n\nuse crate::command::pause::PauseCommand;\n\nuse crate::command::show::ShowCommand;\n\nuse crate::command::start::StartCommand;\n\n\n\n/// All available commands.\n\npub(crate) const COMMANDS: [&dyn Command; 11] = [\n\n &ListCommand {},\n\n &LogCommand {},\n\n &StartCommand {},\n\n &FinishCommand {},\n\n &PauseCommand {},\n\n &ContinueCommand {},\n\n &ExportCommand {},\n\n &DeleteCommand {},\n\n &EditCommand {},\n\n &ClearCommand {},\n\n &ShowCommand {},\n\n];\n", "file_path": "cli/src/command/commands.rs", "rank": 86, "score": 42122.82358897604 }, { "content": "fn invert_color(color: &Color) -> Color {\n\n let (red, green, blue, _) = color.as_rgba();\n\n let sum = red + green + blue;\n\n\n\n if sum < 1.5 {\n\n Color::WHITE\n\n } else {\n\n Color::BLACK\n\n }\n\n}\n\n\n", "file_path": "ui/src/widget/tag_cloud/tag_cloud.rs", "rank": 87, "score": 40829.66381140408 }, { "content": "use druid::{Data, Lens};\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n/// Work item displayable in the UI.\n\n#[derive(Clone, Data, Lens)]\n\npub struct UiWorkItem {\n\n /// ID of the work item.\n\n pub id: i32,\n\n /// Description of the work item.\n\n pub description: String,\n\n /// Status of the work item.\n\n pub status: UiWorkItemStatus,\n\n /// Tags of the item.\n\n pub tags: im::Vector<String>,\n\n /// Reference to the original work item.\n\n pub work_item: Rc<RefCell<persistence::calc::WorkItem>>,\n\n /// Temporary string used for example to add a new tag to the tag list.\n\n pub tmp: String,\n\n}\n\n\n\n#[derive(Clone, Data, PartialEq, Debug)]\n\npub enum UiWorkItemStatus {\n\n InProgress,\n\n Paused,\n\n Finished,\n\n}\n", "file_path": "ui/src/state/work_item/work_item.rs", "rank": 88, "score": 40141.02318330466 }, { "content": " }\n\n }\n\n\n\n /// Specify a callback to be called when the item has been clicked.\n\n pub fn on_click(\n\n self,\n\n f: impl Fn(&mut EventCtx, &mut UiWorkItem, &Env) + 'static,\n\n ) -> ControllerHost<Self, Click<UiWorkItem>> {\n\n ControllerHost::new(self, Click::new(f))\n\n }\n\n}\n\n\n\n/// Build the work item timing label.\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 89, "score": 37411.753530701855 }, { "content": "use crate::state::work_item::{UiWorkItem, UiWorkItemStatus};\n\nuse crate::Size;\n\nuse druid::widget::{\n\n Click, Controller, ControllerHost, CrossAxisAlignment, Flex, Label, LineBreaking, List,\n\n MainAxisAlignment, Painter,\n\n};\n\nuse druid::{\n\n BoxConstraints, Color, Env, Event, EventCtx, LayoutCtx, LifeCycle, LifeCycleCtx,\n\n LinearGradient, PaintCtx, Point, Rect, RenderContext, Selector, UnitPoint, UpdateCtx, Widget,\n\n WidgetExt, WidgetId, WidgetPod,\n\n};\n\n\n\nconst HOVER_CHANGED: Selector = Selector::new(\"work-item-header.hover-changed\");\n\npub(crate) const ITEM_CHANGED: Selector<i32> = Selector::new(\"work-item.item-changed\");\n\n\n\npub(crate) struct WorkItemListItemWidget {\n\n header_id: WidgetId,\n\n child: WidgetPod<UiWorkItem, Box<dyn Widget<UiWorkItem>>>,\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 90, "score": 37409.29245990715 }, { "content": "impl WorkItemListItemWidget {\n\n pub fn new() -> WorkItemListItemWidget {\n\n let header_id = WidgetId::next();\n\n let item_header = ControllerHost::new(\n\n ItemHeaderWidget {\n\n left: WidgetPod::new(\n\n Label::new(|item: 
&UiWorkItem, _env: &_| format!(\"{}\", item.description))\n\n .with_text_size(18.0)\n\n .with_line_break_mode(LineBreaking::Clip),\n\n )\n\n .boxed(),\n\n right: WidgetPod::new(build_timing_label().padding((10.0, 0.0, 0.0, 0.0))).boxed(),\n\n hovered: false,\n\n },\n\n ItemHeaderWidgetController,\n\n )\n\n .with_id(header_id);\n\n\n\n let child = Flex::row()\n\n .main_axis_alignment(MainAxisAlignment::Start)\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 91, "score": 37406.94726385971 }, { "content": " if let LifeCycle::HotChanged(_) = event {\n\n ctx.request_paint();\n\n ctx.submit_command(HOVER_CHANGED.to(self.header_id))\n\n }\n\n\n\n self.child.lifecycle(ctx, event, data, env);\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n ctx: &mut UpdateCtx,\n\n _old_data: &UiWorkItem,\n\n data: &UiWorkItem,\n\n env: &Env,\n\n ) {\n\n self.child.update(ctx, data, env);\n\n }\n\n\n\n fn layout(\n\n &mut self,\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 92, "score": 37406.0704872391 }, { "content": " self.right.lifecycle(ctx, event, data, env);\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n ctx: &mut UpdateCtx,\n\n _old_data: &UiWorkItem,\n\n data: &UiWorkItem,\n\n env: &Env,\n\n ) {\n\n self.left.update(ctx, data, env);\n\n self.right.update(ctx, data, env);\n\n }\n\n\n\n fn layout(\n\n &mut self,\n\n ctx: &mut LayoutCtx,\n\n bc: &BoxConstraints,\n\n data: &UiWorkItem,\n\n env: &Env,\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 93, "score": 37404.42974961734 }, { "content": " .with_child(build_status_panel())\n\n .with_spacer(10.0)\n\n .with_flex_child(\n\n Flex::column()\n\n .cross_axis_alignment(CrossAxisAlignment::Start)\n\n .with_child(item_header)\n\n .with_child(\n\n Flex::row()\n\n .with_child(build_status_label())\n\n .with_flex_spacer(1.0)\n\n .with_child(build_tags().lens(UiWorkItem::tags)),\n\n ),\n\n 1.0,\n\n )\n\n .with_spacer(10.0)\n\n .fix_height(60.0);\n\n\n\n WorkItemListItemWidget {\n\n header_id,\n\n child: WidgetPod::new(child).boxed(),\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 94, "score": 37403.86047340015 }, { "content": " ctx: &mut LayoutCtx,\n\n bc: &BoxConstraints,\n\n data: &UiWorkItem,\n\n env: &Env,\n\n ) -> Size {\n\n self.child.layout(ctx, bc, data, env)\n\n }\n\n\n\n fn paint(&mut self, ctx: &mut PaintCtx, data: &UiWorkItem, env: &Env) {\n\n let is_hot = ctx.is_hot();\n\n let size = ctx.size().to_rounded_rect(2.0);\n\n\n\n ctx.fill(size, &get_item_background_color(is_hot));\n\n\n\n self.child.paint(ctx, data, env);\n\n }\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 95, "score": 37403.81911732684 }, { "content": " if let Some(item_id) = cmd.get(ITEM_CHANGED) {\n\n if data.id == *item_id {\n\n ctx.request_update();\n\n }\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n self.child.event(ctx, event, data, env);\n\n }\n\n\n\n fn lifecycle(\n\n &mut self,\n\n ctx: &mut LifeCycleCtx,\n\n event: &LifeCycle,\n\n data: &UiWorkItem,\n\n env: &Env,\n\n ) {\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 96, "score": 37403.35846998134 }, { "content": " ) -> Size {\n\n // First layout right item\n\n let right_size = self.right.layout(ctx, bc, data, env);\n\n\n\n // Give left item all the remaining space\n\n let left_bc = BoxConstraints::new(\n\n Size::ZERO,\n\n Size::new(bc.max().width - right_size.width, bc.max().height),\n\n );\n\n let left_size = 
self.left.layout(ctx, &left_bc, data, env);\n\n\n\n // Translate right widget by the left widget width\n\n self.right\n\n .set_origin(ctx, data, env, Point::new(left_bc.max().width, 0.0));\n\n\n\n Size::new(bc.max().width, left_size.height.max(right_size.height))\n\n }\n\n\n\n fn paint(&mut self, ctx: &mut PaintCtx, data: &UiWorkItem, env: &Env) {\n\n self.left.paint(ctx, data, env);\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 97, "score": 37402.429522914026 }, { "content": " self.right.paint(ctx, data, env);\n\n\n\n // Draw fade gradient for left widget\n\n let right_rect = self.right.layout_rect();\n\n let fade_width = right_rect.x0.min(20.0);\n\n let fade_rect = Rect::new(\n\n right_rect.x0 - fade_width,\n\n 0.0,\n\n right_rect.x0,\n\n ctx.size().height,\n\n );\n\n\n\n let color = get_item_background_color(self.hovered);\n\n let transparent_color = get_item_background_color(self.hovered).with_alpha(0.0);\n\n\n\n ctx.fill(\n\n fade_rect,\n\n &LinearGradient::new(\n\n UnitPoint::LEFT,\n\n UnitPoint::RIGHT,\n\n (transparent_color, color),\n\n ),\n\n );\n\n }\n\n}\n\n\n", "file_path": "ui/src/widget/day_view/work_item/work_item_list_item.rs", "rank": 98, "score": 37401.13175453854 }, { "content": "/// Create the main window description.\n\nfn configure_window<T>(window_description: WindowDesc<T>) -> WindowDesc<T>\n\nwhere\n\n T: Data,\n\n{\n\n // Find primary monitor\n\n let monitors = Screen::get_monitors();\n\n let primary_monitor =\n\n util::ui::get_primary_monitor(&monitors).expect(\"Expected to have a primary monitor!\");\n\n\n\n // Get available space\n\n let monitor_rect = primary_monitor.virtual_work_rect();\n\n let available_space = Size::new(\n\n monitor_rect.x1 - monitor_rect.x0,\n\n monitor_rect.y1 - monitor_rect.y0,\n\n );\n\n\n\n // Calculate initial window size and position\n\n let initial_window_size = Size::new(available_space.width * 0.3, available_space.height * 0.4);\n\n let window_position = Point::new(\n\n monitor_rect.x0 + (available_space.width - initial_window_size.width) / 2.0,\n", "file_path": "ui/src/main.rs", "rank": 99, "score": 37394.315742585924 } ]
Rust
src/client/general.rs
stuck-overflow/obws
6b71ed39bfacb66bbb3050b300e97ac2be2efa44
use serde::Serialize; use super::Client; use crate::requests::{KeyModifier, Projector, ProjectorInternal, QtGeometry, RequestType}; use crate::responses; use crate::{Error, Result}; pub struct General<'a> { pub(super) client: &'a Client, } impl<'a> General<'a> { pub async fn get_version(&self) -> Result<responses::Version> { self.client.send_message(RequestType::GetVersion).await } pub async fn get_auth_required(&self) -> Result<responses::AuthRequired> { self.client.send_message(RequestType::GetAuthRequired).await } pub async fn authenticate(&self, auth: &str) -> Result<()> { self.client .send_message(RequestType::Authenticate { auth }) .await } pub async fn set_filename_formatting(&self, filename_formatting: &str) -> Result<()> { self.client .send_message(RequestType::SetFilenameFormatting { filename_formatting, }) .await } pub async fn get_filename_formatting(&self) -> Result<String> { self.client .send_message::<responses::FilenameFormatting>(RequestType::GetFilenameFormatting) .await .map(|ff| ff.filename_formatting) } pub async fn get_stats(&self) -> Result<responses::ObsStats> { self.client .send_message::<responses::Stats>(RequestType::GetStats) .await .map(|s| s.stats) } pub async fn broadcast_custom_message<T>(&self, realm: &str, data: &T) -> Result<()> where T: Serialize, { self.client .send_message(RequestType::BroadcastCustomMessage { realm, data: &serde_json::to_value(data).map_err(Error::SerializeCustomData)?, }) .await } pub async fn get_video_info(&self) -> Result<responses::VideoInfo> { self.client.send_message(RequestType::GetVideoInfo).await } pub async fn open_projector(&self, projector: Projector<'_>) -> Result<()> { self.client .send_message(RequestType::OpenProjector(ProjectorInternal { ty: projector.ty, monitor: projector.monitor, geometry: projector.geometry.map(QtGeometry::serialize).as_deref(), name: projector.name, })) .await } pub async fn trigger_hotkey_by_name(&self, hotkey_name: &str) -> Result<()> { self.client .send_message(RequestType::TriggerHotkeyByName { hotkey_name }) .await } pub async fn trigger_hotkey_by_sequence( &self, key_id: &str, key_modifiers: &[KeyModifier], ) -> Result<()> { self.client .send_message(RequestType::TriggerHotkeyBySequence { key_id, key_modifiers, }) .await } }
use serde::Serialize; use super::Client; use crate::requests::{KeyModifier, Projector, ProjectorInternal, QtGeometry, RequestType}; use crate::responses; use crate::{Error, Result}; pub struct General<'a> { pub(super) client: &'a Client, } impl<'a> General<'a> { pub async fn get_version(&self) -> Result<responses::Version> { self.client.send_message(RequestType::GetVersion).await } pub async fn get_auth_required(&self) -> Result<responses::AuthRequired> { self.client.send_message(RequestType::GetAuthRequired).await } pub async fn authenticate(&self, auth: &str) -> Result<()> { self.client .send_message(RequestType::Authenticate { auth }) .await } pub async fn set_filename_formatting(&self, filename_formatting: &str) -> Result<()> { self.client .send_message(RequestType::SetFilenameFormatting { filename_formatting, }) .await } pub async fn get_filename_formatting(&self) -> Result<String> { self.client .send_message::<responses::FilenameFormatting>(RequestType::GetFilenameFormatting) .await .map(|ff| ff.filename_formatting) }
pub async fn broadcast_custom_message<T>(&self, realm: &str, data: &T) -> Result<()> where T: Serialize, { self.client .send_message(RequestType::BroadcastCustomMessage { realm, data: &serde_json::to_value(data).map_err(Error::SerializeCustomData)?, }) .await } pub async fn get_video_info(&self) -> Result<responses::VideoInfo> { self.client.send_message(RequestType::GetVideoInfo).await } pub async fn open_projector(&self, projector: Projector<'_>) -> Result<()> { self.client .send_message(RequestType::OpenProjector(ProjectorInternal { ty: projector.ty, monitor: projector.monitor, geometry: projector.geometry.map(QtGeometry::serialize).as_deref(), name: projector.name, })) .await } pub async fn trigger_hotkey_by_name(&self, hotkey_name: &str) -> Result<()> { self.client .send_message(RequestType::TriggerHotkeyByName { hotkey_name }) .await } pub async fn trigger_hotkey_by_sequence( &self, key_id: &str, key_modifiers: &[KeyModifier], ) -> Result<()> { self.client .send_message(RequestType::TriggerHotkeyBySequence { key_id, key_modifiers, }) .await } }
pub async fn get_stats(&self) -> Result<responses::ObsStats> { self.client .send_message::<responses::Stats>(RequestType::GetStats) .await .map(|s| s.stats) }
function_block-full_function
[ { "content": "pub fn duration_millis<'de, D>(deserializer: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_i64(DurationMillisVisitor)\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 0, "score": 109560.20084211772 }, { "content": "pub fn duration_nanos<'de, D>(deserializer: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_i64(DurationNanosVisitor)\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 1, "score": 109560.20084211772 }, { "content": "pub fn duration_opt<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_option(DurationOptVisitor)\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 2, "score": 105808.3579033795 }, { "content": "pub fn duration_millis_opt<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_option(DurationMillisOptVisitor)\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 3, "score": 103731.06421193774 }, { "content": "pub fn bitflags_u8<'de, D, T, TE>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n T: TryFrom<u8, Error = TE>,\n\n TE: Display,\n\n{\n\n deserializer.deserialize_u8(BitflagsU8Visitor { flags: PhantomData })\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 4, "score": 102310.27692834633 }, { "content": "pub fn rgba8_inverse_opt<'de, D>(deserializer: D) -> Result<Option<RGBA8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_option(Rgba8InverseOptVisitor)\n\n}\n\n\n", "file_path": "src/responses/de.rs", "rank": 5, "score": 101773.29032025381 }, { "content": "pub fn string_comma_list<'de, D, T>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n T: FromIterator<String>,\n\n{\n\n deserializer.deserialize_str(StringListVisitor {\n\n sep: ',',\n\n container: PhantomData,\n\n })\n\n}\n\n\n", "file_path": "src/responses/de.rs", "rank": 6, "score": 101773.29032025381 }, { "content": "pub fn duration_millis<S>(value: &Duration, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_i64(value.num_milliseconds())\n\n}\n\n\n", "file_path": "src/requests/ser.rs", "rank": 7, "score": 100352.5030366624 }, { "content": "pub fn duration_nanos<S>(value: &Duration, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match value.num_nanoseconds() {\n\n Some(nanos) => serializer.serialize_i64(nanos),\n\n None => Err(ser::Error::custom(Error::DurationTooBig(*value))),\n\n }\n\n}\n\n\n", "file_path": "src/requests/ser.rs", "rank": 8, "score": 100352.5030366624 }, { "content": "pub fn rgba8_inverse_opt<S>(value: &Option<RGBA8>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match value {\n\n Some(rgba) => {\n\n let abgr = (rgba.a as u32) << 24\n\n | (rgba.b as u32) << 16\n\n | (rgba.g as u32) << 8\n\n | (rgba.r as u32);\n\n serializer.serialize_some(&abgr)\n\n }\n\n None => serializer.serialize_none(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use bitflags::bitflags;\n\n use serde::Serialize;\n", "file_path": "src/requests/ser.rs", "rank": 9, "score": 95444.5380750823 }, { "content": "pub fn duration_millis_opt<S>(value: &Option<Duration>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match value {\n\n Some(duration) => serializer.serialize_some(&duration.num_milliseconds()),\n\n None => 
serializer.serialize_none(),\n\n }\n\n}\n\n\n", "file_path": "src/requests/ser.rs", "rank": 10, "score": 95444.5380750823 }, { "content": "pub fn bitflags_u8_opt<S, T>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n T: Into<u8> + Copy,\n\n{\n\n match value {\n\n Some(flags) => serializer.serialize_some(&(*flags).into()),\n\n None => serializer.serialize_none(),\n\n }\n\n}\n\n\n", "file_path": "src/requests/ser.rs", "rank": 11, "score": 92574.20632011202 }, { "content": "fn extract_error(value: &mut serde_json::Value) -> Option<String> {\n\n value\n\n .as_object_mut()\n\n .and_then(|o| o.get_mut(\"error\"))\n\n .and_then(|e| {\n\n if let serde_json::Value::String(msg) = e.take() {\n\n Some(msg)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\nimpl Drop for Client {\n\n fn drop(&mut self) {\n\n // We simply drop the future as the background task has been aborted but we have no way here\n\n // to wait for it to fully shut down (except spinning up a new tokio runtime).\n\n drop(self.disconnect());\n\n }\n\n}\n", "file_path": "src/client/mod.rs", "rank": 12, "score": 60594.44018995633 }, { "content": "struct DurationOptVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DurationOptVisitor {\n\n type Value = Option<Duration>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"an optional duration formatted as 'HH:MM:SS.mmm'\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let duration = || -> Result<Duration, Error> {\n\n let mut hms = v.splitn(3, ':');\n\n let hours = hms.next().ok_or(Error::HoursMissing)?.parse()?;\n\n let minutes = hms.next().ok_or(Error::MinutesMissing)?.parse()?;\n\n let seconds = hms.next().ok_or(Error::SecondsMissing)?;\n\n\n\n let mut sm = seconds.splitn(2, '.');\n", "file_path": "src/de.rs", "rank": 13, "score": 55471.42166088536 }, { "content": "struct DurationNanosVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DurationNanosVisitor {\n\n type Value = Duration;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a duration in nanoseconds\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(Duration::nanoseconds(v))\n\n }\n\n\n\n fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n match i64::try_from(v) {\n\n Ok(value) => self.visit_i64(value),\n\n Err(e) => Err(de::Error::custom(Error::ValueTooLargeI64(e, v))),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 14, "score": 55471.42166088536 }, { "content": "struct DurationMillisVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DurationMillisVisitor {\n\n type Value = Duration;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a duration in milliseconds\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(Duration::milliseconds(v))\n\n }\n\n\n\n fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n match i64::try_from(v) {\n\n Ok(value) => self.visit_i64(value),\n\n Err(e) => Err(de::Error::custom(Error::ValueTooLargeI64(e, v))),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/de.rs", "rank": 15, "score": 55471.42166088536 }, { "content": "struct DurationMillisOptVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DurationMillisOptVisitor {\n\n type Value = 
Option<Duration>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a duration in milliseconds where -1 means a fixed duration\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(if v < 0 {\n\n None\n\n } else {\n\n Some(Duration::milliseconds(v))\n\n })\n\n }\n\n\n", "file_path": "src/de.rs", "rank": 16, "score": 54163.946645754346 }, { "content": "struct Rgba8InverseOptVisitor;\n\n\n\nimpl<'de> Visitor<'de> for Rgba8InverseOptVisitor {\n\n type Value = Option<RGBA8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a RGBA color value encoded as integer in inverse order (ABGR)\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n match u32::try_from(v) {\n\n Ok(v) => self.visit_u32(v),\n\n Err(e) => Err(Error::custom(e)),\n\n }\n\n }\n\n\n\n fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E>\n", "file_path": "src/responses/de.rs", "rank": 17, "score": 52957.98105659465 }, { "content": "struct StringListVisitor<T> {\n\n sep: char,\n\n container: PhantomData<T>,\n\n}\n\n\n\nimpl<'de, T> Visitor<'de> for StringListVisitor<T>\n\nwhere\n\n T: FromIterator<String>,\n\n{\n\n type Value = T;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n formatter,\n\n \"a string containing values separated by '{}'\",\n\n self.sep\n\n )\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n Ok(v.split(self.sep).map(|s| s.to_owned()).collect())\n\n }\n\n}\n\n\n", "file_path": "src/responses/de.rs", "rank": 18, "score": 51429.47171429057 }, { "content": "struct BitflagsU8Visitor<T, TE> {\n\n flags: PhantomData<(T, TE)>,\n\n}\n\n\n\nimpl<'de, T, TE> Visitor<'de> for BitflagsU8Visitor<T, TE>\n\nwhere\n\n T: TryFrom<u8, Error = TE>,\n\n TE: Display,\n\n{\n\n type Value = T;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"bitflags encoded as u8 integer\")\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n u8::try_from(v)\n", "file_path": "src/de.rs", "rank": 19, "score": 50090.708359307006 }, { "content": "fn is_required_scene(scene: &Scene) -> bool {\n\n scene.name == TEST_SCENE\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 20, "score": 42463.074111028196 }, { "content": "fn is_required_profile(profile: &Profile) -> bool {\n\n profile.profile_name == TEST_PROFILE\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 21, "score": 42463.074111028196 }, { "content": "fn is_required_output(output: &Output) -> bool {\n\n output.name == TEST_OUTPUT\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 22, "score": 42463.074111028196 }, { "content": "fn is_required_scene_2(scene: &Scene) -> bool {\n\n scene.name == TEST_SCENE_2\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 23, "score": 42463.074111028196 }, { "content": "fn is_required_transition_2(transition: &Transition) -> bool {\n\n transition.name == TEST_TRANSITION_2\n\n}\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! wait_for {\n\n ($expression:expr, $pattern:pat) => {\n\n while let Some(Event { ty, .. 
}) = $expression.next().await {\n\n if matches!(ty, $pattern) {\n\n break;\n\n }\n\n }\n\n };\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 24, "score": 42463.074111028196 }, { "content": "fn is_required_transition(transition: &Transition) -> bool {\n\n transition.name == TEST_TRANSITION\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 25, "score": 42463.074111028196 }, { "content": "fn is_required_source(source: &SourceListItem) -> bool {\n\n source.name == TEXT_SOURCE && is_text_input_source(source)\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 26, "score": 40481.864116391866 }, { "content": "fn is_required_source_2(source: &SourceListItem) -> bool {\n\n source.name == TEXT_SOURCE_2 && is_text_input_source(source)\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 27, "score": 40481.864116391866 }, { "content": "fn is_required_scene_collection(output: &SceneCollection) -> bool {\n\n output.sc_name == TEST_COLLECTION\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 28, "score": 40481.864116391866 }, { "content": "fn is_required_media_source(source: &SourceListItem) -> bool {\n\n source.name == TEST_MEDIA && is_media_input_source(source)\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 29, "score": 39591.11627192355 }, { "content": "fn is_required_browser_source(source: &SourceListItem) -> bool {\n\n source.name == TEST_BROWSER && is_browser_input_source(source)\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 30, "score": 39591.11627192355 }, { "content": "fn is_text_input_source(source: &SourceListItem) -> bool {\n\n source.ty == \"input\" && source.type_id == SOURCE_KIND_TEXT_FT2\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 31, "score": 39591.11627192355 }, { "content": "fn is_media_input_source(source: &SourceListItem) -> bool {\n\n source.ty == \"input\" && source.type_id == SOURCE_KIND_VLC\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 32, "score": 39591.11627192355 }, { "content": "fn is_browser_input_source(source: &SourceListItem) -> bool {\n\n source.ty == \"input\" && source.type_id == SOURCE_KIND_BROWSER\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 33, "score": 39591.11627192355 }, { "content": "use super::Client;\n\nuse crate::requests::RequestType;\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to profiles.\n\npub struct Profiles<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Profiles<'a> {\n\n /// Set the currently active profile.\n\n ///\n\n /// - `profile_name`: Name of the desired profile.\n\n pub async fn set_current_profile(&self, profile_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetCurrentProfile { profile_name })\n\n .await\n\n }\n\n\n", "file_path": "src/client/profiles.rs", "rank": 36, "score": 33836.46433164828 }, { "content": "use super::Client;\n\nuse crate::requests::{RequestType, SceneItem, SceneTransitionOverride};\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to scenes.\n\npub struct Scenes<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Scenes<'a> {\n\n /// Switch to the specified scene.\n\n ///\n\n /// - `scene_name`: Name of the scene to switch to.\n\n pub async fn set_current_scene(&self, scene_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetCurrentScene { scene_name })\n\n .await\n\n }\n\n\n", "file_path": "src/client/scenes.rs", "rank": 37, "score": 33836.017593333214 }, { "content": " pub async fn 
get_mute(&self, source: &str) -> Result<responses::Mute> {\n\n self.client\n\n .send_message(RequestType::GetMute { source })\n\n .await\n\n }\n\n\n\n /// Sets the mute status of a specified source.\n\n ///\n\n /// - `source`: Source name.\n\n /// - `mute`: Desired mute status.\n\n pub async fn set_mute(&self, source: &str, mute: bool) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetMute { source, mute })\n\n .await\n\n }\n\n\n\n /// Inverts the mute status of a specified source.\n\n ///\n\n /// - `source`: Source name.\n\n pub async fn toggle_mute(&self, source: &str) -> Result<()> {\n", "file_path": "src/client/sources.rs", "rank": 38, "score": 33834.89832926002 }, { "content": "use super::Client;\n\nuse crate::requests::{RequestType, SetStreamSettings, Stream};\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to streaming.\n\npub struct Streaming<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Streaming<'a> {\n\n /// Get current streaming and recording status.\n\n pub async fn get_streaming_status(&self) -> Result<responses::StreamingStatus> {\n\n self.client\n\n .send_message(RequestType::GetStreamingStatus)\n\n .await\n\n }\n\n\n\n /// Toggle streaming on or off (depending on the current stream state).\n\n pub async fn start_stop_streaming(&self) -> Result<()> {\n", "file_path": "src/client/streaming.rs", "rank": 39, "score": 33834.85977769004 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\nuse super::Client;\n\nuse crate::requests::RequestType;\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to recording.\n\npub struct Recording<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Recording<'a> {\n\n /// Get current recording status.\n\n pub async fn get_recording_status(&self) -> Result<responses::RecordingStatus> {\n\n self.client\n\n .send_message(RequestType::GetRecordingStatus)\n\n .await\n\n }\n\n\n", "file_path": "src/client/recording.rs", "rank": 41, "score": 33834.30059517324 }, { "content": "use super::Client;\n\nuse crate::requests::RequestType;\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to outputs.\n\npub struct Outputs<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Outputs<'a> {\n\n /// List existing outputs.\n\n pub async fn list_outputs(&self) -> Result<Vec<responses::Output>> {\n\n self.client\n\n .send_message::<responses::Outputs>(RequestType::ListOutputs)\n\n .await\n\n .map(|o| o.outputs)\n\n }\n\n\n\n /// Get information about a single output.\n", "file_path": "src/client/outputs.rs", "rank": 42, "score": 33834.188226737206 }, { "content": "use chrono::Duration;\n\nuse serde::Serialize;\n\n\n\nuse super::Client;\n\nuse crate::requests::RequestType;\n\nuse crate::responses;\n\nuse crate::{Error, Result};\n\n\n\n/// API functions related to transitions.\n\npub struct Transitions<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Transitions<'a> {\n\n /// List of all transitions available in the frontend's dropdown menu.\n\n pub async fn get_transition_list(&self) -> Result<responses::TransitionList> {\n\n self.client\n\n .send_message(RequestType::GetTransitionList)\n\n .await\n\n }\n", "file_path": "src/client/transitions.rs", "rank": 43, "score": 33834.175303117656 }, { "content": " ///\n\n /// - `scene`: Name of the scene to reorder (defaults to current).\n\n /// - `items`: Ordered list of objects with name and/or id specified. 
Id preferred due to\n\n /// uniqueness per scene\n\n pub async fn reorder_scene_items(\n\n &self,\n\n scene: Option<&str>,\n\n items: &[SceneItem<'_>],\n\n ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::ReorderSceneItems { scene, items })\n\n .await\n\n }\n\n\n\n /// Set a scene to use a specific transition override.\n\n pub async fn set_scene_transition_override(\n\n &self,\n\n scene_transition: SceneTransitionOverride<'_>,\n\n ) -> Result<()> {\n\n self.client\n", "file_path": "src/client/scenes.rs", "rank": 44, "score": 33833.84841638954 }, { "content": " /// Get the audio sync offset of a specified source.\n\n ///\n\n /// - `source`: Source name.\n\n pub async fn get_sync_offset(&self, source: &str) -> Result<responses::SyncOffset> {\n\n self.client\n\n .send_message(RequestType::GetSyncOffset { source })\n\n .await\n\n }\n\n\n\n /// Get settings of the specified source.\n\n ///\n\n /// - `source_name`: Source name.\n\n /// - `source_type`: Type of the specified source. Useful for type-checking if you expect a\n\n /// specific settings schema.\n\n pub async fn get_source_settings<T>(\n\n &self,\n\n source_name: &str,\n\n source_type: Option<&str>,\n\n ) -> Result<responses::SourceSettings<T>>\n\n where\n", "file_path": "src/client/sources.rs", "rank": 45, "score": 33833.65404122795 }, { "content": " /// - `new_name`: New source name.\n\n pub async fn set_source_name(&self, source_name: &str, new_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetSourceName {\n\n source_name,\n\n new_name,\n\n })\n\n .await\n\n }\n\n\n\n /// Set the audio sync offset of a specified source.\n\n ///\n\n /// - `source`: Source name.\n\n /// - `offset`: The desired audio sync offset (in nanoseconds).\n\n pub async fn set_sync_offset(&self, source: &str, offset: Duration) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetSyncOffset { source, offset })\n\n .await\n\n }\n\n\n", "file_path": "src/client/sources.rs", "rank": 46, "score": 33833.205353707934 }, { "content": " /// Get the current scene's name and source items.\n\n pub async fn get_current_scene(&self) -> Result<responses::CurrentScene> {\n\n self.client.send_message(RequestType::GetCurrentScene).await\n\n }\n\n\n\n /// Get a list of scenes in the currently active profile.\n\n pub async fn get_scene_list(&self) -> Result<responses::SceneList> {\n\n self.client.send_message(RequestType::GetSceneList).await\n\n }\n\n\n\n /// Create a new scene scene.\n\n ///\n\n /// - `scene_name`: Name of the scene to create.\n\n pub async fn create_scene(&self, scene_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::CreateScene { scene_name })\n\n .await\n\n }\n\n\n\n /// Changes the order of scene items in the requested scene.\n", "file_path": "src/client/scenes.rs", "rank": 47, "score": 33833.06143464786 }, { "content": " .await\n\n }\n\n\n\n /// Send the provided text as embedded CEA-608 caption data.\n\n ///\n\n /// - `text`: Captions text.\n\n pub async fn send_captions(&self, text: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SendCaptions { text })\n\n .await\n\n }\n\n}\n", "file_path": "src/client/streaming.rs", "rank": 48, "score": 33833.00100992223 }, { "content": " ) -> Result<responses::Volume> {\n\n self.client\n\n .send_message(RequestType::GetVolume {\n\n source,\n\n use_decibel,\n\n })\n\n .await\n\n }\n\n\n\n /// Set the volume of the specified source. 
Default request format uses mul, NOT SLIDER\n\n /// PERCENTAGE.\n\n pub async fn set_volume(&self, volume: Volume<'_>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetVolume(volume))\n\n .await\n\n }\n\n\n\n /// Get the mute status of a specified source.\n\n ///\n\n /// - `source`: Source name.\n", "file_path": "src/client/sources.rs", "rank": 49, "score": 33832.70996700551 }, { "content": " .await\n\n }\n\n\n\n /// Refreshes the specified browser source.\n\n ///\n\n /// - `source_name`: Source name.\n\n pub async fn refresh_browser_source(&self, source_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::RefreshBrowserSource { source_name })\n\n .await\n\n }\n\n}\n", "file_path": "src/client/sources.rs", "rank": 50, "score": 33832.66849229351 }, { "content": " .map(|sl| sl.sources)\n\n }\n\n\n\n /// Get a list of all available sources types.\n\n pub async fn get_sources_types_list(&self) -> Result<Vec<responses::SourceTypeItem>> {\n\n self.client\n\n .send_message::<responses::SourceTypesList>(RequestType::GetSourceTypesList)\n\n .await\n\n .map(|stl| stl.types)\n\n }\n\n\n\n /// Get the volume of the specified source. Default response uses mul format, NOT SLIDER\n\n /// PERCENTAGE.\n\n ///\n\n /// - `source`: Source name.\n\n /// - `use_decibel`: Output volume in decibels of attenuation instead of amplitude/mul.\n\n pub async fn get_volume(\n\n &self,\n\n source: &str,\n\n use_decibel: Option<bool>,\n", "file_path": "src/client/sources.rs", "rank": 51, "score": 33832.43662287761 }, { "content": " /// Toggle recording on or off (depending on the current recording state).\n\n pub async fn start_stop_recording(&self) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::StartStopRecording)\n\n .await\n\n }\n\n\n\n /// Start recording. Will return an `error` if recording is already active.\n\n pub async fn start_recording(&self) -> Result<()> {\n\n self.client.send_message(RequestType::StartRecording).await\n\n }\n\n\n\n /// Stop recording. Will return an `error` if recording is not active.\n\n pub async fn stop_recording(&self) -> Result<()> {\n\n self.client.send_message(RequestType::StopRecording).await\n\n }\n\n\n\n /// Pause the current recording. 
Returns an `error` if recording is not active or already\n\n /// paused.\n\n pub async fn pause_recording(&self) -> Result<()> {\n", "file_path": "src/client/recording.rs", "rank": 52, "score": 33832.430255012994 }, { "content": " }\n\n }\n\n\n\n /// Login to the OBS websocket if an authentication is required.\n\n pub async fn login(&self, password: Option<impl AsRef<str>>) -> Result<()> {\n\n let auth_required = self.general().get_auth_required().await?;\n\n\n\n if let AuthRequired {\n\n auth_required: true,\n\n challenge: Some(challenge),\n\n salt: Some(salt),\n\n } = auth_required\n\n {\n\n match password {\n\n Some(password) => {\n\n let auth = Self::create_auth_response(&challenge, &salt, password.as_ref());\n\n self.general().authenticate(&auth).await?;\n\n }\n\n None => return Err(Error::NoPassword),\n\n }\n", "file_path": "src/client/mod.rs", "rank": 53, "score": 33832.349802461875 }, { "content": " properties: TextFreetype2Properties<'_>,\n\n ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetTextFreetype2Properties(properties))\n\n .await\n\n }\n\n\n\n /// Get configured special sources like Desktop Audio and Mic/Aux sources.\n\n pub async fn get_special_sources(&self) -> Result<responses::SpecialSources> {\n\n self.client\n\n .send_message(RequestType::GetSpecialSources)\n\n .await\n\n }\n\n\n\n /// List filters applied to a source.\n\n ///\n\n /// - `source_name`: Source name.\n\n pub async fn get_source_filters(\n\n &self,\n\n source_name: &str,\n", "file_path": "src/client/sources.rs", "rank": 54, "score": 33832.330543675744 }, { "content": " &self,\n\n source_name: &str,\n\n filter_name: &str,\n\n ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::RemoveFilterFromSource {\n\n source_name,\n\n filter_name,\n\n })\n\n .await\n\n }\n\n\n\n /// Move a filter in the chain (absolute index positioning).\n\n pub async fn reorder_source_filter(&self, reorder_filter: ReorderFilter<'_>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::ReorderSourceFilter(reorder_filter))\n\n .await\n\n }\n\n\n\n /// Move a filter in the chain (relative positioning).\n", "file_path": "src/client/sources.rs", "rank": 55, "score": 33832.26661029653 }, { "content": " ///\n\n /// - `output_name`: Output name.\n\n pub async fn get_output_info(&self, output_name: &str) -> Result<responses::Output> {\n\n self.client\n\n .send_message::<responses::OutputInfo>(RequestType::GetOutputInfo { output_name })\n\n .await\n\n .map(|o| o.output_info)\n\n }\n\n\n\n /// Start an output.\n\n ///\n\n /// Note: Controlling outputs is an experimental feature of obs-websocket. 
Some plugins which\n\n /// add outputs to OBS may not function properly when they are controlled in this way.\n\n ///\n\n /// - `output_name`: Output name.\n\n pub async fn start_output(&self, output_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::StartOutput { output_name })\n\n .await\n\n }\n", "file_path": "src/client/outputs.rs", "rank": 56, "score": 33832.15352500415 }, { "content": " .send_message(RequestType::SetSceneTransitionOverride(scene_transition))\n\n .await\n\n }\n\n\n\n /// Remove any transition override on a scene.\n\n ///\n\n /// - `scene_name`: Name of the scene to remove the override from.\n\n pub async fn remove_scene_transition_override(&self, scene_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::RemoveSceneTransitionOverride { scene_name })\n\n .await\n\n }\n\n\n\n /// Get the current scene transition override.\n\n ///\n\n /// - `scene_name`: Name of the scene to get the override for.\n\n pub async fn get_scene_transition_override(\n\n &self,\n\n scene_name: &str,\n\n ) -> Result<responses::SceneTransitionOverride> {\n\n self.client\n\n .send_message(RequestType::GetSceneTransitionOverride { scene_name })\n\n .await\n\n }\n\n}\n", "file_path": "src/client/scenes.rs", "rank": 57, "score": 33832.00947762028 }, { "content": "\n\n /// Get the name of the currently selected transition in the frontend's dropdown menu.\n\n pub async fn get_current_transition(&self) -> Result<responses::CurrentTransition> {\n\n self.client\n\n .send_message(RequestType::GetCurrentTransition)\n\n .await\n\n }\n\n\n\n /// Set the active transition.\n\n ///\n\n /// - `transition_name`: The name of the transition.\n\n pub async fn set_current_transition(&self, transition_name: &str) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetCurrentTransition { transition_name })\n\n .await\n\n }\n\n\n\n /// Set the duration of the currently selected transition if supported.\n\n ///\n\n /// - `duration`: Desired duration of the transition (in milliseconds).\n", "file_path": "src/client/transitions.rs", "rank": 59, "score": 33831.94331180599 }, { "content": " /// - `source_name`: Source name.\n\n /// - `monitor_type`: The monitor type to use. Options: `none`, `monitorOnly`,\n\n /// `monitorAndOutput`.\n\n pub async fn set_audio_monitor_type(\n\n &self,\n\n source_name: &str,\n\n monitor_type: MonitorType,\n\n ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetAudioMonitorType {\n\n source_name,\n\n monitor_type,\n\n })\n\n .await\n\n }\n\n\n\n /// Get the default settings for a given source type.\n\n ///\n\n /// - `source_kind`: Source kind. 
Also called \"source id\" in libobs terminology.\n\n pub async fn get_source_default_settings(\n", "file_path": "src/client/sources.rs", "rank": 60, "score": 33831.81057246611 }, { "content": " .await\n\n }\n\n\n\n /// Get the current properties of a Text GDI Plus source.\n\n ///\n\n /// - `source`: Source name.\n\n pub async fn get_text_gdi_plus_properties(\n\n &self,\n\n source: &str,\n\n ) -> Result<responses::TextGdiPlusProperties> {\n\n self.client\n\n .send_message(RequestType::GetTextGdiPlusProperties { source })\n\n .await\n\n }\n\n\n\n /// Set the current properties of a Text GDI Plus source.\n\n pub async fn set_text_gdi_plus_properties(\n\n &self,\n\n properties: TextGdiPlusProperties<'_>,\n\n ) -> Result<()> {\n", "file_path": "src/client/sources.rs", "rank": 61, "score": 33831.67460252068 }, { "content": " self.client\n\n .send_message(RequestType::SetTextGdiPlusProperties(Box::new(properties)))\n\n .await\n\n }\n\n\n\n /// Get the current properties of a Text Freetype 2 source.\n\n ///\n\n /// - `source`: Source name.\n\n pub async fn get_text_freetype2_properties(\n\n &self,\n\n source: &str,\n\n ) -> Result<responses::TextFreetype2Properties> {\n\n self.client\n\n .send_message(RequestType::GetTextFreetype2Properties { source })\n\n .await\n\n }\n\n\n\n /// Set the current properties of a Text Freetype 2 source.\n\n pub async fn set_text_freetype2_properties(\n\n &self,\n", "file_path": "src/client/sources.rs", "rank": 62, "score": 33831.48287006097 }, { "content": " pub async fn set_transition_duration(&self, duration: Duration) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetTransitionDuration { duration })\n\n .await\n\n }\n\n\n\n /// Get the duration of the currently selected transition if supported.\n\n pub async fn get_transition_duration(&self) -> Result<Duration> {\n\n self.client\n\n .send_message::<responses::TransitionDuration>(RequestType::GetTransitionDuration)\n\n .await\n\n .map(|td| td.transition_duration)\n\n }\n\n\n\n /// Get the position of the current transition.\n\n pub async fn get_transition_position(&self) -> Result<f64> {\n\n self.client\n\n .send_message::<responses::TransitionPosition>(RequestType::GetTransitionPosition)\n\n .await\n\n .map(|tp| tp.position)\n", "file_path": "src/client/transitions.rs", "rank": 63, "score": 33831.37352016567 }, { "content": " pub async fn move_source_filter(&self, move_filter: MoveFilter<'_>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::MoveSourceFilter(move_filter))\n\n .await\n\n }\n\n\n\n /// Update settings of a filter.\n\n pub async fn set_source_filter_settings(\n\n &self,\n\n settings: SourceFilterSettings<'_>,\n\n ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetSourceFilterSettings(settings))\n\n .await\n\n }\n\n\n\n /// Change the visibility/enabled state of a filter.\n\n pub async fn set_source_filter_visibility(\n\n &self,\n\n visibility: SourceFilterVisibility<'_>,\n", "file_path": "src/client/sources.rs", "rank": 64, "score": 33830.85335169898 }, { "content": " self.client.send_message(RequestType::PauseRecording).await\n\n }\n\n\n\n /// Resume/unpause the current recording (if paused). 
Returns an error if recording is not\n\n /// active or not paused.\n\n pub async fn resume_recording(&self) -> Result<()> {\n\n self.client.send_message(RequestType::ResumeRecording).await\n\n }\n\n\n\n /// Please note: if this is called while a recording is in progress, the change won't be applied\n\n /// immediately and will be effective on the next recording.\n\n ///\n\n /// - `rec_folder`: Path of the recording folder.\n\n pub async fn set_recording_folder(&self, rec_folder: &Path) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetRecordingFolder { rec_folder })\n\n .await\n\n }\n\n\n\n /// Get the path of the current recording folder.\n\n pub async fn get_recording_folder(&self) -> Result<PathBuf> {\n\n self.client\n\n .send_message::<responses::RecordingFolder>(RequestType::GetRecordingFolder)\n\n .await\n\n .map(|rf| rf.rec_folder)\n\n }\n\n}\n", "file_path": "src/client/recording.rs", "rank": 65, "score": 33830.64454283413 }, { "content": " ) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetSourceFilterVisibility(visibility))\n\n .await\n\n }\n\n\n\n /// Get the audio monitoring type of the specified source.\n\n ///\n\n /// - `source_name`: Source name.\n\n pub async fn get_audio_monitor_type(&self, source_name: &str) -> Result<MonitorType> {\n\n self.client\n\n .send_message::<responses::AudioMonitorType>(RequestType::GetAudioMonitorType {\n\n source_name,\n\n })\n\n .await\n\n .map(|amt| amt.monitor_type)\n\n }\n\n\n\n /// Set the audio monitoring type of the specified source.\n\n ///\n", "file_path": "src/client/sources.rs", "rank": 67, "score": 33830.596881694226 }, { "content": " ) -> Result<Vec<responses::SourceFilter>> {\n\n self.client\n\n .send_message::<responses::SourceFilters>(RequestType::GetSourceFilters { source_name })\n\n .await\n\n .map(|sf| sf.filters)\n\n }\n\n\n\n /// Get a specific filter that is applied to a source.\n\n ///\n\n /// - `source_name`: Source name.\n\n /// - `filter_name`: Source filter name.\n\n pub async fn get_source_filter_info<T>(\n\n &self,\n\n source_name: &str,\n\n filter_name: &str,\n\n ) -> Result<responses::SourceFilterInfo<T>>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n self.client\n", "file_path": "src/client/sources.rs", "rank": 68, "score": 33830.49637197261 }, { "content": " /// Get the name of the current profile.\n\n pub async fn get_current_profile(&self) -> Result<String> {\n\n self.client\n\n .send_message::<responses::CurrentProfile>(RequestType::GetCurrentProfile)\n\n .await\n\n .map(|cp| cp.profile_name)\n\n }\n\n\n\n /// Get a list of available profiles.\n\n pub async fn list_profiles(&self) -> Result<Vec<responses::Profile>> {\n\n self.client\n\n .send_message::<responses::Profiles>(RequestType::ListProfiles)\n\n .await\n\n .map(|cp| cp.profiles)\n\n }\n\n}\n", "file_path": "src/client/profiles.rs", "rank": 69, "score": 33830.45571872524 }, { "content": " pub async fn get_media_sources_list(&self) -> Result<Vec<responses::MediaSource>> {\n\n self.client\n\n .send_message::<responses::MediaSourcesList>(RequestType::GetMediaSourcesList)\n\n .await\n\n .map(|ms| ms.media_sources)\n\n }\n\n\n\n /// Create a source and add it as a scene item to a scene.\n\n pub async fn create_source(&self, source: CreateSource<'_>) -> Result<i64> {\n\n self.client\n\n .send_message::<responses::SourceItemId>(RequestType::CreateSource(source))\n\n .await\n\n .map(|sii| sii.item_id)\n\n }\n\n\n\n /// List all sources available in the running OBS instance.\n\n pub async fn get_sources_list(&self) -> 
Result<Vec<responses::SourceListItem>> {\n\n self.client\n\n .send_message::<responses::SourcesList>(RequestType::GetSourcesList)\n\n .await\n", "file_path": "src/client/sources.rs", "rank": 70, "score": 33830.34209608816 }, { "content": " /// will remain unchanged. Returns the updated settings in response. If 'type' is different than\n\n /// the current streaming service type, all settings are required. Returns the full settings of\n\n /// the stream (the same as GetStreamSettings).\n\n pub async fn set_stream_settings(&self, settings: SetStreamSettings<'_>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetStreamSettings(settings))\n\n .await\n\n }\n\n\n\n /// Get the current streaming server settings.\n\n pub async fn get_stream_settings(&self) -> Result<responses::GetStreamSettings> {\n\n self.client\n\n .send_message(RequestType::GetStreamSettings)\n\n .await\n\n }\n\n\n\n /// Save the current streaming server settings to disk.\n\n pub async fn save_stream_settings(&self) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SaveStreamSettings)\n", "file_path": "src/client/streaming.rs", "rank": 71, "score": 33830.27767993781 }, { "content": " self.client\n\n .send_message(RequestType::StartStopStreaming)\n\n .await\n\n }\n\n\n\n /// Start streaming. Will return an `error` if streaming is already active.\n\n ///\n\n /// - `stream`: Special stream configuration. Note: these won't be saved to OBS' configuration.\n\n pub async fn start_streaming(&self, stream: Option<Stream<'_>>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::StartStreaming { stream })\n\n .await\n\n }\n\n\n\n /// Stop streaming. Will return an `error` if streaming is not active.\n\n pub async fn stop_streaming(&self) -> Result<()> {\n\n self.client.send_message(RequestType::StopStreaming).await\n\n }\n\n\n\n /// Sets one or more attributes of the current streaming server settings. Any options not passed\n", "file_path": "src/client/streaming.rs", "rank": 72, "score": 33830.164035224276 }, { "content": " }\n\n\n\n /// Get the current settings of a transition.\n\n ///\n\n /// - `transition_name`: Transition name.\n\n pub async fn get_transition_settings(\n\n &self,\n\n transition_name: &str,\n\n ) -> Result<serde_json::Value> {\n\n self.client\n\n .send_message::<responses::TransitionSettings>(RequestType::GetTransitionSettings {\n\n transition_name,\n\n })\n\n .await\n\n .map(|ts| ts.transition_settings)\n\n }\n\n\n\n /// Change the current settings of a transition.\n\n ///\n\n /// - `transition_name`: Transition name.\n", "file_path": "src/client/transitions.rs", "rank": 73, "score": 33829.20971898961 }, { "content": " self.client\n\n .send_message(RequestType::ToggleMute { source })\n\n .await\n\n }\n\n\n\n /// Get the audio's active status of a specified source.\n\n ///\n\n /// - `source_name`: Source name.\n\n pub async fn get_audio_active(&self, source_name: &str) -> Result<bool> {\n\n self.client\n\n .send_message::<responses::AudioActive>(RequestType::GetAudioActive { source_name })\n\n .await\n\n .map(|aa| aa.audio_active)\n\n }\n\n\n\n /// Rename an existing source.\n\n ///\n\n /// Note: If the new name already exists as a source, obs-websocket will return an error.\n\n ///\n\n /// - `source_name`: Source name.\n", "file_path": "src/client/sources.rs", "rank": 74, "score": 33829.06258236873 }, { "content": "\n\n /// Stop an output.\n\n ///\n\n /// Note: Controlling outputs is an experimental feature of obs-websocket. 
Some plugins which\n\n /// add outputs to OBS may not function properly when they are controlled in this way.\n\n ///\n\n /// - `output_name`: Output name.\n\n /// - `force`: Force stop (default: false).\n\n pub async fn stop_output(&self, output_name: &str, force: Option<bool>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::StopOutput { output_name, force })\n\n .await\n\n }\n\n}\n", "file_path": "src/client/outputs.rs", "rank": 75, "score": 33828.96660176601 }, { "content": " host,\n\n port,\n\n #[cfg(feature = \"tls\")]\n\n tls: false,\n\n broadcast_capacity: None,\n\n })\n\n .await\n\n }\n\n\n\n /// Connect to a obs-websocket instance with the given configuration.\n\n pub async fn connect_with_config<H: AsRef<str>>(config: ConnectConfig<H>) -> Result<Self> {\n\n let (socket, _) = tokio_tungstenite::connect_async(format!(\n\n \"{}://{}:{}\",\n\n if config.tls() { \"wss\" } else { \"ws\" },\n\n config.host.as_ref(),\n\n config.port\n\n ))\n\n .await\n\n .map_err(Error::Connect)?;\n\n\n", "file_path": "src/client/mod.rs", "rank": 76, "score": 33828.372534766444 }, { "content": " &self,\n\n source_kind: &str,\n\n ) -> Result<responses::SourceDefaultSettings> {\n\n self.client\n\n .send_message(RequestType::GetSourceDefaultSettings { source_kind })\n\n .await\n\n }\n\n\n\n /// At least [`embed_picture_format`](SourceScreenshot::embed_picture_format) or\n\n /// [`save_to_file_path`](SourceScreenshot::save_to_file_path) must be specified.\n\n ///\n\n /// Clients can specify [`width`](SourceScreenshot::width) and\n\n /// [`height`](SourceScreenshot::height) parameters to receive scaled pictures. Aspect ratio is\n\n /// preserved if only one of these two parameters is specified.\n\n pub async fn take_source_screenshot(\n\n &self,\n\n source_screenshot: SourceScreenshot<'_>,\n\n ) -> Result<responses::SourceScreenshot> {\n\n self.client\n\n .send_message(RequestType::TakeSourceScreenshot(source_screenshot))\n", "file_path": "src/client/sources.rs", "rank": 77, "score": 33827.964980376964 }, { "content": "\n\nimpl<H> ConnectConfig<H>\n\nwhere\n\n H: AsRef<str>,\n\n{\n\n #[cfg(feature = \"tls\")]\n\n fn tls(&self) -> bool {\n\n self.tls\n\n }\n\n\n\n #[cfg(not(feature = \"tls\"))]\n\n fn tls(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\nimpl Client {\n\n /// Connect to a obs-websocket instance on the given host and port.\n\n pub async fn connect(host: impl AsRef<str>, port: u16) -> Result<Self> {\n\n Self::connect_with_config(ConnectConfig {\n", "file_path": "src/client/mod.rs", "rank": 78, "score": 33827.70874705453 }, { "content": " T: DeserializeOwned,\n\n {\n\n self.client\n\n .send_message(RequestType::GetSourceSettings {\n\n source_name,\n\n source_type,\n\n })\n\n .await\n\n }\n\n\n\n /// Set settings of the specified source.\n\n pub async fn set_source_settings<T>(\n\n &self,\n\n source_settings: SourceSettings<'_>,\n\n ) -> Result<responses::SourceSettings<T>>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n self.client\n\n .send_message(RequestType::SetSourceSettings(source_settings))\n", "file_path": "src/client/sources.rs", "rank": 79, "score": 33827.32598701213 }, { "content": " .send_message(RequestType::GetSourceFilterInfo {\n\n source_name,\n\n filter_name,\n\n })\n\n .await\n\n }\n\n\n\n /// Add a new filter to a source. 
Available source types along with their settings properties\n\n /// are available from [`get_sources_types_list`](Self::get_sources_types_list).\n\n pub async fn add_filter_to_source(&self, add_filter: AddFilter<'_>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::AddFilterToSource(add_filter))\n\n .await\n\n }\n\n\n\n /// Remove a filter from a source.\n\n ///\n\n /// - `source_name`: Name of the source from which the specified filter is removed.\n\n /// - `filter_name`: Name of the filter to remove.\n\n pub async fn remove_filter_from_source(\n", "file_path": "src/client/sources.rs", "rank": 80, "score": 33827.14780817434 }, { "content": " /// - `transition_settings`: Transition settings (they can be partial)\n\n pub async fn set_transition_settings<T>(\n\n &self,\n\n transition_name: &str,\n\n transition_settings: &T,\n\n ) -> Result<serde_json::Value>\n\n where\n\n T: Serialize,\n\n {\n\n self.client\n\n .send_message::<responses::TransitionSettings>(RequestType::SetTransitionSettings {\n\n transition_name,\n\n transition_settings: &serde_json::to_value(transition_settings)\n\n .map_err(Error::SerializeCustomData)?,\n\n })\n\n .await\n\n .map(|ts| ts.transition_settings)\n\n }\n\n\n\n /// Release the T-Bar (like a user releasing their mouse button after moving it). *YOU MUST CALL\n", "file_path": "src/client/transitions.rs", "rank": 81, "score": 33827.07990791615 }, { "content": "use chrono::Duration;\n\nuse serde::de::DeserializeOwned;\n\n\n\nuse super::Client;\n\nuse crate::common::MonitorType;\n\nuse crate::requests::{\n\n AddFilter, CreateSource, MoveFilter, ReorderFilter, RequestType, SourceFilterSettings,\n\n SourceFilterVisibility, SourceScreenshot, SourceSettings, TextFreetype2Properties,\n\n TextGdiPlusProperties, Volume,\n\n};\n\nuse crate::responses;\n\nuse crate::Result;\n\n\n\n/// API functions related to sources.\n\npub struct Sources<'a> {\n\n pub(super) client: &'a Client,\n\n}\n\n\n\nimpl<'a> Sources<'a> {\n\n /// List the media state of all media sources (vlc and media source).\n", "file_path": "src/client/sources.rs", "rank": 83, "score": 33826.8300294334 }, { "content": " /// THIS if you called [`set_t_bar_position`](Self::set_t_bar_position) with the `release`\n\n /// parameter set to `false`.*\n\n pub async fn release_t_bar(&self) -> Result<()> {\n\n self.client.send_message(RequestType::ReleaseTBar).await\n\n }\n\n\n\n /// If your code needs to perform multiple successive T-Bar moves (e.g. : in an animation, or in\n\n /// response to a user moving a T-Bar control in your User Interface), set `release` to false\n\n /// and call [`release_t_bar`](Self::release_t_bar) later once the animation/interaction is\n\n /// over.\n\n ///\n\n /// - `position`: T-Bar position. This value must be between 0.0 and 1.0.\n\n /// - `release`: Whether or not the T-Bar gets released automatically after setting its new\n\n /// position (like a user releasing their mouse button after moving the T-Bar). Call\n\n /// [`release_t_bar`](Self::release_t_bar) manually if you set `release` to false. Defaults to\n\n /// true.\n\n pub async fn set_t_bar_position(&self, position: f64, release: Option<bool>) -> Result<()> {\n\n self.client\n\n .send_message(RequestType::SetTBarPosition { position, release })\n\n .await\n\n }\n\n}\n", "file_path": "src/client/transitions.rs", "rank": 84, "score": 33826.48312877779 }, { "content": "\n\n Ok(async_stream::stream! 
{\n\n while let Ok(event) = receiver.recv().await {\n\n yield event;\n\n }\n\n })\n\n } else {\n\n Err(crate::Error::Disconnected)\n\n }\n\n }\n\n\n\n /// Access general API functions.\n\n pub fn general(&self) -> General<'_> {\n\n General { client: self }\n\n }\n\n\n\n /// Access API functions related to media control.\n\n pub fn media_control(&self) -> MediaControl<'_> {\n\n MediaControl { client: self }\n\n }\n", "file_path": "src/client/mod.rs", "rank": 85, "score": 33826.37902391731 }, { "content": "use semver::{Comparator, Op, Prerelease};\n\nuse serde::de::DeserializeOwned;\n\n#[cfg(feature = \"events\")]\n\nuse tokio::sync::broadcast;\n\nuse tokio::{\n\n net::TcpStream,\n\n sync::{oneshot, Mutex},\n\n task::JoinHandle,\n\n};\n\nuse tokio_tungstenite::{tungstenite::Message, MaybeTlsStream, WebSocketStream};\n\n\n\n#[cfg(feature = \"events\")]\n\nuse crate::events::{Event, EventType};\n\nuse crate::{\n\n requests::{Request, RequestType},\n\n responses::{AuthRequired, Response},\n\n Error, Result,\n\n};\n\n\n\npub use self::{\n", "file_path": "src/client/mod.rs", "rank": 86, "score": 33825.67070885574 }, { "content": "\n\n client.verify_versions().await?;\n\n\n\n Ok(client)\n\n }\n\n\n\n async fn verify_versions(&self) -> Result<()> {\n\n let version = self.general().get_version().await?;\n\n\n\n if !OBS_STUDIO_VERSION.matches(&version.obs_studio_version) {\n\n return Err(Error::ObsStudioVersion(\n\n version.obs_studio_version,\n\n OBS_STUDIO_VERSION,\n\n ));\n\n }\n\n\n\n if !OBS_WEBSOCKET_VERSION.matches(&version.obs_websocket_version) {\n\n return Err(Error::ObsWebsocketVersion(\n\n version.obs_websocket_version,\n\n OBS_WEBSOCKET_VERSION,\n", "file_path": "src/client/mod.rs", "rank": 87, "score": 33825.281267539314 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n fn create_auth_response(challenge: &str, salt: &str, password: &str) -> String {\n\n use sha2::{Digest, Sha256};\n\n\n\n let mut hasher = Sha256::new();\n\n hasher.update(password.as_bytes());\n\n hasher.update(salt.as_bytes());\n\n\n\n let mut auth = String::with_capacity(Sha256::output_size() * 4 / 3 + 4);\n\n\n\n base64::encode_config_buf(hasher.finalize_reset(), base64::STANDARD, &mut auth);\n\n\n\n hasher.update(auth.as_bytes());\n\n hasher.update(challenge.as_bytes());\n\n auth.clear();\n\n\n", "file_path": "src/client/mod.rs", "rank": 88, "score": 33824.90090431083 }, { "content": " serde_json::from_value::<Response<T>>(resp)\n\n .map(|r| r.details)\n\n .map_err(Error::DeserializeResponse)\n\n }\n\n\n\n /// Disconnect from obs-websocket and shut down all machinery.\n\n ///\n\n /// This is called automatically when dropping the client but doesn't wait for all background\n\n /// tasks to complete. Therefore, it is recommended to call this manually once the client is\n\n /// no longer needed.\n\n pub fn disconnect(&mut self) -> impl Future {\n\n let handle = self.handle.take().map(|h| {\n\n h.abort();\n\n h\n\n });\n\n\n\n async {\n\n if let Some(h) = handle {\n\n h.await.ok();\n\n }\n", "file_path": "src/client/mod.rs", "rank": 89, "score": 33824.61045204947 }, { "content": " base64::encode_config_buf(hasher.finalize(), base64::STANDARD, &mut auth);\n\n\n\n auth\n\n }\n\n\n\n /// Get a stream of events. 
Each call to this function creates a new listener, therefore it's\n\n /// recommended to keep the stream around and iterate over it.\n\n ///\n\n /// **Note**: To be able to iterate over the stream you have to pin it with\n\n /// [`futures_util::pin_mut`] for example.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Getting a new stream of events fails with [`Error::Disconnected`] if the client is\n\n /// disconnected from obs-websocket. That can happen either by manually disconnecting, stopping\n\n /// obs-websocket or closing OBS.\n\n #[cfg(feature = \"events\")]\n\n pub fn events(&self) -> Result<impl Stream<Item = Event>> {\n\n if let Some(sender) = &self.event_sender.upgrade() {\n\n let mut receiver = sender.subscribe();\n", "file_path": "src/client/mod.rs", "rank": 90, "score": 33823.20331760463 }, { "content": "\n\n /// Access API functions related to sources.\n\n pub fn sources(&self) -> Sources<'_> {\n\n Sources { client: self }\n\n }\n\n\n\n /// Access API functions related to outputs.\n\n pub fn outputs(&self) -> Outputs<'_> {\n\n Outputs { client: self }\n\n }\n\n\n\n /// Access API functions related to profiles.\n\n pub fn profiles(&self) -> Profiles<'_> {\n\n Profiles { client: self }\n\n }\n\n\n\n /// Access API functions related to recording.\n\n pub fn recording(&self) -> Recording<'_> {\n\n Recording { client: self }\n\n }\n", "file_path": "src/client/mod.rs", "rank": 91, "score": 33821.91054217644 }, { "content": " debug!(\"sending message: {}\", json);\n\n let write_result = self\n\n .write\n\n .lock()\n\n .await\n\n .send(Message::Text(json))\n\n .await\n\n .map_err(Error::Send);\n\n\n\n if let Err(e) = write_result {\n\n self.receivers.lock().await.remove(&id);\n\n return Err(e);\n\n }\n\n\n\n let mut resp = rx.await.map_err(Error::ReceiveMessage)?;\n\n\n\n if let Some(error) = extract_error(&mut resp) {\n\n return Err(Error::Api(error));\n\n }\n\n\n", "file_path": "src/client/mod.rs", "rank": 92, "score": 33821.70983612136 }, { "content": "\n\n /// Access API functions related to streaming.\n\n pub fn streaming(&self) -> Streaming<'_> {\n\n Streaming { client: self }\n\n }\n\n\n\n /// Access API functions related to the studio mode.\n\n pub fn studio_mode(&self) -> StudioMode<'_> {\n\n StudioMode { client: self }\n\n }\n\n\n\n /// Access API functions related to transitions.\n\n pub fn transitions(&self) -> Transitions<'_> {\n\n Transitions { client: self }\n\n }\n\n}\n\n\n", "file_path": "src/client/mod.rs", "rank": 93, "score": 33821.611674934014 }, { "content": "\n\n /// Access API functions related to the replay buffer.\n\n pub fn replay_buffer(&self) -> ReplayBuffer<'_> {\n\n ReplayBuffer { client: self }\n\n }\n\n\n\n /// Access API functions related to scene collections.\n\n pub fn scene_collections(&self) -> SceneCollections<'_> {\n\n SceneCollections { client: self }\n\n }\n\n\n\n /// Access API functions related to scene items.\n\n pub fn scene_items(&self) -> SceneItems<'_> {\n\n SceneItems { client: self }\n\n }\n\n\n\n /// Access API functions related to scenes.\n\n pub fn scenes(&self) -> Scenes<'_> {\n\n Scenes { client: self }\n\n }\n", "file_path": "src/client/mod.rs", "rank": 94, "score": 33821.59520956223 }, { "content": " ));\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn send_message<T>(&self, req: RequestType<'_>) -> Result<T>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let id = self.id_counter.fetch_add(1, Ordering::SeqCst);\n\n let req = Request {\n\n message_id: &id.to_string(),\n\n ty: req,\n\n };\n\n let json = 
serde_json::to_string(&req).map_err(Error::SerializeMessage)?;\n\n\n\n let (tx, rx) = oneshot::channel();\n\n self.receivers.lock().await.insert(id, tx);\n\n\n", "file_path": "src/client/mod.rs", "rank": 95, "score": 33821.58790263661 }, { "content": " let (write, mut read) = socket.split();\n\n let receivers = Arc::new(Mutex::new(HashMap::<_, oneshot::Sender<_>>::new()));\n\n let receivers2 = Arc::clone(&receivers);\n\n #[cfg(feature = \"events\")]\n\n let (event_sender, _) =\n\n broadcast::channel(config.broadcast_capacity.unwrap_or(DEFAULT_CAPACITY));\n\n #[cfg(feature = \"events\")]\n\n let event_sender = Arc::new(event_sender);\n\n #[cfg(feature = \"events\")]\n\n let events_tx = Arc::clone(&event_sender);\n\n\n\n let handle = tokio::spawn(async move {\n\n while let Some(Ok(msg)) = read.next().await {\n\n trace!(\"{}\", msg);\n\n let res: Result<(), InnerError> = async {\n\n let text = msg.into_text().map_err(InnerError::IntoText)?;\n\n let text = if text == \"Server stopping\" {\n\n debug!(\"Websocket server is stopping\");\n\n r#\"{\"update-type\": \"ServerStopping\"}\"#.to_string()\n\n } else {\n", "file_path": "src/client/mod.rs", "rank": 96, "score": 33820.731300348685 }, { "content": "//! The client to the obs-websocket API and main entry point.\n\n\n\n#[cfg(feature = \"events\")]\n\nuse std::sync::Weak;\n\nuse std::{\n\n collections::HashMap,\n\n future::Future,\n\n sync::{\n\n atomic::{AtomicU64, Ordering},\n\n Arc,\n\n },\n\n};\n\n\n\n#[cfg(feature = \"events\")]\n\nuse futures_util::stream::Stream;\n\nuse futures_util::{\n\n sink::SinkExt,\n\n stream::{SplitSink, StreamExt},\n\n};\n\nuse log::{debug, error, trace};\n", "file_path": "src/client/mod.rs", "rank": 97, "score": 33820.661742481 }, { "content": " text\n\n };\n\n\n\n let json = serde_json::from_str::<serde_json::Value>(&text)\n\n .map_err(InnerError::DeserializeMessage)?;\n\n\n\n if let Some(message_id) = json\n\n .as_object()\n\n .and_then(|obj| obj.get(\"message-id\"))\n\n .and_then(|id| id.as_str())\n\n .and_then(|id| id.parse().ok())\n\n {\n\n debug!(\"got message with id {}\", message_id);\n\n if let Some(tx) = receivers2.lock().await.remove(&message_id) {\n\n tx.send(json).ok();\n\n }\n\n } else {\n\n #[cfg(feature = \"events\")]\n\n {\n\n let event = serde_json::from_value(json)\n", "file_path": "src/client/mod.rs", "rank": 98, "score": 33819.58218816358 }, { "content": " };\n\n events_tx.send(event).ok();\n\n }\n\n\n\n // clear all outstanding receivers to stop them from waiting forever on responses\n\n // they'll never receive.\n\n receivers2.lock().await.clear();\n\n });\n\n\n\n let write = Mutex::new(write);\n\n let id_counter = AtomicU64::new(1);\n\n\n\n let client = Self {\n\n write,\n\n id_counter,\n\n receivers,\n\n #[cfg(feature = \"events\")]\n\n event_sender: Arc::downgrade(&event_sender),\n\n handle: Some(handle),\n\n };\n", "file_path": "src/client/mod.rs", "rank": 99, "score": 33818.859438128806 } ]
Rust
src/liboak/back/context.rs
ptal/oak
667c625096540600b6c6ac0943126ca092a9f80e
pub use back::continuation::*; use back::name_factory::*; use back::compiler::ExprCompilerFn; use back::compiler::rtype::*; use back::compiler::{recognizer_compiler, parser_compiler}; use back::compiler::value::*; use quote::quote; use syn::parse_quote; pub struct Context<'a> { grammar: &'a TGrammar, closures: Vec<syn::Stmt>, name_factory: NameFactory, free_variables: Vec<Ident>, mark_variables: Vec<Ident>, mut_ref_free_variables: Vec<(Ident, syn::Type)>, num_combinators_compiled: usize } impl<'a> Context<'a> { pub fn new(grammar: &'a TGrammar) -> Self { Context { grammar: grammar, closures: vec![], name_factory: NameFactory::new(), free_variables: vec![], mark_variables: vec![], mut_ref_free_variables: vec![], num_combinators_compiled: 0 } } pub fn into_recognizer_function(self, body: syn::Expr, rule: Rule) -> syn::Item { let recognizer_fn = recognizer_id(rule.ident()); self.function(recognizer_fn, true, body, parse_quote!(())) } pub fn into_parser_alias(self, rule: Rule) -> syn::Item { let id = rule.ident(); let recognizer_fn = recognizer_name(parse_quote!(#id)); let parser_fn = parser_id(id); self.function(parser_fn, false, parse_quote!(#recognizer_fn(state)), parse_quote!(())) } pub fn into_parser_function(self, body: syn::Expr, rule: Rule) -> syn::Item { let parser_fn = parser_id(rule.ident()); let ty = TypeCompiler::compile(self.grammar, rule.expr_idx); self.function(parser_fn, true, body, ty) } fn function(self, name: Ident, state_mut: bool, body: syn::Expr, ty: syn::Type) -> syn::Item { let state_param = self.state_param(state_mut); let stream_ty = self.grammar.stream_type(); let generics = self.grammar.stream_generics(); let closures = self.closures; parse_quote!( #[inline] pub fn #name #generics (#state_param) -> oak_runtime::ParseState<#stream_ty, #ty> { #(#closures)* #body } ) } fn state_param(&self, state_mut: bool) -> syn::FnArg { let mut_kw = if state_mut { Some(quote!(mut)) } else { None }; let ps_ty = self.parse_state_ty(); parse_quote!(#mut_kw state: #ps_ty) } fn parse_state_ty(&self) -> syn::Type { let stream_ty = self.grammar.stream_type(); parse_quote!(oak_runtime::ParseState<#stream_ty, ()>) } pub fn compile(&mut self, compiler: ExprCompilerFn, idx: usize, success: syn::Expr, failure: syn::Expr) -> syn::Expr { let compiler = compiler(&self.grammar, idx); compiler.compile_expr(self, Continuation::new(success, failure)) } pub fn compile_success(&mut self, compiler: ExprCompilerFn, idx: usize, success: syn::Expr, failure: syn::Expr) -> syn::Expr { let expr = self.compile(compiler, idx, success, failure); self.num_combinators_compiled += 1; expr } pub fn compile_recognizer_expr(&mut self, idx: usize) -> syn::Expr { Continuation::new( parse_quote!(state), parse_quote!(state.failure()) ) .compile_success(self, recognizer_compiler, idx) .unwrap_success() } pub fn value_constructor<F>(&mut self, expr_idx: usize, value_ty: syn::Type, value_constructor: F) -> (syn::Expr, Ident) where F: FnOnce(Ident, syn::Expr) -> syn::Expr, { let result_var = self.next_free_var(); let scope = self.open_scope(expr_idx); self.push_mut_ref_fv(result_var.clone(), value_ty); let result_value = tuple_value(self.free_variables()); let body = Continuation::new( value_constructor(result_var.clone(), result_value), parse_quote!(state.failure()) ) .compile_success(self, parser_compiler, expr_idx) .unwrap_success(); self.close_scope(scope); (body, result_var) } pub fn do_not_duplicate_success(&self) -> bool { self.num_combinators_compiled > 0 } pub fn success_as_closure(&mut self, continuation: 
Continuation) -> Continuation { if self.do_not_duplicate_success() { self.num_combinators_compiled = 0; let closure_name = self.name_factory.next_closure_name(); let args = self.closure_args(); let params = self.closure_params(); continuation.map_success(|success, _| { self.closures.push(parse_quote!(let #closure_name = |#(#params),*| #success;)); parse_quote!(#closure_name(#(#args),*)) }) } else { continuation } } fn closure_params(&self) -> Vec<syn::FnArg> { let stream_ty = self.grammar.stream_type(); vec![self.state_param(true)] .into_iter() .chain(self.mut_ref_free_variables .iter().cloned() .map(|(var, ty)| parse_quote!(#var: &mut #ty))) .chain(self.free_variables .iter() .map(|var| parse_quote!(#var:_))) .chain(self.mark_variables .iter() .map(|var| parse_quote!(#var: #stream_ty))) .collect() } fn closure_args(&self) -> Vec<syn::Expr> { vec![parse_quote!(state)] .into_iter() .chain(self.mut_ref_free_variables .iter().cloned() .map(|(var, _)| parse_quote!(&mut #var))) .chain(self.free_variables .iter() .map(|var| parse_quote!(#var))) .chain(self.mark_variables .iter() .map(|var| parse_quote!(#var.clone()))) .collect() } pub fn next_mark_name(&mut self) -> Ident { self.name_factory.next_mark_name() } pub fn next_counter_name(&mut self) -> Ident { self.name_factory.next_counter_name() } pub fn next_branch_failed_name(&mut self) -> Ident { self.name_factory.next_branch_failed_name() } pub fn next_free_var(&mut self) -> Ident { self.free_variables.pop().expect("Free variables are all bound.") } pub fn next_free_var_skip(&mut self, expr_idx: usize) -> Ident { let card = self.expr_cardinality(expr_idx); let len_fv = self.free_variables.len(); self.free_variables.remove(len_fv-1-card) } pub fn push_mark(&mut self, mark: Ident) { self.mark_variables.push(mark); } pub fn pop_mark(&mut self) { self.mark_variables.pop(); } pub fn free_variables(&self) -> Vec<Ident> { self.free_variables.clone() } pub fn push_mut_ref_fv(&mut self, mut_ref_var: Ident, mut_ref_ty: syn::Type) { self.mut_ref_free_variables.push((mut_ref_var,mut_ref_ty)); } pub fn pop_mut_ref_fv(&mut self) { self.mut_ref_free_variables.pop() .expect("There is no mut ref free variables."); } pub fn expr_cardinality(&self, expr_idx: usize) -> usize { self.grammar[expr_idx].type_cardinality() } pub fn has_unit_type(&self, expr_idx: usize) -> bool { self.grammar[expr_idx].ty == crate::middle::typing::ast::Type::Unit } pub fn open_scope(&mut self, expr_idx: usize) -> Scope { let scope = self.save_scope(); self.num_combinators_compiled = 0; self.mut_ref_free_variables = vec![]; let cardinality = self.expr_cardinality(expr_idx); let free_vars = self.name_factory.fresh_vars(cardinality); self.free_variables = free_vars; scope } pub fn close_scope(&mut self, scope: Scope) { assert!(self.free_variables.is_empty(), "Try to close the scope but all free variables have not been bounded."); self.restore_scope(scope); } pub fn save_scope(&self) -> Scope { Scope::new( self.num_combinators_compiled, self.free_variables.clone(), self.mut_ref_free_variables.clone() ) } pub fn restore_scope(&mut self, scope: Scope) { self.num_combinators_compiled = scope.num_combinators_compiled; self.mut_ref_free_variables = scope.mut_ref_free_variables; self.free_variables = scope.free_variables; } } #[derive(Clone)] pub struct Scope { num_combinators_compiled: usize, free_variables: Vec<Ident>, mut_ref_free_variables: Vec<(Ident, syn::Type)> } impl Scope { fn new(n: usize, fv: Vec<Ident>, mfv: Vec<(Ident, syn::Type)>) -> Self { Scope { num_combinators_compiled: n, 
free_variables: fv, mut_ref_free_variables: mfv } } }
pub use back::continuation::*; use back::name_factory::*; use back::compiler::ExprCompilerFn; use back::compiler::rtype::*; use back::compiler::{recognizer_compiler, parser_compiler}; use back::compiler::value::*; use quote::quote; use syn::parse_quote; pub struct Context<'a> { grammar: &'a TGrammar, closures: Vec<syn::Stmt>, name_factory: NameFactory, free_variables: Vec<Ident>, mark_variables: Vec<Ident>, mut_ref_free_variables: Vec<(Ident, syn::Type)>, num_combinators_compiled: usize } impl<'a> Context<'a> { pub fn new(grammar: &'a TGrammar) -> Self { Context { grammar: grammar, closures: vec![], name_factory: NameFactory::new(), free_variables: vec![], mark_variables: vec![], mut_ref_free_variables: vec![], num_combinators_compiled: 0 } } pub fn into_recognizer_function(self, body: syn::Expr, rule: Rule) -> syn::Item { let recognizer_fn = recognizer_id(rule.ident()); self.function(recognizer_fn, true, body, parse_quote!(())) } pub fn into_parser_alias(self, rule: Rule) -> syn::Item { let id = rule.ident(); let recognizer_fn = recognizer_name(parse_quote!(#id)); let parser_fn = parser_id(id); self.function(parser_fn, false, parse_quote!(#recognizer_fn(state)), parse_quote!(())) } pub fn into_parser_function(self, body: syn::Expr, rule: Rule) -> syn::Item { let parser_fn = parser_id(rule.ident()); let ty = TypeCompiler::compile(self.grammar, rule.expr_idx); self.function(parser_fn, true, body, ty) } fn function(self, name: Ident, state_mut: bool, body: syn::Expr, ty: syn::Type) -> syn::Item { let state_param = self.state_param(state_mut); let stream_ty = self.grammar.stream_type(); let generics = self.grammar.stream_generics(); let closures = self.closures; parse_quote!( #[inline] pub fn #name #generics (#state_param) -> oak_runtime::ParseState<#stream_ty, #ty> { #(#closures)* #body } ) } fn state_param(&self, state_mut: bool) -> syn::FnArg { let mut_kw = if state_mut { Some(quote!(mut)) } else { None }; let ps_ty = self.parse_state_ty(); parse_quote!(#mut_kw state: #ps_ty) } fn parse_state_ty(&self) -> syn::Type { let stream_ty = self.grammar.stream_type(); parse_quote!(oak_runtime::ParseState<#stream_ty, ()>) } pub fn compile(&mut self, compiler: ExprCompilerFn, idx: usize, success: syn::Expr, failure: syn::Expr) -> syn::Expr { let compiler = compiler(&self.grammar, idx); compiler.compile_expr(self, Continuation::new(success, failure)) } pub fn compile_success(&mut self, compiler: ExprCompilerFn, idx: usize, success: syn::Expr, failure: syn::Expr) -> syn::Expr { let expr = self.compile(compiler, idx, success, failure); self.num_combinators_compiled += 1; expr } pub fn compile_recognizer_expr(&mut self, idx: usize) -> syn::Expr { Continuation::new( parse_quote!(state), parse_quote!(state.failure()) ) .compile_success(self, recognizer_compiler, idx) .unwrap_success() } pub fn value_constructor<F>(&mut self, expr_idx: usize, value_ty: syn::Type, value_constructor: F) -> (syn::Expr, Ident) where F: FnOnce(Ident, syn::Expr) -> syn::Expr, { let result_var = self.next_free_var(); let scope = self.open_scope(expr_idx);
pub fn do_not_duplicate_success(&self) -> bool { self.num_combinators_compiled > 0 } pub fn success_as_closure(&mut self, continuation: Continuation) -> Continuation { if self.do_not_duplicate_success() { self.num_combinators_compiled = 0; let closure_name = self.name_factory.next_closure_name(); let args = self.closure_args(); let params = self.closure_params(); continuation.map_success(|success, _| { self.closures.push(parse_quote!(let #closure_name = |#(#params),*| #success;)); parse_quote!(#closure_name(#(#args),*)) }) } else { continuation } } fn closure_params(&self) -> Vec<syn::FnArg> { let stream_ty = self.grammar.stream_type(); vec![self.state_param(true)] .into_iter() .chain(self.mut_ref_free_variables .iter().cloned() .map(|(var, ty)| parse_quote!(#var: &mut #ty))) .chain(self.free_variables .iter() .map(|var| parse_quote!(#var:_))) .chain(self.mark_variables .iter() .map(|var| parse_quote!(#var: #stream_ty))) .collect() } fn closure_args(&self) -> Vec<syn::Expr> { vec![parse_quote!(state)] .into_iter() .chain(self.mut_ref_free_variables .iter().cloned() .map(|(var, _)| parse_quote!(&mut #var))) .chain(self.free_variables .iter() .map(|var| parse_quote!(#var))) .chain(self.mark_variables .iter() .map(|var| parse_quote!(#var.clone()))) .collect() } pub fn next_mark_name(&mut self) -> Ident { self.name_factory.next_mark_name() } pub fn next_counter_name(&mut self) -> Ident { self.name_factory.next_counter_name() } pub fn next_branch_failed_name(&mut self) -> Ident { self.name_factory.next_branch_failed_name() } pub fn next_free_var(&mut self) -> Ident { self.free_variables.pop().expect("Free variables are all bound.") } pub fn next_free_var_skip(&mut self, expr_idx: usize) -> Ident { let card = self.expr_cardinality(expr_idx); let len_fv = self.free_variables.len(); self.free_variables.remove(len_fv-1-card) } pub fn push_mark(&mut self, mark: Ident) { self.mark_variables.push(mark); } pub fn pop_mark(&mut self) { self.mark_variables.pop(); } pub fn free_variables(&self) -> Vec<Ident> { self.free_variables.clone() } pub fn push_mut_ref_fv(&mut self, mut_ref_var: Ident, mut_ref_ty: syn::Type) { self.mut_ref_free_variables.push((mut_ref_var,mut_ref_ty)); } pub fn pop_mut_ref_fv(&mut self) { self.mut_ref_free_variables.pop() .expect("There is no mut ref free variables."); } pub fn expr_cardinality(&self, expr_idx: usize) -> usize { self.grammar[expr_idx].type_cardinality() } pub fn has_unit_type(&self, expr_idx: usize) -> bool { self.grammar[expr_idx].ty == crate::middle::typing::ast::Type::Unit } pub fn open_scope(&mut self, expr_idx: usize) -> Scope { let scope = self.save_scope(); self.num_combinators_compiled = 0; self.mut_ref_free_variables = vec![]; let cardinality = self.expr_cardinality(expr_idx); let free_vars = self.name_factory.fresh_vars(cardinality); self.free_variables = free_vars; scope } pub fn close_scope(&mut self, scope: Scope) { assert!(self.free_variables.is_empty(), "Try to close the scope but all free variables have not been bounded."); self.restore_scope(scope); } pub fn save_scope(&self) -> Scope { Scope::new( self.num_combinators_compiled, self.free_variables.clone(), self.mut_ref_free_variables.clone() ) } pub fn restore_scope(&mut self, scope: Scope) { self.num_combinators_compiled = scope.num_combinators_compiled; self.mut_ref_free_variables = scope.mut_ref_free_variables; self.free_variables = scope.free_variables; } } #[derive(Clone)] pub struct Scope { num_combinators_compiled: usize, free_variables: Vec<Ident>, mut_ref_free_variables: Vec<(Ident, 
syn::Type)> } impl Scope { fn new(n: usize, fv: Vec<Ident>, mfv: Vec<(Ident, syn::Type)>) -> Self { Scope { num_combinators_compiled: n, free_variables: fv, mut_ref_free_variables: mfv } } }
self.push_mut_ref_fv(result_var.clone(), value_ty); let result_value = tuple_value(self.free_variables()); let body = Continuation::new( value_constructor(result_var.clone(), result_value), parse_quote!(state.failure()) ) .compile_success(self, parser_compiler, expr_idx) .unwrap_success(); self.close_scope(scope); (body, result_var) }
function_block-function_prefix_line
[ { "content": "pub fn recognizer_compiler(grammar: &TGrammar, idx: usize) -> Box<dyn CompileExpr> {\n\n match grammar.expr_by_index(idx) {\n\n StrLiteral(lit) => Box::new(StrLiteralCompiler::recognizer(lit)),\n\n CharacterClass(classes) => Box::new(CharacterClassCompiler::recognizer(classes)),\n\n AnySingleChar => Box::new(AnySingleCharCompiler::recognizer()),\n\n Sequence(seq) => Box::new(SequenceCompiler::recognizer(seq)),\n\n Choice(choices) => Box::new(ChoiceCompiler::recognizer(choices)),\n\n ZeroOrOne(expr_idx) => Box::new(OptionalCompiler::recognizer(expr_idx)),\n\n ZeroOrMore(expr_idx) => Box::new(RepeatCompiler::recognizer(expr_idx, 0)),\n\n OneOrMore(expr_idx) => Box::new(RepeatCompiler::recognizer(expr_idx, 1)),\n\n NotPredicate(expr_idx) => Box::new(SyntacticPredicateCompiler::recognizer(expr_idx, Kind::Not)),\n\n AndPredicate(expr_idx) => Box::new(SyntacticPredicateCompiler::recognizer(expr_idx, Kind::And)),\n\n NonTerminalSymbol(id) => Box::new(NonTerminalCompiler::recognizer(id)),\n\n ExternalNonTerminalSymbol(path) => Box::new(NonTerminalCompiler::external_recognizer(path)),\n\n SemanticAction(expr_idx, _, _)\n\n | TypeAscription(expr_idx, _)\n\n | SpannedExpr(expr_idx)\n\n | RangeExpr(expr_idx) => recognizer_compiler(grammar, expr_idx),\n\n }\n\n}\n", "file_path": "src/liboak/back/compiler/mod.rs", "rank": 0, "score": 270654.4618237379 }, { "content": "pub fn parser_compiler(grammar: &TGrammar, idx: usize) -> Box<dyn CompileExpr> {\n\n if grammar[idx].ty == Type::Unit {\n\n recognizer_compiler(grammar, idx)\n\n }\n\n else {\n\n match grammar.expr_by_index(idx) {\n\n StrLiteral(lit) => Box::new(StrLiteralCompiler::parser(lit)),\n\n CharacterClass(classes) => Box::new(CharacterClassCompiler::parser(classes)),\n\n AnySingleChar => Box::new(AnySingleCharCompiler::parser()),\n\n Sequence(seq) => Box::new(SequenceCompiler::parser(seq)),\n\n Choice(choices) => Box::new(ChoiceCompiler::parser(choices)),\n\n ZeroOrOne(expr_idx) => Box::new(OptionalCompiler::parser(expr_idx)),\n\n ZeroOrMore(expr_idx) => Box::new(RepeatCompiler::parser(expr_idx, 0)),\n\n OneOrMore(expr_idx) => Box::new(RepeatCompiler::parser(expr_idx, 1)),\n\n NonTerminalSymbol(id) => Box::new(NonTerminalCompiler::parser(id, idx)),\n\n ExternalNonTerminalSymbol(path) => Box::new(NonTerminalCompiler::external_parser(path, idx)),\n\n SemanticAction(expr_idx, boxed, action) => Box::new(SemanticActionCompiler::parser(expr_idx, boxed, action, idx)),\n\n TypeAscription(expr_idx, _) => parser_compiler(grammar, expr_idx),\n\n SpannedExpr(expr_idx) => Box::new(SpannedExprCompiler::parser(expr_idx, false)),\n\n RangeExpr(expr_idx) => Box::new(SpannedExprCompiler::parser(expr_idx, true)),\n\n NotPredicate(_)\n\n | AndPredicate(_) => unreachable!(\n\n \"BUG: Syntactic predicate can not be compiled to parser (they do not generate data).\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/liboak/back/compiler/mod.rs", "rank": 1, "score": 270654.46182373783 }, { "content": "pub fn tuple_value(vars_names: Vec<Ident>) -> syn::Expr\n\n{\n\n parse_quote!((#(#vars_names),*))\n\n}\n\n\n", "file_path": "src/liboak/back/compiler/value.rs", "rank": 2, "score": 267789.4891618263 }, { "content": "pub fn parser_id(id: Ident) -> Ident {\n\n format_ident!(\"parse_{}\", id)\n\n}\n\n\n", "file_path": "src/liboak/back/name_factory.rs", "rank": 3, "score": 246501.06542328562 }, { "content": "pub fn recognizer_id(id: Ident) -> Ident {\n\n format_ident!(\"recognize_{}\", id)\n\n}\n\n\n\npub struct NameFactory\n\n{\n\n prefix_uid: usize,\n\n mark_uid: 
usize,\n\n branch_failed_uid: usize,\n\n counter_uid: usize,\n\n closure_uid: usize\n\n}\n\n\n\nimpl NameFactory\n\n{\n\n pub fn new() -> NameFactory {\n\n NameFactory {\n\n prefix_uid: 1,\n\n mark_uid: 0,\n\n branch_failed_uid: 0,\n", "file_path": "src/liboak/back/name_factory.rs", "rank": 4, "score": 246501.06542328562 }, { "content": "pub fn rule_duplicate(mut grammar: AGrammar, rules: Vec<Rule>) -> Partial<AGrammar>\n\n{\n\n DuplicateItem::analyse(rules.into_iter(), String::from(\"rule\"))\n\n .map(move |rules| {\n\n grammar.rules = rules.into_iter().map(|x| x.1).collect();\n\n grammar\n\n })\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/duplicate.rs", "rank": 5, "score": 227650.20025361545 }, { "content": "pub fn tuple_pattern(vars_names: Vec<Ident>) -> syn::Pat\n\n{\n\n parse_quote!((#(#vars_names),*))\n\n}\n", "file_path": "src/liboak/back/compiler/value.rs", "rank": 6, "score": 223405.09523270663 }, { "content": "pub fn compile(grammar: TGrammar) -> proc_macro2::TokenStream\n\n{\n\n compiler::GrammarCompiler::compile(grammar)\n\n}\n", "file_path": "src/liboak/back/mod.rs", "rank": 7, "score": 187020.746954031 }, { "content": "pub fn display_path_cycle(path: &Vec<Ident>) -> String {\n\n let mut path_desc = String::new();\n\n for rule in path {\n\n path_desc.extend(format!(\"{} -> \", rule).chars());\n\n }\n\n path_desc.extend(format!(\"{}\", path[0]).chars());\n\n path_desc\n\n}\n", "file_path": "src/liboak/ast.rs", "rank": 8, "score": 184962.91600232763 }, { "content": "pub fn walk_exprs<R: Default, V: ?Sized>(visitor: &mut V, exprs: Vec<usize>) -> Vec<R> where\n\n V: Visitor<R>\n\n{\n\n exprs.into_iter().map(|expr| visitor.visit_expr(expr)).collect()\n\n}\n", "file_path": "src/liboak/visitor.rs", "rank": 9, "score": 179574.87699311366 }, { "content": "pub fn typecheck(fgrammar: FGrammar) -> TGrammar {\n\n Partial::Value(fgrammar)\n\n .and_then(|grammar| at_least_one_rule_declared(grammar))\n\n .and_then(|grammar| analysis::analyse(grammar))\n\n .ensure(\"aborting due to previous error (analysis phase).\")\n\n .and_then(|grammar| extract_stream_type(grammar))\n\n .and_then(|grammar| typing::type_inference(grammar))\n\n .expect(\"aborting due to previous error (typing phase).\")\n\n}\n\n\n", "file_path": "src/liboak/middle/mod.rs", "rank": 10, "score": 178800.79750795942 }, { "content": "fn bind_var<'a>(context: &mut Context<'a>) -> Ident {\n\n context.next_free_var()\n\n}\n\n\n\npub struct CharacterClassCompiler\n\n{\n\n classes: CharacterClassExpr,\n\n bounded_var: VarInPatternFn\n\n}\n\n\n\nimpl CharacterClassCompiler\n\n{\n\n pub fn recognizer(classes: CharacterClassExpr) -> CharacterClassCompiler {\n\n CharacterClassCompiler {\n\n classes: classes,\n\n bounded_var: bind_x_var\n\n }\n\n }\n\n\n\n pub fn parser(classes: CharacterClassExpr) -> CharacterClassCompiler {\n", "file_path": "src/liboak/back/compiler/character_class.rs", "rank": 11, "score": 178492.6282373792 }, { "content": "fn test_state<'a>(state: ParseState<StrStream<'a>, Expr>)\n\n{\n\n let data = state.unwrap_data();\n\n assert_eq!(data.c2, 'b');\n\n assert_eq!(data.c3, 'c');\n\n assert_eq!(data.full_sp, make_span(0, 4));\n\n assert_eq!(data.span_a, make_span(0, 1));\n\n assert_eq!(data.c3_sp, make_span(2, 3));\n\n}\n\n\n", "file_path": "tests/grammars/stream_span.rs", "rank": 12, "score": 177517.55303144874 }, { "content": "fn bind_x_var<'a>(_context: &mut Context<'a>) -> Ident {\n\n format_ident!(\"x\")\n\n}\n\n\n", "file_path": "src/liboak/back/compiler/character_class.rs", "rank": 13, "score": 
171202.2849171383 }, { "content": "fn merge_grammar_attr(grammar: &mut AGrammar, ident: &Ident) {\n\n match &*ident.to_string() {\n\n \"debug_typing\" => {\n\n grammar.merge_print_typing(PrintLevel::Debug);\n\n },\n\n \"show_typing\" => {\n\n grammar.merge_print_typing(PrintLevel::Show);\n\n },\n\n _ => {\n\n warn_ignore_attr(ident.span());\n\n }\n\n }\n\n}\n", "file_path": "src/liboak/middle/analysis/attribute.rs", "rank": 14, "score": 166791.88478374598 }, { "content": "type VarInPatternFn = for <'a> fn(&mut Context<'a>) -> Ident;\n\n\n", "file_path": "src/liboak/back/compiler/character_class.rs", "rank": 15, "score": 165430.92688930276 }, { "content": "pub fn rust_functions_duplicate(mut grammar: AGrammar, items: Vec<syn::Item>) -> Partial<AGrammar>\n\n{\n\n let mut functions = vec![];\n\n let mut others = vec![];\n\n for item in items {\n\n if let syn::Item::Fn(fun) = item {\n\n functions.push(fun);\n\n }\n\n else {\n\n others.push(item);\n\n }\n\n }\n\n DuplicateItem::analyse(functions.into_iter(), String::from(\"rust function\"))\n\n .map(move |functions| {\n\n grammar.rust_functions = functions.into_iter().collect();\n\n grammar.rust_items = others;\n\n grammar\n\n })\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/duplicate.rs", "rank": 16, "score": 163624.8575552098 }, { "content": "pub fn print_debug(grammar: &IGrammar) {\n\n for i in 0..grammar.exprs.len() {\n\n println!(\"{}: {:?}: {:?}\", i, grammar.exprs[i], grammar.exprs_info[i].ty);\n\n }\n\n}\n", "file_path": "src/liboak/middle/typing/typing_printer.rs", "rank": 17, "score": 153421.79983331944 }, { "content": "pub fn analyse(fgrammar: FGrammar) -> Partial<AGrammar> {\n\n let grammar = AGrammar::new(fgrammar.start_span, fgrammar.exprs, fgrammar.exprs_info);\n\n let frust_items = fgrammar.rust_items;\n\n let fattributes = fgrammar.attributes;\n\n rule_duplicate(grammar, fgrammar.rules)\n\n .and_then(|grammar| rust_functions_duplicate(grammar, frust_items))\n\n .and_then(|grammar| ResolveNonTerminal::resolve(grammar))\n\n .and_then(|grammar| WellFormedness::analyse(grammar))\n\n .and_then(|grammar| UselessChaining::analyse(grammar))\n\n // .and_then(|grammar| UnreachableRule::analyse(grammar)) // This analysis must be reviewed and fixed.\n\n .and_then(|grammar| decorate_with_attributes(grammar, fattributes))\n\n}\n", "file_path": "src/liboak/middle/analysis/mod.rs", "rank": 18, "score": 153231.3701824965 }, { "content": "pub fn decorate_with_attributes(mut grammar: AGrammar,\n\n attributes: Vec<syn::Attribute>) -> Partial<AGrammar>\n\n{\n\n merge_grammar_attributes(&mut grammar, attributes);\n\n Partial::Value(grammar)\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/attribute.rs", "rank": 19, "score": 149655.6404443946 }, { "content": "pub fn recognizer_name(rule_path: syn::Path) -> syn::Path {\n\n modify_path(rule_path, \"recognize\")\n\n}\n\n\n", "file_path": "src/liboak/back/name_factory.rs", "rank": 20, "score": 146258.13967424873 }, { "content": "pub fn parser_name(rule_path: syn::Path) -> syn::Path {\n\n modify_path(rule_path, \"parse\")\n\n}\n\n\n", "file_path": "src/liboak/back/name_factory.rs", "rank": 21, "score": 146258.13967424873 }, { "content": "fn at_least_one_rule_declared(fgrammar: FGrammar) -> Partial<FGrammar> {\n\n if fgrammar.rules.len() == 0 {\n\n fgrammar.start_span.unstable()\n\n .error(\"At least one rule must be declared.\")\n\n .emit();\n\n Partial::Nothing\n\n } else {\n\n Partial::Value(fgrammar)\n\n }\n\n}\n\n\n", "file_path": "src/liboak/middle/mod.rs", "rank": 22, "score": 
142009.83288929943 }, { "content": "pub fn type_inference(agrammar: AGrammar) -> Partial<TGrammar> {\n\n let grammar = IGrammar::from(agrammar);\n\n Depth::infer(grammar)\n\n}\n", "file_path": "src/liboak/middle/typing/mod.rs", "rank": 23, "score": 141933.97675147565 }, { "content": "fn merge_grammar_attributes(grammar: &mut AGrammar, attrs: Vec<syn::Attribute>) {\n\n for attr in attrs {\n\n if let Some(ident) = attr.path.get_ident() {\n\n merge_grammar_attr(grammar, ident);\n\n }\n\n else {\n\n warn_ignore_attr(attr.span());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/attribute.rs", "rank": 24, "score": 139341.54957702348 }, { "content": "fn test_state_expr3<'a>(state: ParseState<StrStream<'a>, Span>) {\n\n assert_eq!(state.unwrap_data(), make_span(0,2))\n\n}\n\n\n", "file_path": "tests/grammars/stream_span.rs", "rank": 25, "score": 129374.9436155684 }, { "content": "fn bind_value<'a>(context: &mut Context<'a>) -> syn::Pat {\n\n let var = context.next_free_var();\n\n parse_quote!(#var)\n\n}\n\n\n\npub struct AnySingleCharCompiler\n\n{\n\n matched_pattern: MatchPatternFn\n\n}\n\n\n\nimpl AnySingleCharCompiler\n\n{\n\n pub fn recognizer() -> AnySingleCharCompiler {\n\n AnySingleCharCompiler {\n\n matched_pattern: ignore_value\n\n }\n\n }\n\n\n\n pub fn parser() -> AnySingleCharCompiler {\n\n AnySingleCharCompiler {\n", "file_path": "src/liboak/back/compiler/any_single_char.rs", "rank": 26, "score": 128874.04918443385 }, { "content": "pub fn make_span(lo: usize, hi: usize) -> Span {\n\n mk_sp(\n\n BytePos(lo as u32),\n\n BytePos(hi as u32))\n\n}\n", "file_path": "runtime/src/lib.rs", "rank": 27, "score": 127296.08693489376 }, { "content": "pub trait CompileExpr\n\n{\n\n fn compile_expr<'a>(&self, context: &mut Context<'a>, cont: Continuation) -> syn::Expr;\n\n}\n\n\n\npub type ExprCompilerFn = fn(&TGrammar, usize) -> Box<dyn CompileExpr>;\n\n\n", "file_path": "src/liboak/back/compiler/mod.rs", "rank": 28, "score": 125837.02114365458 }, { "content": "pub fn walk_expr<R: Default, V: ?Sized>(visitor: &mut V, this: usize) -> R where\n\n V: Visitor<R>\n\n{\n\n match visitor.expr_by_index(this) {\n\n StrLiteral(lit) => {\n\n visitor.visit_str_literal(this, lit)\n\n }\n\n AnySingleChar => {\n\n visitor.visit_any_single_char(this)\n\n }\n\n NonTerminalSymbol(rule) => {\n\n visitor.visit_non_terminal_symbol(this, &rule)\n\n }\n\n ExternalNonTerminalSymbol(rule) => {\n\n visitor.visit_external_non_terminal_symbol(this, &rule)\n\n }\n\n Sequence(seq) => {\n\n visitor.visit_sequence(this, seq)\n\n }\n\n Choice(choices) => {\n", "file_path": "src/liboak/visitor.rs", "rank": 29, "score": 125302.1788690517 }, { "content": "fn ignore_value<'a>(_context: &mut Context<'a>) -> syn::Pat {\n\n parse_quote!(_)\n\n}\n\n\n", "file_path": "src/liboak/back/compiler/any_single_char.rs", "rank": 30, "score": 121592.07758487698 }, { "content": "type MatchPatternFn = for <'a> fn(&mut Context<'a>) -> syn::Pat;\n\n\n", "file_path": "src/liboak/back/compiler/any_single_char.rs", "rank": 31, "score": 117860.86917618374 }, { "content": "/// Modify the default Stream type in the grammar if the user redefined it in its item list.\n\nfn extract_stream_type(mut grammar: AGrammar)\n\n -> Partial<AGrammar>\n\n{\n\n let mut stream_redefined = false;\n\n {\n\n let stream_alias =\n\n grammar.rust_items.iter().find_map(|item| {\n\n match item {\n\n &syn::Item::Type(ref ty) => {\n\n if ty.ident.to_string() == \"Stream\" {\n\n Some(ty.clone())\n\n }\n\n else { None }\n\n }\n\n _ => None\n\n }\n\n });\n\n\n\n if let 
Some(ty) = stream_alias {\n\n grammar.stream_alias = ty;\n\n stream_redefined = true;\n\n }\n\n }\n\n if !stream_redefined {\n\n grammar.rust_items.push(syn::Item::Type(grammar.stream_alias.clone()));\n\n }\n\n Partial::Value(grammar)\n\n}\n", "file_path": "src/liboak/middle/mod.rs", "rank": 32, "score": 107467.80895501985 }, { "content": "struct GrammarInfo\n\n{\n\n name: String,\n\n bulk_file: Option<String>,\n\n recognizer: RecognizerFn\n\n}\n\n\n\nimpl GrammarInfo\n\n{\n\n fn new(name: &str, bulk_file: Option<String>, recognizer: RecognizerFn) -> GrammarInfo {\n\n GrammarInfo {\n\n name: String::from(name),\n\n bulk_file: bulk_file,\n\n recognizer: recognizer\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/testoak.rs", "rank": 33, "score": 98447.15220884372 }, { "content": "#[proc_macro]\n\npub fn oak(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as front::ast::FGrammar);\n\n // println!(\"parsing successful!\");\n\n let tast = middle::typecheck(ast);\n\n // println!(\"typing successful!\");\n\n proc_macro::TokenStream::from(back::compile(tast))\n\n}\n", "file_path": "src/liboak/lib.rs", "rank": 34, "score": 93836.15114467009 }, { "content": "pub trait IntoState<S, T>\n\n{\n\n fn into_state(self) -> ParseState<S, T>;\n\n}\n\n\n\nimpl<S, T, R> IntoState<S, T> for R where\n\n R: Stream<Output=S>,\n\n S: Ord + Clone + HasNext\n\n{\n\n fn into_state(self) -> ParseState<S, T> {\n\n ParseState::new(self.stream())\n\n }\n\n}\n\n\n\npub struct ParseExpectation<S>\n\n{\n\n expected: HashSet<&'static str>,\n\n farthest_read: S\n\n}\n\n\n", "file_path": "runtime/src/parse_state.rs", "rank": 35, "score": 92124.83074008595 }, { "content": "struct Occurence\n\n{\n\n choice: Vec<Vec<(Character,Pattern)>>\n\n}\n\n\n\nimpl Occurence\n\n{\n\n fn push_sequence(&self, other: Occurence) -> Occurence{\n\n let mut res = vec![];\n\n for choice1 in other.choice.clone(){\n\n for choice2 in self.choice.clone(){\n\n let mut tmp = choice2.clone();\n\n for couple in choice1.clone(){\n\n tmp.push(couple)\n\n }\n\n res.push(tmp)\n\n }\n\n }\n\n Occurence{\n\n choice: res\n", "file_path": "src/liboak/middle/analysis/unreachable_rule.rs", "rank": 36, "score": 92036.5623905948 }, { "content": "pub trait ItemIdent\n\n{\n\n fn ident(&self) -> Ident;\n\n}\n\n\n\nimpl ItemIdent for syn::Item\n\n{\n\n fn ident(&self) -> Ident {\n\n use syn::Item::*;\n\n match self {\n\n Const(item) => item.ident.clone(),\n\n Enum(item) => item.ident.clone(),\n\n ExternCrate(item) => item.ident.clone(),\n\n Fn(item) => item.sig.ident.clone(),\n\n ForeignMod(_) => panic!(\"[bug] `ForeignMod` has no identifier (please report this issue).\"),\n\n Impl(_) => panic!(\"[bug] `Impl` has no identifier (please report this issue).\"),\n\n Macro(_) => panic!(\"[bug] `Macro` has no identifier (please report this issue).\"),\n\n Macro2(item) => item.ident.clone(),\n\n Mod(item) => item.ident.clone(),\n\n Static(item) => item.ident.clone(),\n", "file_path": "src/liboak/identifier.rs", "rank": 37, "score": 89582.85091454734 }, { "content": "pub trait ExprByIndex\n\n{\n\n fn expr_by_index(&self, index: usize) -> Expression;\n\n}\n\n\n\npub struct Grammar<ExprInfo>\n\n{\n\n pub start_span: Span,\n\n pub rules: Vec<Rule>,\n\n pub exprs: Vec<Expression>,\n\n pub exprs_info: Vec<ExprInfo>,\n\n pub stream_alias: syn::ItemType,\n\n pub rust_functions: HashMap<Ident, syn::ItemFn>,\n\n pub rust_items: Vec<syn::Item>,\n\n pub attributes: GrammarAttributes\n\n}\n\n\n\nimpl<ExprInfo> Grammar<ExprInfo>\n\n{\n\n pub fn new(start_span: Span, 
exprs: Vec<Expression>,\n", "file_path": "src/liboak/ast.rs", "rank": 38, "score": 89559.19534923098 }, { "content": "#[test]\n\nfn test_stream_span() {\n\n let state = parse_expr(\"abcb\".into_state());\n\n let state2 = parse_expr2(\"abcb\".into_state());\n\n let state3 = parse_expr3(\"ac\".into_state());\n\n let state4 = parse_expr3(\"bc\".into_state());\n\n test_state(state);\n\n test_state(state2);\n\n test_state_expr3(state3);\n\n test_state_expr3(state4);\n\n}\n", "file_path": "tests/grammars/stream_span.rs", "rank": 39, "score": 87011.61161942474 }, { "content": "type RecognizerFn = Box<dyn for<'a> Fn(ParseState<StrStream<'a>, ()>) -> ParseState<StrStream<'a>, ()>>;\n\n\n", "file_path": "tests/testoak.rs", "rank": 40, "score": 83799.67182435535 }, { "content": "pub trait Visitor<R: Default> : ExprByIndex\n\n{\n\n fn visit_expr(&mut self, this: usize) -> R {\n\n walk_expr(self, this)\n\n }\n\n\n\n fn visit_str_literal(&mut self, _this: usize, _lit: String) -> R { R::default() }\n\n fn visit_non_terminal_symbol(&mut self, _this: usize, _rule: &Ident) -> R { R::default() }\n\n fn visit_external_non_terminal_symbol(&mut self, _this: usize, _rule: &syn::Path) -> R { R::default() }\n\n fn visit_atom(&mut self, _this: usize) -> R { R::default() }\n\n\n\n fn visit_any_single_char(&mut self, this: usize) -> R {\n\n self.visit_atom(this)\n\n }\n\n\n\n fn visit_character_class(&mut self, this: usize, _char_class: CharacterClassExpr) -> R {\n\n self.visit_atom(this)\n\n }\n\n\n\n fn visit_spanned_expr(&mut self, _this: usize, child: usize) -> R {\n", "file_path": "src/liboak/visitor.rs", "rank": 41, "score": 78794.33131348396 }, { "content": " / ident\n\n\n\n type_names = spacing type_name (lparen type_names (comma type_names)* rparen)?\n\n\n\n spacing = [\" \\n\\t\"]*:(^)\n\n\n\n ident = (![\"0-9\"] [\"a-zA-Z0-9_\"]+ spacing):(^)\n\n auto_infer_kw = \"_\" spacing\n\n rparen = \")\" spacing\n\n not_eof = !.\n\n comma = \",\" spacing\n\n}\n", "file_path": "tests/grammars/type_name.rs", "rank": 42, "score": 71427.3461874819 }, { "content": "// Copyright 2014 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse oak::oak;\n\n\n\noak! 
{\n\n lparen = \"(\" spacing\n\n\n\n type_name = auto_infer_kw &(lparen / not_eof / comma)\n", "file_path": "tests/grammars/type_name.rs", "rank": 43, "score": 71423.93385265442 }, { "content": "// Copyright 2018 Chao Lin & William Sergeant (Sorbonne University)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse oak::oak;\n\n\n\noak! {\n\n // test0 = \"a\"*\n\n // /\"a\"+ // is detected\n\n\n", "file_path": "tests/grammars/unreachable_rule.rs", "rank": 44, "score": 71342.97974068324 }, { "content": " // / \"single_space\" / \"bot\" / \"top\" / \"ref\" / \"module\"\n\n // / \"run\" / \"true\" / \"false\" / \"unknown\" / \"nothing\"\n\n // / \"universe\" / \"suspend\" / \"abort\" / \"java_kw\"\n\n\n\n // test5 = \"a\"*\n\n // /\"a\"? // is detected\n\n\n\n // test6 = \"a\"\n\n // /!\"a\"\n\n //\n\n // test7 = !\"a\"\n\n // /\"a\"\n\n //\n\n // test8 = \"a\" !\"a\"\n\n // /\"a\"\n\n //\n\n // test9 = \"a\"\n\n // /&\"a\"\n\n //\n\n // test10 = &\"a\"\n\n // /\"a\"\n\n //\n\n // test11 = \"b\"\n\n // /!\"a\"\n\n\n\n // test12 = .\n\n // /\"a\" // is detected\n\n}\n", "file_path": "tests/grammars/unreachable_rule.rs", "rank": 45, "score": 71342.96334447364 }, { "content": " // test1 = \"a\"\n\n // /\"ab\"\n\n //\n\n test2 = \"ab\"\n\n / \"a\"\n\n // r1 = [\"a-c\"] -> () / \"b\" -> () // problem of span\n\n //\n\n // test3 = \"a\"\n\n // /\"a\"\n\n //\n\n // test4 = \"abcd\"\n\n // /\"a\" \"bc\"\n\n //\n\n // test4bis = \"a\" \"bc\"\n\n // / \"abcd\"\n\n\n\n // keyword = \"proc\"/\"par\"/\"space\"/\"end\"/\"pre\"\n\n // / \"read\" / \"write\" / \"readwrite\" / \"or\" / \"and\" / \"not\"\n\n // / \"when\" / \"then\" / \"else\" / \"loop\" / \"pause up\"\n\n // / \"pause\" / \"stop\" / \"in\" / \"word_line\" / \"singe_time\"\n", "file_path": "tests/grammars/unreachable_rule.rs", "rank": 46, "score": 71339.36710570341 }, { "content": " let success = parse_quote!(state.success(#vars));\n\n let failure = parse_quote!(state.failure());\n\n let body = context.compile(parser_compiler,\n\n self.expr(), success, failure);\n\n\n\n context.close_scope(scope);\n\n context.into_parser_function(body, self.rule.clone())\n\n }\n\n }\n\n\n\n fn parser_equals_recognizer(&self) -> bool {\n\n self.grammar[self.expr()].ty == Type::Unit\n\n }\n\n\n\n fn expr(&self) -> usize {\n\n self.rule.expr_idx\n\n }\n\n}\n", "file_path": "src/liboak/back/compiler/rule.rs", "rank": 47, "score": 69185.95387367651 }, { "content": " fn compile_recognizer(&self) -> syn::Item {\n\n let mut context = Context::new(self.grammar);\n\n let success = parse_quote!(state.success(()));\n\n let failure = parse_quote!(state.failure());\n\n\n\n let body = context.compile(recognizer_compiler,\n\n self.expr(), success, failure);\n\n\n\n context.into_recognizer_function(body, self.rule.clone())\n\n }\n\n\n\n fn compile_parser(&self) -> syn::Item {\n\n let mut context = Context::new(self.grammar);\n\n if self.parser_equals_recognizer() {\n\n context.into_parser_alias(self.rule.clone())\n\n }\n\n else {\n\n let 
scope = context.open_scope(self.expr());\n\n let vars = tuple_value(context.free_variables());\n\n\n", "file_path": "src/liboak/back/compiler/rule.rs", "rank": 48, "score": 69172.46505562533 }, { "content": " rule: Rule\n\n}\n\n\n\nimpl<'a> RuleCompiler<'a>\n\n{\n\n pub fn compile(grammar: &'a TGrammar, rule: Rule) -> Vec<syn::Item> {\n\n let compiler = RuleCompiler::new(grammar, rule);\n\n vec![\n\n compiler.compile_recognizer(),\n\n compiler.compile_parser()\n\n ]\n\n }\n\n\n\n fn new(grammar: &'a TGrammar, rule: Rule) -> Self {\n\n RuleCompiler {\n\n grammar: grammar,\n\n rule: rule\n\n }\n\n }\n\n\n", "file_path": "src/liboak/back/compiler/rule.rs", "rank": 49, "score": 69157.84796142713 }, { "content": "// Copyright 2016 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse back::compiler::*;\n\nuse back::compiler::value::*;\n\n\n\npub struct RuleCompiler<'a>\n\n{\n\n grammar: &'a TGrammar,\n", "file_path": "src/liboak/back/compiler/rule.rs", "rank": 50, "score": 69148.82031837347 }, { "content": " quote!(\n\n // #![allow(unused_mut)]\n\n #[allow(unused_imports)]\n\n use oak_runtime::stream::*;\n\n #[allow(unused_imports)]\n\n use oak_runtime::str_stream::StrStream;\n\n #[allow(unused_imports)]\n\n use std::ops::Range;\n\n\n\n #(#module_content)*\n\n )\n\n }\n\n\n\n fn compile_mod_content(&self) -> Vec<syn::Item> {\n\n let mut mod_content = self.grammar.rust_items.clone();\n\n mod_content.extend(self.compile_rules().into_iter());\n\n mod_content.extend(self.grammar.rust_functions.values().cloned()\n\n .map(syn::Item::Fn));\n\n mod_content\n\n }\n\n\n\n fn compile_rules(&self) -> Vec<syn::Item> {\n\n self.grammar.rules.iter()\n\n .flat_map(|rule| RuleCompiler::compile(&self.grammar, rule.clone()).into_iter())\n\n .collect()\n\n }\n\n}\n", "file_path": "src/liboak/back/compiler/grammar.rs", "rank": 51, "score": 68116.90337166724 }, { "content": "// Copyright 2016 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\npub use middle::typing::ast::*;\n\nuse back::compiler::rule::*;\n\n\n\nuse quote::quote;\n\n\n\npub struct GrammarCompiler\n", "file_path": "src/liboak/back/compiler/grammar.rs", "rank": 52, "score": 68110.46593785987 }, { "content": "{\n\n grammar: TGrammar\n\n}\n\n\n\nimpl GrammarCompiler\n\n{\n\n pub fn compile(grammar: TGrammar) -> proc_macro2::TokenStream {\n\n let compiler = GrammarCompiler::new(grammar);\n\n let mod_content = 
compiler.compile_mod_content();\n\n let result = compiler.compile_grammar_module(mod_content);\n\n result\n\n }\n\n\n\n fn new(grammar: TGrammar) -> GrammarCompiler {\n\n GrammarCompiler {\n\n grammar: grammar\n\n }\n\n }\n\n\n\n fn compile_grammar_module(&self, module_content: Vec<syn::Item>) -> proc_macro2::TokenStream {\n", "file_path": "src/liboak/back/compiler/grammar.rs", "rank": 53, "score": 68110.05688848309 }, { "content": "\n\nimpl SpannedExprCompiler\n\n{\n\n pub fn parser(expr_idx: usize, is_ranged: bool) -> SpannedExprCompiler {\n\n SpannedExprCompiler { expr_idx, is_ranged }\n\n }\n\n}\n\n\n\nimpl CompileExpr for SpannedExprCompiler\n\n{\n\n fn compile_expr<'a>(&self, context: &mut Context<'a>,\n\n continuation: Continuation) -> syn::Expr\n\n {\n\n let lo_sp = context.next_mark_name();\n\n // The `n` next variable belongs to expr_idx so we pop the next one after these.\n\n let result = context.next_free_var_skip(self.expr_idx);\n\n\n\n let mut result_expr: syn::Expr = parse_quote!(\n\n Range { start: #lo_sp.clone(), end: state.mark() }\n\n );\n", "file_path": "src/liboak/back/compiler/spanned_expr.rs", "rank": 54, "score": 66884.21216168106 }, { "content": " if !self.is_ranged {\n\n result_expr = parse_quote!((#result_expr).stream_span());\n\n }\n\n\n\n context.push_mark(lo_sp.clone());\n\n\n\n let spanned_expr = continuation\n\n .map_success(|success, _| {\n\n parse_quote!({\n\n let #result = #result_expr;\n\n #success\n\n })\n\n })\n\n .compile_success(context, parser_compiler, self.expr_idx)\n\n .unwrap_success();\n\n context.pop_mark();\n\n parse_quote!({\n\n let #lo_sp = state.mark();\n\n #spanned_expr\n\n })\n\n }\n\n}\n", "file_path": "src/liboak/back/compiler/spanned_expr.rs", "rank": 55, "score": 66877.04071610447 }, { "content": "// Copyright 2016 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse back::compiler::*;\n\n\n\npub struct SpannedExprCompiler{\n\n expr_idx: usize,\n\n is_ranged: bool\n\n}\n", "file_path": "src/liboak/back/compiler/spanned_expr.rs", "rank": 56, "score": 66874.09365079632 }, { "content": "fn modify_path(mut path: syn::Path, prefix: &str) -> syn::Path {\n\n let segment = path.segments.last_mut().expect(\"rule path\");\n\n let x = format_ident!(\"{}_{}\", prefix, segment.ident);\n\n segment.ident = x;\n\n path\n\n}\n\n\n", "file_path": "src/liboak/back/name_factory.rs", "rank": 57, "score": 64603.76755042988 }, { "content": "struct TestDisplay\n\n{\n\n terminal: Box<term::StdoutTerminal>,\n\n num_success: u32,\n\n num_failure: u32,\n\n num_system_failure: u32\n\n}\n\n\n\nimpl TestDisplay\n\n{\n\n pub fn new() -> TestDisplay {\n\n TestDisplay{\n\n terminal: term::stdout().expect(\"Could not obtain standard output stream (stdout).\"),\n\n num_success: 0,\n\n num_failure: 0,\n\n num_system_failure: 0\n\n }\n\n }\n\n\n\n pub fn title(&mut self, msg: &str) {\n", "file_path": "tests/testoak.rs", "rank": 58, "score": 63316.33201995019 }, { "content": "struct TestEngine\n\n{\n\n test_path: 
PathBuf,\n\n grammars: Vec<GrammarInfo>,\n\n display: TestDisplay\n\n}\n\n\n\nimpl TestEngine\n\n{\n\n fn new(test_path: PathBuf) -> TestEngine\n\n {\n\n if !test_path.is_dir() {\n\n panic!(format!(\"`{}` is not a valid grammar directory.\", test_path.display()));\n\n }\n\n TestEngine{\n\n test_path: test_path,\n\n grammars: Vec::new(),\n\n display: TestDisplay::new()\n\n }\n\n }\n", "file_path": "tests/testoak.rs", "rank": 59, "score": 63316.33201995019 }, { "content": "struct Test<'a>\n\n{\n\n info: &'a GrammarInfo,\n\n display: &'a mut TestDisplay,\n\n}\n\n\n\nimpl<'a> Test<'a>\n\n{\n\n\n\n fn test_bulk_file(&mut self, start_msg: String, bulk_file: PathBuf, expectation: ExpectedResult) {\n\n self.display.info(start_msg);\n\n self.test_file(bulk_file, true, expectation);\n\n }\n\n\n\n fn test_directory(&mut self, start_msg: String, directory: PathBuf, expectation: ExpectedResult) {\n\n self.display.info(start_msg);\n\n match read_dir(&directory) {\n\n Ok(dir_entries) => {\n\n for entry in dir_entries.map(Result::unwrap).map(|entry| entry.path()) {\n\n if entry.is_file() {\n", "file_path": "tests/testoak.rs", "rank": 60, "score": 62634.60190379871 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq)]\n\nstruct WFA\n\n{\n\n can_fail: bool,\n\n can_succeed: bool,\n\n always_consume: bool,\n\n never_consume: bool,\n\n}\n\n\n\nimpl WFA\n\n{\n\n fn always_succeed(never_consume: bool) -> Self {\n\n WFA {\n\n can_fail: false,\n\n can_succeed: true,\n\n always_consume: false,\n\n never_consume\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/well_formedness.rs", "rank": 61, "score": 59559.550048015495 }, { "content": "#[test]\n\nfn test_data_directory()\n\n{\n\n let data_path = Path::new(\"data/\");\n\n if !data_path.is_dir() {\n\n panic!(format!(\"`{}` is not a valid data directory.\", data_path.display()));\n\n }\n\n let mut test_path = PathBuf::new();\n\n test_path.push(data_path);\n\n test_path.push(Path::new(\"test\"));\n\n let mut test_engine = TestEngine::new(test_path);\n\n test_engine.register(\"ntcc\", None, Box::new(\n\n |s| ntcc::recognize_ntcc(s)));\n\n test_engine.register(\"type_name\", None, Box::new(\n\n |s| type_name::recognize_type_names(s)));\n\n test_engine.register(\"calc\", None, Box::new(\n\n |s| calc::recognize_program(s)));\n\n test_engine.register(\"calc2\", None, Box::new(\n\n |s| calc2::recognize_program(s)));\n\n test_engine.register(\"calc3\", None, Box::new(\n\n |s| calc3::recognize_program(s)));\n", "file_path": "tests/testoak.rs", "rank": 62, "score": 57887.50191184736 }, { "content": "/// Returns `true` if an item can be read from the stream with `Iterator::next`.\n\npub trait HasNext\n\n{\n\n fn has_next(&self) -> bool;\n\n}\n\n\n", "file_path": "runtime/src/stream.rs", "rank": 63, "score": 56989.06804513767 }, { "content": "/// Transforms a value into a stream of type `Output`.\n\npub trait Stream\n\n{\n\n type Output;\n\n fn stream(self) -> Self::Output;\n\n}\n\n\n", "file_path": "runtime/src/stream.rs", "rank": 64, "score": 56984.84905428259 }, { "content": "/// Produces a textual representation of the current position in the stream. 
For example, it can be `2:5` if the position is at line 2 and column 5.\n\npub trait Location\n\n{\n\n fn location(&self) -> String;\n\n}\n\n\n", "file_path": "runtime/src/stream.rs", "rank": 65, "score": 56984.84905428259 }, { "content": "struct DuplicateItem<Item>\n\n{\n\n items: Vec<(Ident, Item)>,\n\n has_duplicate: bool,\n\n what_is_duplicated: String\n\n}\n\n\n\nimpl<Item> DuplicateItem<Item> where\n\n Item: ItemIdent + Spanned\n\n{\n\n pub fn analyse<ItemIter>(iter: ItemIter, item_kind: String)\n\n -> Partial<Vec<(Ident, Item)>> where\n\n ItemIter: Iterator<Item=Item>\n\n {\n\n DuplicateItem {\n\n items: vec![],\n\n has_duplicate: false,\n\n what_is_duplicated: item_kind\n\n }.populate(iter)\n\n .make()\n", "file_path": "src/liboak/middle/analysis/duplicate.rs", "rank": 66, "score": 56340.173183418614 }, { "content": "pub trait StreamSpan\n\n{\n\n type Output;\n\n fn stream_span(&self) -> Self::Output;\n\n}\n", "file_path": "runtime/src/stream.rs", "rank": 67, "score": 55746.53277501807 }, { "content": "/// Produces a code snippet of size `len_hint` or less starting from the current position in the stream.\n\npub trait CodeSnippet\n\n{\n\n fn code_snippet(&self, len_hint: usize) -> String;\n\n}\n\n\n", "file_path": "runtime/src/stream.rs", "rank": 68, "score": 55746.53277501807 }, { "content": "struct InnerPredicateOrRepeat<'a> {\n\n grammar: &'a AGrammar\n\n}\n\n\n\nimpl<'a> InnerPredicateOrRepeat<'a> {\n\n fn new(grammar: &'a AGrammar) -> Self {\n\n InnerPredicateOrRepeat { grammar }\n\n }\n\n}\n\n\n\nimpl<'a> ExprByIndex for InnerPredicateOrRepeat<'a> {\n\n fn expr_by_index(&self, index: usize) -> Expression {\n\n self.grammar.expr_by_index(index).clone()\n\n }\n\n}\n\n\n\nimpl<'a> Visitor<(PredicateOrRepeat, bool)> for InnerPredicateOrRepeat<'a> {\n\n fn visit_non_terminal_symbol(&mut self, _this: usize, rule: &Ident) -> (PredicateOrRepeat, bool) {\n\n let expr_idx = self.grammar.expr_index_of_rule(rule);\n\n (self.visit_expr(expr_idx).0, true)\n", "file_path": "src/liboak/middle/analysis/useless_chaining.rs", "rank": 69, "score": 55324.07981853867 }, { "content": "/// Consumes `prefix` if it fully matches from the current position in the stream. 
If it does not match, the stream is not altered and `false` is returned.\n\npub trait ConsumePrefix<P>\n\n{\n\n fn consume_prefix(&mut self, prefix: P) -> bool;\n\n}\n\n\n", "file_path": "runtime/src/stream.rs", "rank": 70, "score": 52961.672350350054 }, { "content": "fn warn_ignore_attr(span: Span) {\n\n span.unstable().warning(format!(\n\n \"unknown attribute: it will be ignored.\"))\n\n .emit();\n\n}\n\n\n", "file_path": "src/liboak/middle/analysis/attribute.rs", "rank": 71, "score": 49071.8003141377 }, { "content": " }\n\n\n\n fn fold_right(front: Vec<(PExpr, BinOp)>, last: PExpr) -> PExpr {\n\n front.into_iter().rev().fold(last,\n\n |accu, (expr, op)| Box::new(BinaryExpr(op, expr, accu)))\n\n }\n\n}\n\n\n\n\n\nfn analyse_state(state: ParseState<StrStream, PExpr>) {\n\n use oak_runtime::parse_state::ParseResult::*;\n\n match state.into_result() {\n\n Success(data) => println!(\"Full match: {:?}\", data),\n\n Partial(data, expectation) => {\n\n println!(\"Partial match: {:?} because: {:?}\", data, expectation);\n\n }\n\n Failure(expectation) => {\n\n println!(\"Failure: {:?}\", expectation);\n\n }\n\n }\n\n}\n\n\n\nfn main() {\n\n analyse_state(parse_program(\"2 * a\".into_state())); // Complete\n\n analyse_state(parse_program(\"2 * \".into_state())); // Partial\n\n analyse_state(parse_program(\" * a\".into_state())); // Erroneous\n\n\n\n let program1 =\n\n \"let a = 5 in \\\n\n let b = 2 in \\\n\n a^2 + b^2 + (a - b)^2 \\\n\n \";\n\n analyse_state(parse_program(program1.into_state()));\n\n\n\n let program2 =\n\n \"let a = \\\n\n let b = 7^3 in 2 * b \\\n\n in \\\n\n a^2 - (let x = a in x * 2) \\\n\n \";\n\n println!(\"{:?}\", parse_program(program2.into_state()).into_result());\n\n}\n\n```\n", "file_path": "doc/src/full-calc-grammar.md", "rank": 72, "score": 48662.410125577284 }, { "content": "# Full Calc Grammar\n\n\n\nThe following code is the grammar of the `Calc` language which is incrementally built and explained in the [previous chapter](learn-oak.md).\n\n\n\n```rust\n\nextern crate oak_runtime;\n\nuse oak_runtime::*;\n\nuse oak::oak;\n\nuse self::Expression::*;\n\nuse self::BinOp::*;\n\nuse std::str::FromStr;\n\n\n\npub type PExpr = Box<Expression>;\n\n\n\n#[derive(Debug)]\n\npub enum Expression {\n\n Variable(String),\n\n Number(u32),\n\n BinaryExpr(BinOp, PExpr, PExpr),\n\n LetIn(String, PExpr, PExpr)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum BinOp {\n\n Add, Sub, Mul, Div, Exp\n\n}\n\n\n\noak! 
{\n\n // Optional stream declaration.\n\n type Stream<'a> = StrStream<'a>;\n\n\n\n program = spacing expression\n\n\n\n expression\n\n = term (term_op term)* > fold_left\n\n\n\n term\n\n = exponent (factor_op exponent)* > fold_left\n\n\n\n exponent\n\n = (factor exponent_op)* factor > fold_right\n\n\n\n factor: PExpr\n\n = number > box Number\n\n / identifier > box Variable\n\n / let_expr > box LetIn\n\n / lparen expression rparen\n\n\n\n let_expr = let_kw let_binding in_kw expression\n\n let_binding = identifier bind_op expression\n\n\n\n term_op: BinOp\n\n = add_op > Add\n\n / sub_op > Sub\n\n\n\n factor_op: BinOp\n\n = mul_op > Mul\n\n / div_op > Div\n\n\n\n exponent_op: BinOp = exp_op > Exp\n\n\n\n identifier = !digit !keyword ident_char+ spacing > to_string\n\n ident_char = [\"a-zA-Z0-9_\"]\n\n\n\n digit = [\"0-9\"]\n\n number = digit+ spacing > to_number\n\n spacing = [\" \\n\\r\\t\"]*:(^)\n\n\n\n kw_tail = !ident_char spacing\n\n\n\n keyword = let_kw / in_kw\n\n let_kw = \"let\" kw_tail\n\n in_kw = \"in\" kw_tail\n\n\n\n bind_op = \"=\" spacing\n\n add_op = \"+\" spacing\n\n sub_op = \"-\" spacing\n\n mul_op = \"*\" spacing\n\n div_op = \"/\" spacing\n\n exp_op = \"^\" spacing\n\n lparen = \"(\" spacing\n\n rparen = \")\" spacing\n\n\n\n fn to_number(raw_text: Vec<char>) -> u32 {\n\n u32::from_str(&*to_string(raw_text)).unwrap()\n\n }\n\n\n\n fn to_string(raw_text: Vec<char>) -> String {\n\n raw_text.into_iter().collect()\n\n }\n\n\n\n fn fold_left(head: PExpr, rest: Vec<(BinOp, PExpr)>) -> PExpr {\n\n rest.into_iter().fold(head,\n\n |accu, (op, expr)| Box::new(BinaryExpr(op, accu, expr)))\n", "file_path": "doc/src/full-calc-grammar.md", "rank": 73, "score": 48660.22116083296 }, { "content": "Operator", "file_path": "data/test/type_name/run-pass/simple_name.rs", "rank": 74, "score": 41573.809710597 }, { "content": "Operator(Sum, Plus, Minus, Div)", "file_path": "data/test/type_name/run-pass/name_list.rs", "rank": 75, "score": 41573.809710597 }, { "content": "_(Plus, Minus)", "file_path": "data/test/type_name/run-pass/infer_name_list_head.rs", "rank": 76, "score": 39826.26012808076 }, { "content": " u32::from_str(&*to_string(raw_text)).unwrap()\n\n }\n\n\n\n\n\n fn to_string(raw_text: Vec<char>) -> String {\n\n raw_text.into_iter().collect()\n\n }\n\n\n\n}\n\n\n\n\n\n// fn analyse_state(state: ParseState<StrStream, json::PExpr>) {\n\n// use oak_runtime::parse_state::ParseResult::*;\n\n// match state.into_result() {\n\n// Success(data) => println!(\"Full match: {:?}\", data),\n\n// Partial(data, expectation) => {\n\n// println!(\"Partial match: {:?} because {:?}\", data, expectation);\n\n// }\n\n// Failure(expectation) => {\n\n// println!(\"Failure: {:?}\", expectation);\n", "file_path": "tests/grammars/json.rs", "rank": 77, "score": 36549.61266752591 }, { "content": " Variable(String),\n\n Number(u32),\n\n BinaryExpr(BinOp, PExpr, PExpr),\n\n LetIn(String, PExpr, PExpr)\n\n }\n\n\n\n #[derive(Debug)]\n\n pub enum BinOp {\n\n Add, Sub, Mul, Div, Exp\n\n }\n\n\n\n fn to_number(raw_text: Vec<char>) -> u32 {\n\n u32::from_str(&*to_string(raw_text)).unwrap()\n\n }\n\n\n\n fn number_expr(value: u32) -> PExpr {\n\n Box::new(Number(value))\n\n }\n\n\n\n fn variable_expr(ident: String) -> PExpr {\n", "file_path": "tests/grammars/calc.rs", "rank": 78, "score": 36545.90664621128 }, { "content": " lbracket = \"[\" spacing\n\n rbracket = \"]\" spacing\n\n lbrace = \"{\" spacing\n\n rbrace = \"}\" spacing\n\n\n\n use std::str::FromStr;\n\n\n\n pub type PExpr = Box<JSONExpr>;\n\n\n\n //Enums\n\n 
#[derive(Debug)]\n\n pub enum JSONExpr {\n\n Str(String),\n\n Number(u32),\n\n Array(Vec<Box<JSONExpr>>),\n\n Object(Option<Box<JSONPair>>)\n\n }\n\n\n\n #[derive(Debug)]\n\n pub enum JSONPair {\n", "file_path": "tests/grammars/json.rs", "rank": 79, "score": 36544.7109507986 }, { "content": " Box::new(Variable(ident))\n\n }\n\n\n\n fn to_string(raw_text: Vec<char>) -> String {\n\n raw_text.into_iter().collect()\n\n }\n\n\n\n fn fold_left(head: PExpr, rest: Vec<(BinOp, PExpr)>) -> PExpr {\n\n rest.into_iter().fold(head,\n\n |accu, (op, expr)| Box::new(BinaryExpr(op, accu, expr)))\n\n }\n\n\n\n fn fold_right(front: Vec<(PExpr, BinOp)>, last: PExpr) -> PExpr {\n\n front.into_iter().rev().fold(last,\n\n |accu, (expr, op)| Box::new(BinaryExpr(op, expr, accu)))\n\n }\n\n\n\n fn let_in_expr(var: String, value: PExpr, expr: PExpr) -> PExpr {\n\n Box::new(LetIn(var, value, expr))\n\n }\n\n\n\n fn add_bin_op() -> BinOp { Add }\n\n fn sub_bin_op() -> BinOp { Sub }\n\n fn mul_bin_op() -> BinOp { Mul }\n\n fn div_bin_op() -> BinOp { Div }\n\n fn exp_bin_op() -> BinOp { Exp }\n\n}\n", "file_path": "tests/grammars/calc.rs", "rank": 80, "score": 36542.45401625896 }, { "content": "// Copyright 2014 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse oak::oak;\n\nuse self::Expression::*;\n\nuse self::BinOp::*;\n\nuse grammars::calc2;\n\n\n\npub type PExpr = Box<Expression>;\n", "file_path": "tests/grammars/calc3.rs", "rank": 81, "score": 36541.92842668877 }, { "content": "\n\n#[derive(Debug)]\n\npub enum Expression {\n\n Variable(String),\n\n Number(u32),\n\n BinaryExpr(BinOp, PExpr, PExpr),\n\n LetIn(String, PExpr, PExpr)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum BinOp {\n\n Add, Sub, Mul, Div, Exp\n\n}\n\n\n\noak! {\n\n // Optional stream declaration.\n\n type Stream<'a> = StrStream<'a>;\n\n\n\n program = spacing expression\n\n\n", "file_path": "tests/grammars/calc2.rs", "rank": 82, "score": 36541.63441434693 }, { "content": "\n\n#[derive(Debug)]\n\npub enum Expression {\n\n Variable(String),\n\n Number(u32),\n\n BinaryExpr(BinOp, PExpr, PExpr),\n\n LetIn(String, PExpr, PExpr)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum BinOp {\n\n Add, Sub, Mul, Div, Exp\n\n}\n\n\n\noak! 
{\n\n // Optional stream declaration.\n\n type Stream<'a> = StrStream<'a>;\n\n\n\n program = spacing expression\n\n\n", "file_path": "tests/grammars/calc3.rs", "rank": 83, "score": 36541.63441434693 }, { "content": " let_kw = \"let\" kw_tail\n\n in_kw = \"in\" kw_tail\n\n\n\n bind_op = \"=\" spacing\n\n add_op = \"+\" spacing\n\n sub_op = \"-\" spacing\n\n mul_op = \"*\" spacing\n\n div_op = \"/\" spacing\n\n exp_op = \"^\" spacing\n\n lparen = \"(\" spacing\n\n rparen = \")\" spacing\n\n\n\n use std::str::FromStr;\n\n use self::Expression::*;\n\n use self::BinOp::*;\n\n\n\n pub type PExpr = Box<Expression>;\n\n\n\n #[derive(Debug)]\n\n pub enum Expression {\n", "file_path": "tests/grammars/calc.rs", "rank": 84, "score": 36540.85362228853 }, { "content": "// Copyright 2021 Pierre Talbot\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse oak::oak;\n\n\n\noak! {\n\n underscore = \"_\"\n\n digits = (underscore* digit)+ > id\n\n digit = [\"0-9\"]\n\n\n\n fn id(v: Vec<char>) -> Vec<char> {\n\n v\n\n }\n\n}", "file_path": "tests/grammars/issue94.rs", "rank": 85, "score": 36540.7242140231 }, { "content": "// {\n\n// \"servlet-name\": \"cofaxTools\",\n\n// \"servlet-class\": \"org.cofax.cms.CofaxToolsServlet\",\n\n// \"init-param\": {\n\n// \"templatePath\": \"toolstemplates/\",\n\n// \"log\": 1,\n\n// \"logLocation\": \"/usr/local/tomcat/logs/CofaxTools.log\",\n\n// \"logMaxSize\": \"\",\n\n// \"dataLog\": 1,\n\n// \"dataLogLocation\": \"/usr/local/tomcat/logs/dataLog.log\",\n\n// \"dataLogMaxSize\": \"\",\n\n// \"removePageCache\": \"/content/admin/remove?cache=pages&id=\",\n\n// \"removeTemplateCache\": \"/content/admin/remove?cache=templates&id=\",\n\n// \"fileTransferFolder\": \"/usr/local/tomcat/webapps/content/fileTransferFolder\",\n\n// \"lookInContext\": 1,\n\n// \"adminGroupID\": 4,\n\n// \"betaServer\": \"true\"}}],\n\n// \"servlet-mapping\": {\n\n// \"cofaxCDS\": \"/\",\n\n// \"cofaxEmail\": \"/cofaxutil/aemail/*\",\n", "file_path": "tests/grammars/json.rs", "rank": 86, "score": 36540.69718554007 }, { "content": "use oak::*;\n\n\n\n\n\noak! {\n\n ra = \"a\" ra / \"b\" .\n\n // rb = \"a\" rb . / \"b\" .\n\n // rc = ra . / .\n\n\n\n rd = \"a\" rd // debatable to allow this rule. However, with Partial match it can make sense...\n\n\n\n re = . 
re\n\n\n\n factor: Expr\n\n = number > Expr::Number\n\n / identifier > Expr::Variable\n\n / lparen factor rparen\n\n\n\n lparen = \"(\"\n\n rparen = \")\"\n\n number = [\"0-9\"]+\n", "file_path": "tests/grammars/typing.rs", "rank": 87, "score": 36540.643880223455 }, { "content": " identifier = [\"a-z\"]+\n\n\n\n pub enum Expr {\n\n Number(Vec<char>),\n\n Variable(Vec<char>)\n\n }\n\n\n\n rule1 = \"a\" > test2\n\n // rule2 = \"a\" > test3 // Fail due to unit type\n\n // rule3 = \"a\" > test4 // Fail due to unit type\n\n\n\n type MyUnit = ();\n\n fn test2() -> MyUnit {}\n\n fn test3() -> () {}\n\n fn test4() {}\n\n\n\n rule4 = r#\"\"foo\"\"#\n\n rule5 = r##\"foo #\"# bar\"##\n\n}\n", "file_path": "tests/grammars/typing.rs", "rank": 88, "score": 36540.50630024001 }, { "content": "// Copyright 2014 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse oak::oak;\n\nuse self::Expression::*;\n\nuse self::BinOp::*;\n\nuse std::str::FromStr;\n\n\n\npub type PExpr = Box<Expression>;\n", "file_path": "tests/grammars/calc2.rs", "rank": 89, "score": 36539.3493983688 }, { "content": "// Copyright 2015 Pierre Talbot (IRCAM)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#![allow(dead_code)]\n\n\n\npub mod ntcc;\n\npub mod type_name;\n\npub mod calc;\n\npub mod calc2;\n", "file_path": "tests/grammars/mod.rs", "rank": 90, "score": 36539.34791942514 }, { "content": " Pair(String, Box<JSONExpr>),\n\n Json(Vec<Box<JSONPair>>)\n\n }\n\n\n\n //Functions\n\n\n\n fn make_json_number(number:u32)-> Box<JSONExpr> {\n\n Box::new(JSONExpr::Number(number))\n\n }\n\n\n\n fn make_json_string(string:String) -> Box<JSONExpr> {\n\n Box::new(JSONExpr::Str(string))\n\n }\n\n\n\n fn make_json_pair(string:String, expr:Box<JSONExpr>) -> Box<JSONPair> {\n\n Box::new(JSONPair::Pair(string,expr))\n\n }\n\n\n\n fn make_json_array(array:Vec<Box<JSONExpr>>, front:Box<JSONExpr>) -> Box<JSONExpr> {\n\n let mut vector = Vec::new();\n", "file_path": "tests/grammars/json.rs", "rank": 91, "score": 36538.81653812832 }, { "content": "\n\n bind_op = \"=\" spacing\n\n add_op = \"+\" spacing\n\n sub_op = \"-\" spacing\n\n mul_op = \"*\" spacing\n\n div_op = \"/\" spacing\n\n exp_op = \"^\" spacing\n\n lparen = \"(\" spacing\n\n rparen = \")\" spacing\n\n\n\n fn to_number(raw_text: Vec<char>) -> u32 {\n\n u32::from_str(&*to_string(raw_text)).unwrap()\n\n }\n\n\n\n fn to_string(raw_text: Vec<char>) -> String {\n\n raw_text.into_iter().collect()\n\n 
}\n\n\n\n fn fold_left(head: PExpr, rest: Vec<(BinOp, PExpr)>) -> PExpr {\n\n rest.into_iter().fold(head,\n\n |accu, (op, expr)| Box::new(BinaryExpr(op, accu, expr)))\n\n }\n\n\n\n fn fold_right(front: Vec<(PExpr, BinOp)>, last: PExpr) -> PExpr {\n\n front.into_iter().rev().fold(last,\n\n |accu, (expr, op)| Box::new(BinaryExpr(op, expr, accu)))\n\n }\n\n}\n", "file_path": "tests/grammars/calc2.rs", "rank": 92, "score": 36538.778651639266 }, { "content": " div_op = \"/\" spacing\n\n exp_op = \"^\" spacing\n\n lparen = \"(\" spacing\n\n rparen = \")\" spacing\n\n\n\n fn fold_left(head: PExpr, rest: Vec<(BinOp, PExpr)>) -> PExpr {\n\n rest.into_iter().fold(head,\n\n |accu, (op, expr)| Box::new(BinaryExpr(op, accu, expr)))\n\n }\n\n\n\n fn fold_right(front: Vec<(PExpr, BinOp)>, last: PExpr) -> PExpr {\n\n front.into_iter().rev().fold(last,\n\n |accu, (expr, op)| Box::new(BinaryExpr(op, expr, accu)))\n\n }\n\n}\n", "file_path": "tests/grammars/calc3.rs", "rank": 93, "score": 36538.49262100548 }, { "content": "pub mod calc3;\n\npub mod recursive_type;\n\npub mod combinators;\n\npub mod well_formedness;\n\npub mod useless_chaining;\n\npub mod unreachable_rule;\n\npub mod issue94;\n\nmod stream_span;\n\nmod typing;\n", "file_path": "tests/grammars/mod.rs", "rank": 94, "score": 36538.341435245464 }, { "content": " for i in array{\n\n vector.push(i);\n\n }\n\n vector.push(front);\n\n Box::new(JSONExpr::Array(vector))\n\n }\n\n\n\n fn make_json_member(pair: Box<JSONPair>, rest: Vec<Box<JSONPair>>) -> Box<JSONPair> {\n\n let mut vector = vec![pair];\n\n for i in rest{\n\n vector.push(i);\n\n }\n\n Box::new(JSONPair::Json(vector))\n\n }\n\n\n\n fn make_json_object(m: Option<Box<JSONPair>>) -> Box<JSONExpr> {\n\n Box::new(JSONExpr::Object(m))\n\n }\n\n\n\n fn to_number(raw_text: Vec<char>) -> u32 {\n", "file_path": "tests/grammars/json.rs", "rank": 95, "score": 36538.07892077982 }, { "content": "// \"cachePackageTagsStore\": 200,\n\n// \"cachePackageTagsRefresh\": 60,\n\n// \"cacheTemplatesTrack\": 100,\n\n// \"cacheTemplatesStore\": 50,\n\n// \"cacheTemplatesRefresh\": 15,\n\n// \"cachePagesTrack\": 200,\n\n// \"cachePagesStore\": 100,\n\n// \"cachePagesRefresh\": 10,\n\n// \"cachePagesDirtyRead\": 10,\n\n// \"searchEngineListTemplate\": \"forSearchEnginesList.htm\",\n\n// \"searchEngineFileTemplate\": \"forSearchEngines.htm\",\n\n// \"searchEngineRobotsDb\": \"WEB-INF/robots.db\",\n\n// \"useDataStore\": \"true\",\n\n// \"dataStoreClass\": \"org.cofax.SqlDataStore\",\n\n// \"redirectionClass\": \"org.cofax.SqlRedirection\",\n\n// \"dataStoreName\": \"cofax\",\n\n// \"dataStoreDriver\": \"com.microsoft.jdbc.sqlserver.SQLServerDriver\",\n\n// \"dataStoreUrl\": \"jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon\",\n\n// \"dataStoreUser\": \"sa\",\n\n// \"dataStorePassword\": \"dataStoreTestQuery\",\n", "file_path": "tests/grammars/json.rs", "rank": 96, "score": 36537.10359575255 }, { "content": " expression\n\n = term (term_op term)* > fold_left\n\n\n\n term\n\n = exponent (factor_op exponent)* > fold_left\n\n\n\n exponent\n\n = (factor exponent_op)* factor > fold_right\n\n\n\n factor: PExpr\n\n = number > box Number\n\n / identifier > box Variable\n\n / let_expr > box LetIn\n\n / lparen expression rparen\n\n\n\n let_expr = let_kw let_binding in_kw expression\n\n let_binding = identifier bind_op expression\n\n\n\n term_op: BinOp\n\n = add_op > Add\n", "file_path": "tests/grammars/calc2.rs", "rank": 97, "score": 36536.90314038821 }, { "content": " expression\n\n = term (term_op term)* > 
fold_left\n\n\n\n term\n\n = exponent (factor_op exponent)* > fold_left\n\n\n\n exponent\n\n = (factor exponent_op)* factor > fold_right\n\n\n\n factor: Box<Expression>\n\n = calc2::number > box Number\n\n / calc2::identifier > box Variable\n\n / let_expr > box LetIn\n\n / lparen expression rparen\n\n\n\n let_expr = let_kw let_binding in_kw expression\n\n let_binding = (calc2::identifier:String) bind_op expression\n\n\n\n term_op: BinOp\n\n = add_op > Add\n", "file_path": "tests/grammars/calc3.rs", "rank": 98, "score": 36536.28239586621 }, { "content": "// Copyright 2018 Chao Lin & William Sergeant (Sorbonne University)\n\n\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nextern crate oak_runtime;\n\nuse oak_runtime::*;\n\nuse oak::oak;\n\n\n\noak! {\n\n program = spacing json_expr spacing\n", "file_path": "tests/grammars/json.rs", "rank": 99, "score": 36536.27190575664 } ]
Rust
dpx/src/dpx_dpxconf.rs
mulimoen/tectonic
bea4516e2c7b253bc92dbd0b744a233635db7b0b
/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks. Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata, the dvipdfmx project team. Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. */ #![allow( mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut )] use crate::streq_ptr; pub type __off_t = i64; pub type __off64_t = i64; pub type size_t = u64; #[derive(Copy, Clone)] #[repr(C)] pub struct paper { pub name: *const i8, pub pswidth: f64, pub psheight: f64, } #[no_mangle] pub static mut paperspecs: [paper; 22] = [ { let mut init = paper { name: b"letter\x00" as *const u8 as *const i8, pswidth: 612.00f64, psheight: 792.00f64, }; init }, { let mut init = paper { name: b"legal\x00" as *const u8 as *const i8, pswidth: 612.00f64, psheight: 1008.00f64, }; init }, { let mut init = paper { name: b"ledger\x00" as *const u8 as *const i8, pswidth: 1224.00f64, psheight: 792.00f64, }; init }, { let mut init = paper { name: b"tabloid\x00" as *const u8 as *const i8, pswidth: 792.00f64, psheight: 1224.00f64, }; init }, { let mut init = paper { name: b"a6\x00" as *const u8 as *const i8, pswidth: 297.638f64, psheight: 419.528f64, }; init }, { let mut init = paper { name: b"a5\x00" as *const u8 as *const i8, pswidth: 419.528f64, psheight: 595.276f64, }; init }, { let mut init = paper { name: b"a4\x00" as *const u8 as *const i8, pswidth: 595.276f64, psheight: 841.890f64, }; init }, { let mut init = paper { name: b"a3\x00" as *const u8 as *const i8, pswidth: 841.890f64, psheight: 1190.550f64, }; init }, { let mut init = paper { name: b"b6\x00" as *const u8 as *const i8, pswidth: 364.25f64, psheight: 515.91f64, }; init }, { let mut init = paper { name: b"b5\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 728.50f64, }; init }, { let mut init = paper { name: b"b4\x00" as *const u8 as *const i8, pswidth: 728.50f64, psheight: 1031.81f64, }; init }, { let mut init = paper { name: b"b3\x00" as *const u8 as *const i8, pswidth: 1031.81f64, psheight: 1457.00f64, }; init }, { let mut init = paper { name: b"b5var\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 651.97f64, }; init }, { let mut init = paper { name: b"jisb6\x00" as *const u8 as *const i8, pswidth: 364.25f64, psheight: 515.91f64, }; init }, { let mut init = paper { name: b"jisb5\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 728.50f64, }; init }, { let mut init = paper { name: b"jisb4\x00" as *const u8 as *const i8, pswidth: 728.50f64, psheight: 1031.81f64, }; init }, { let mut init = paper { name: b"jisb3\x00" as *const u8 as *const i8, pswidth: 1031.81f64, psheight: 1457.00f64, }; init }, { let mut init = paper { name: b"isob6\x00" as *const u8 as *const i8, pswidth: 354.331f64, psheight: 498.898f64, }; init }, { let mut init = paper { name: b"isob5\x00" as *const u8 as *const i8, 
pswidth: 498.898f64, psheight: 708.661f64, }; init }, { let mut init = paper { name: b"isob4\x00" as *const u8 as *const i8, pswidth: 708.661f64, psheight: 1000.630f64, }; init }, { let mut init = paper { name: b"isob3\x00" as *const u8 as *const i8, pswidth: 1000.630f64, psheight: 1417.320f64, }; init }, { let mut init = paper { name: 0 as *const i8, pswidth: 0i32 as f64, psheight: 0i32 as f64, }; init }, ]; #[no_mangle] pub unsafe extern "C" fn paperinfo(mut ppformat: *const i8) -> *const paper { if ppformat.is_null() { return 0 as *const paper; } let mut ppinfo = &*paperspecs.as_ptr().offset(0) as *const paper; while !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 }) .is_null() { if streq_ptr(ppformat, (*ppinfo).name) { break; } ppinfo = if !ppinfo.offset(1).is_null() && !(*ppinfo.offset(1)).name.is_null() { ppinfo.offset(1) } else { 0 as *const paper } } return if !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 }) .is_null() { ppinfo } else { 0 as *const paper }; } /* HAVE_LIBPAPER */ /* HAVE_LIBPAPER */ #[no_mangle] pub unsafe extern "C" fn dumppaperinfo() { let mut ppinfo = &*paperspecs.as_ptr().offset(0) as *const paper; while !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 }) .is_null() { let wd = if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).pswidth } else { 0.0f64 }; let ht = if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).psheight } else { 0.0f64 }; println!( "{}: {:.2} {:.2} ({:.2}mm {:.2}mm)", if !ppinfo.is_null() && !(*ppinfo).name.is_null() { use std::ffi::CStr; let name = CStr::from_ptr((*ppinfo).name); name.to_string_lossy() } else { use std::borrow::Cow; Cow::Borrowed("(null)") }, wd, ht, 25.4f64 * wd / 72.0f64, 25.4f64 * ht / 72.0f64, ); ppinfo = if !ppinfo.offset(1).is_null() && !(*ppinfo.offset(1)).name.is_null() { ppinfo.offset(1) } else { 0 as *const paper } } }
/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks. Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata, the dvipdfmx project team. Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. */ #![allow( mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut )] use crate::streq_ptr; pub type __off_t = i64; pub type __off64_t = i64; pub type size_t = u64; #[derive(Copy, Clone)] #[repr(C)] pub struct paper { pub name: *const i8, pub pswidth: f64, pub psheight: f64, } #[no_mangle] pub static mut paperspecs: [paper; 22] = [ { let mut init = paper { name: b"letter\x00" as *const u8 as *const i8, pswidth: 612.00f64, psheight: 792.00f64, }; init }, { let mut init = paper { name: b"legal\x00" as *const u8 as *const i8, pswidth: 612.00f64, psheight: 1008.00f64, }; init }, { let mut init = paper { name: b"ledger\x00" as *const u8 as *const i8, pswidth: 1224.00f64, psheight: 792.00f64, }; init }, { let mut init = paper { name: b"tabloid\x00" as *const u8 as *const i8, pswidth: 792.00f64, psheight: 1224.00f64, }; init }, { let mut init = paper { name: b"a6\x00" as *const u8 as *const i8, pswidth: 297.638f64, psheight: 419.528f64, }; init }, { let mut init = paper { name: b"a5\x00" as *const u8 as *const i8, pswidth: 419.528f64, psheight: 595.276f64, }; init }, { let mut init = paper { name: b"a4\x00" as *const u8 as
.is_null() { if streq_ptr(ppformat, (*ppinfo).name) { break; } ppinfo = if !ppinfo.offset(1).is_null() && !(*ppinfo.offset(1)).name.is_null() { ppinfo.offset(1) } else { 0 as *const paper } } return if !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 }) .is_null() { ppinfo } else { 0 as *const paper }; } /* HAVE_LIBPAPER */ /* HAVE_LIBPAPER */ #[no_mangle] pub unsafe extern "C" fn dumppaperinfo() { let mut ppinfo = &*paperspecs.as_ptr().offset(0) as *const paper; while !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 }) .is_null() { let wd = if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).pswidth } else { 0.0f64 }; let ht = if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).psheight } else { 0.0f64 }; println!( "{}: {:.2} {:.2} ({:.2}mm {:.2}mm)", if !ppinfo.is_null() && !(*ppinfo).name.is_null() { use std::ffi::CStr; let name = CStr::from_ptr((*ppinfo).name); name.to_string_lossy() } else { use std::borrow::Cow; Cow::Borrowed("(null)") }, wd, ht, 25.4f64 * wd / 72.0f64, 25.4f64 * ht / 72.0f64, ); ppinfo = if !ppinfo.offset(1).is_null() && !(*ppinfo.offset(1)).name.is_null() { ppinfo.offset(1) } else { 0 as *const paper } } }
*const i8, pswidth: 595.276f64, psheight: 841.890f64, }; init }, { let mut init = paper { name: b"a3\x00" as *const u8 as *const i8, pswidth: 841.890f64, psheight: 1190.550f64, }; init }, { let mut init = paper { name: b"b6\x00" as *const u8 as *const i8, pswidth: 364.25f64, psheight: 515.91f64, }; init }, { let mut init = paper { name: b"b5\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 728.50f64, }; init }, { let mut init = paper { name: b"b4\x00" as *const u8 as *const i8, pswidth: 728.50f64, psheight: 1031.81f64, }; init }, { let mut init = paper { name: b"b3\x00" as *const u8 as *const i8, pswidth: 1031.81f64, psheight: 1457.00f64, }; init }, { let mut init = paper { name: b"b5var\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 651.97f64, }; init }, { let mut init = paper { name: b"jisb6\x00" as *const u8 as *const i8, pswidth: 364.25f64, psheight: 515.91f64, }; init }, { let mut init = paper { name: b"jisb5\x00" as *const u8 as *const i8, pswidth: 515.91f64, psheight: 728.50f64, }; init }, { let mut init = paper { name: b"jisb4\x00" as *const u8 as *const i8, pswidth: 728.50f64, psheight: 1031.81f64, }; init }, { let mut init = paper { name: b"jisb3\x00" as *const u8 as *const i8, pswidth: 1031.81f64, psheight: 1457.00f64, }; init }, { let mut init = paper { name: b"isob6\x00" as *const u8 as *const i8, pswidth: 354.331f64, psheight: 498.898f64, }; init }, { let mut init = paper { name: b"isob5\x00" as *const u8 as *const i8, pswidth: 498.898f64, psheight: 708.661f64, }; init }, { let mut init = paper { name: b"isob4\x00" as *const u8 as *const i8, pswidth: 708.661f64, psheight: 1000.630f64, }; init }, { let mut init = paper { name: b"isob3\x00" as *const u8 as *const i8, pswidth: 1000.630f64, psheight: 1417.320f64, }; init }, { let mut init = paper { name: 0 as *const i8, pswidth: 0i32 as f64, psheight: 0i32 as f64, }; init }, ]; #[no_mangle] pub unsafe extern "C" fn paperinfo(mut ppformat: *const i8) -> *const paper { if ppformat.is_null() { return 0 as *const paper; } let mut ppinfo = &*paperspecs.as_ptr().offset(0) as *const paper; while !ppinfo.is_null() && !(if !ppinfo.is_null() && !(*ppinfo).name.is_null() { (*ppinfo).name } else { 0 as *const i8 })
random
[ { "content": "pub fn hex_to_bytes(text: &str, dest: &mut [u8]) -> Result<()> {\n\n let n = dest.len();\n\n let text_len = text.len();\n\n\n\n if text_len != 2 * n {\n\n return Err(ErrorKind::BadLength(2 * n, text_len).into());\n\n }\n\n\n\n for i in 0..n {\n\n dest[i] = u8::from_str_radix(&text[i * 2..(i + 1) * 2], 16)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// The specific implementation we're using: SHA256.\n\n\n\nconst N_BYTES: usize = 32;\n\npub const DIGEST_NAME: &str = \"SHA256SUM\";\n\npub const DIGEST_LEN: usize = 64;\n\n\n", "file_path": "src/digest.rs", "rank": 0, "score": 190693.3822029074 }, { "content": "type synctex_recorder_t = Option<unsafe extern \"C\" fn(_: i32) -> ()>;\n\n\n\nconst default_synctex_ctxt: Context = Context {\n\n file: None,\n\n root_name: ptr::null_mut(),\n\n count: 0i32,\n\n node: 0i32,\n\n recorder: None,\n\n tag: 0i32,\n\n line: 0i32,\n\n curh: 0i32,\n\n curv: 0i32,\n\n magnification: 0i32,\n\n unit: 0i32,\n\n total_length: 0,\n\n lastv: -1i32,\n\n form_depth: 0i32,\n\n synctex_tag_counter: 0_u32,\n\n flags: Flags::empty(),\n\n};\n", "file_path": "engine/src/xetex_synctex.rs", "rank": 1, "score": 187574.89352690981 }, { "content": "pub fn agl_name_is_unicode(glyphname: &[u8]) -> bool {\n\n if glyphname.is_empty() {\n\n return false;\n\n }\n\n let len = glyphname\n\n .iter()\n\n .position(|&x| x == b'.')\n\n .unwrap_or(glyphname.len());\n\n /*\n\n * uni02ac is invalid glyph name and mapped to th empty string.\n\n */\n\n if len >= 7 && (len - 3) % 4 == 0 && glyphname.starts_with(b\"uni\") {\n\n let c = glyphname[3];\n\n /*\n\n * Check if the 4th character is uppercase hexadecimal digit.\n\n * \"union\" should not be treated as Unicode glyph name.\n\n */\n\n if c.is_ascii_digit() || c >= b'A' && c <= b'F' {\n\n return true;\n\n } else {\n", "file_path": "dpx/src/dpx_agl.rs", "rank": 2, "score": 184437.18106554932 }, { "content": "#[inline(always)]\n\nfn as_printable_ascii(c: i32) -> Option<u8> {\n\n if c > 0x20 && c < 0x7F {\n\n Some(c as u8)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl<'a, 'b: 'a> State<'a, 'b> {\n\n pub fn finished(self) {\n\n if let Some(oh) = self.cur_output {\n\n let (name, digest) = oh.into_name_digest();\n\n self.events.output_closed(name, digest);\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, 'b: 'a> XdvEvents for State<'a, 'b> {\n\n type Error = Error;\n\n\n", "file_path": "src/engines/spx2html.rs", "rank": 3, "score": 162092.50599737326 }, { "content": "fn scan_otl_tag(mut otl_tags: &[u8]) -> Result<(Vec<u8>, Vec<u8>, Vec<u8>), ()> {\n\n let mut script;\n\n let mut language = vec![b' '; 4];\n\n if otl_tags.is_empty() {\n\n return Err(());\n\n }\n\n /* First parse otl_tags variable */\n\n let mut p = otl_tags;\n\n\n\n if let Some(slen) = p.iter().position(|&x| x == b'.') {\n\n /* Format scrp.lang.feat */\n\n if slen < 5 {\n\n script = Vec::from(&p[..slen]);\n\n } else {\n\n warn!(\"Invalid OTL script tag found: {}\", p.display());\n\n return Err(());\n\n }\n\n p = &p[slen + 1..];\n\n if let Some(llen) = p.iter().position(|&x| x == b'.') {\n\n /* Now lang part */\n", "file_path": "dpx/src/dpx_tt_gsub.rs", "rank": 4, "score": 152930.38468736532 }, { "content": "type id = *mut Object;\n\n\n\nextern \"C\" {\n\n pub type __CFAllocator;\n\n pub type __CFURL;\n\n pub type __CTFontDescriptor;\n\n pub type __CFString;\n\n pub type __CFArray;\n\n pub type __CFDictionary;\n\n pub type __CFSet;\n\n pub type __CTFont;\n\n #[no_mangle]\n\n fn free(_: *mut libc::c_void);\n\n #[no_mangle]\n\n fn malloc(_: libc::c_ulong) -> *mut libc::c_void;\n\n #[no_mangle]\n\n 
fn strchr(_: *const libc::c_char, _: libc::c_int) -> *mut libc::c_char;\n\n #[no_mangle]\n\n fn strlen(_: *const libc::c_char) -> libc::c_ulong;\n\n #[no_mangle]\n", "file_path": "engine/src/xetex_font_manager_coretext.rs", "rank": 5, "score": 149475.96920560196 }, { "content": "type rust_output_handle_t = *mut libc::c_void;\n\npub type rust_input_handle_t = *mut libc::c_void;\n\n\n\n#[derive(PartialEq)]\n\npub struct OutputHandleWrapper(NonNull<libc::c_void>);\n\n\n\nimpl OutputHandleWrapper {\n\n pub fn new(ptr: rust_output_handle_t) -> Option<Self> {\n\n NonNull::new(ptr).map(|nnp| Self(nnp))\n\n }\n\n}\n\n\n\nimpl Write for OutputHandleWrapper {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n unsafe {\n\n Ok(\n\n ttstub_output_write(self.0.as_ptr(), buf.as_ptr() as *const i8, buf.len() as u64)\n\n as usize,\n\n )\n\n }\n", "file_path": "bridge/src/lib.rs", "rank": 6, "score": 144489.09819678107 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct SharedByteBuffer(Rc<Vec<u8>>);\n\n\n\nimpl SharedByteBuffer {\n\n fn new(data: Vec<u8>) -> SharedByteBuffer {\n\n SharedByteBuffer(Rc::new(data))\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for SharedByteBuffer {\n\n fn as_ref(&self) -> &[u8] {\n\n &*self.0\n\n }\n\n}\n\n\n\nimpl InputFeatures for Cursor<SharedByteBuffer> {\n\n fn get_size(&mut self) -> Result<usize> {\n\n Ok(self.get_ref().0.len())\n\n }\n\n\n\n fn try_seek(&mut self, pos: SeekFrom) -> Result<u64> {\n", "file_path": "src/io/stdstreams.rs", "rank": 7, "score": 144393.9015064857 }, { "content": "pub fn bytes_to_hex(bytes: &[u8]) -> String {\n\n bytes\n\n .iter()\n\n .map(|b| format!(\"{:02x}\", b))\n\n .collect::<Vec<_>>()\n\n .concat()\n\n}\n\n\n", "file_path": "src/digest.rs", "rank": 8, "score": 142833.1413145302 }, { "content": "fn xtol(mut buf: &[u8]) -> i32 {\n\n let mut v: i32 = 0i32;\n\n for &b in buf {\n\n v <<= 4;\n\n if b.is_ascii_digit() {\n\n v += (b - b'0') as i32;\n\n } else if b >= b'A' && b <= b'F' {\n\n v += (b - b'A' + 10) as i32;\n\n } else {\n\n return -1;\n\n }\n\n }\n\n v\n\n}\n\n\n\nunsafe fn put_unicode_glyph(name: &[u8], mut dstpp: *mut *mut u8, mut limptr: *mut u8) -> i32 {\n\n let mut len = 0;\n\n let mut p = name;\n\n if p[1] != b'n' {\n\n p = &p[1..];\n", "file_path": "dpx/src/dpx_agl.rs", "rank": 9, "score": 140514.51107537074 }, { "content": "fn skip_modifier<'a>(buf: &'a [u8]) -> (&'a [u8], usize) {\n\n let mut slen = 0;\n\n for s in MODIFIERS.iter() {\n\n if buf.starts_with(s) {\n\n slen = s.len();\n\n break;\n\n }\n\n }\n\n (&buf[slen..], slen)\n\n}\n", "file_path": "dpx/src/dpx_agl.rs", "rank": 10, "score": 138254.28482163395 }, { "content": "pub fn get_unique_time_if_given() -> Option<SystemTime> {\n\n use std::time::Duration;\n\n\n\n let env = std::env::var(\"SOURCE_DATE_EPOCH\");\n\n\n\n env.ok()\n\n .map(|x| {\n\n x.trim()\n\n .parse::<u64>()\n\n .ok()\n\n .map(|x| SystemTime::UNIX_EPOCH.checked_add(Duration::new(x, 0)))\n\n })\n\n .unwrap_or(None)\n\n .unwrap_or(None)\n\n}\n\n\n\nstatic mut unique_tag_state: i32 = 1i32;\n\nstatic mut unique_tags_deterministic: i32 = 0i32;\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn pdf_font_reset_unique_tag_state() {\n", "file_path": "dpx/src/dpx_pdffont.rs", "rank": 11, "score": 137305.19741023888 }, { "content": "/// A hack to allow casting of Bundles to IoProviders.\n\n///\n\n/// The code that sets up the I/O stack is handed a reference to a Bundle\n\n/// trait object. For the actual I/O, it needs to convert this to an\n\n/// IoProvider trait object. 
[According to\n\n/// StackExchange](https://stackoverflow.com/a/28664881/3760486), the\n\n/// following pattern is the least-bad way to achieve the necessary upcasting.\n\npub trait AsIoProviderMut {\n\n /// Represent this value as an IoProvider trait object.\n\n fn as_ioprovider_mut(&mut self) -> &mut dyn IoProvider;\n\n}\n\n\n\nimpl<T: IoProvider> AsIoProviderMut for T {\n\n fn as_ioprovider_mut(&mut self) -> &mut dyn IoProvider {\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/io/mod.rs", "rank": 12, "score": 134236.3886603842 }, { "content": "fn convert_tag(tag: &mut [u8; 4], u_tag: u32) {\n\n *tag = u_tag.to_be_bytes();\n\n}\n\n\n\n/*\n\n * Computes the max power of 2 <= n\n\n */\n\nunsafe fn max2floor(mut n: u32) -> u32 {\n\n let mut val: i32 = 1i32;\n\n while n > 1_u32 {\n\n n = n.wrapping_div(2_u32);\n\n val *= 2i32\n\n }\n\n val as u32\n\n}\n\n/*\n\n * Computes the log2 of the max power of 2 <= n\n\n */\n\nunsafe fn log2floor(mut n: u32) -> u32 {\n\n let mut val: u32 = 0_u32;\n", "file_path": "dpx/src/dpx_sfnt.rs", "rank": 13, "score": 129622.98698968318 }, { "content": "/// A trait for types that can read or write files needed by the TeX engine.\n\npub trait IoProvider: AsIoProviderMut {\n\n fn output_open_name(&mut self, _name: &OsStr) -> OpenResult<OutputHandle> {\n\n OpenResult::NotAvailable\n\n }\n\n\n\n fn output_open_stdout(&mut self) -> OpenResult<OutputHandle> {\n\n OpenResult::NotAvailable\n\n }\n\n\n\n fn input_open_name(\n\n &mut self,\n\n _name: &OsStr,\n\n _status: &mut dyn StatusBackend,\n\n ) -> OpenResult<InputHandle> {\n\n OpenResult::NotAvailable\n\n }\n\n\n\n /// Open the \"primary\" input file, which in the context of TeX is the main\n\n /// input that it's given. When the build is being done using the\n\n /// filesystem and the input is a file on the filesystem, this function\n", "file_path": "src/io/mod.rs", "rank": 14, "score": 125836.00530370105 }, { "content": "/// Compile LaTeX text to a PDF.\n\n///\n\n/// This function is an all-in-one interface to the main Tectonic workflow. Given\n\n/// a string representing a LaTeX input file, it will compile it to a PDF and return\n\n/// a byte vector corresponding to the resulting file:\n\n///\n\n/// ```\n\n/// let latex = r#\"\n\n/// \\documentclass{article}\n\n/// \\begin{document}\n\n/// Hello, world!\n\n/// \\end{document}\n\n/// \"#;\n\n///\n\n/// # tectonic::test_util::activate_test_mode_augmented(env!(\"CARGO_MANIFEST_DIR\"));\n\n/// let pdf_data: Vec<u8> = tectonic::latex_to_pdf(latex).expect(\"processing failed\");\n\n/// println!(\"Output PDF size is {} bytes\", pdf_data.len());\n\n/// ```\n\n///\n\n/// The compilation uses the default bundle, the location of which is embedded\n\n/// in the crate or potentially specified in the user’s configuration file.\n\n/// The current working directory will be searched for any `\\\\input` files.\n\n/// Messages aimed at the user are suppressed, but (in the default\n\n/// configuration) network I/O may occur to pull down needed resource files.\n\n/// No outputs are written to disk; all supporting files besides the PDF\n\n/// document are discarded. The XeTeX engine is run multiple times if needed\n\n/// to get the output file to converge.\n\n///\n\n/// For more sophisticated uses, use the [`driver`] module, which provides a\n\n/// high-level interface for driving the typesetting engines with much more\n\n/// control over their behavior.\n\n///\n\n/// Note that the current engine implementations use lots of global state, so\n\n/// they are not thread-safe. 
This crate uses a global mutex to serialize\n\n/// invocations of the engines. This means that if you call this function from\n\n/// multiple threads simultaneously, the bulk of the work will be done in\n\n/// serial. The aim is to lift this limitation one day, but it will require\n\n/// extensive work on the underlying C/C++ code.\n\npub fn latex_to_pdf<T: AsRef<str>>(latex: T) -> Result<Vec<u8>> {\n\n use std::ffi::OsStr;\n\n\n\n let mut status = status::NoopStatusBackend::new();\n\n\n\n let auto_create_config_file = false;\n\n let config = ctry!(config::PersistentConfig::open(auto_create_config_file);\n\n \"failed to open the default configuration file\");\n\n\n\n let only_cached = false;\n\n let bundle = ctry!(config.default_bundle(only_cached, &mut status);\n\n \"failed to load the default resource bundle\");\n\n\n\n let format_cache_path = ctry!(config.format_cache_path();\n\n \"failed to set up the format cache\");\n\n\n\n let mut files = {\n\n // Looking forward to non-lexical lifetimes!\n\n let mut sb = driver::ProcessingSessionBuilder::default();\n\n sb.bundle(bundle)\n", "file_path": "src/lib.rs", "rank": 15, "score": 118926.19429242914 }, { "content": "\tUInt32\tversion;\t\t\t\t/* version = kCurrentTableVersion */\n", "file_path": "engine/tectonic/teckit-Format.h", "rank": 16, "score": 90631.83220267965 }, { "content": "\t\tUInt8\ttype;\t\t\t\t/* see kRepElem_... below */\n", "file_path": "engine/tectonic/teckit-Format.h", "rank": 17, "score": 90336.33542817865 }, { "content": "fn skip_capital<'a>(p: &'a [u8]) -> (&'a [u8], usize) {\n\n if p.starts_with(b\"AE\") || p.starts_with(b\"OE\") {\n\n (&p[2..], 2)\n\n } else if p.starts_with(b\"Eth\") {\n\n (&p[3..], 3)\n\n } else if p.starts_with(b\"Thorn\") {\n\n (&p[5..], 5)\n\n } else if p.len() >= 1 {\n\n if p[0].is_ascii_uppercase() {\n\n (&p[1..], 1)\n\n } else {\n\n (p, 0)\n\n }\n\n } else {\n\n (p, 0)\n\n }\n\n}\n", "file_path": "dpx/src/dpx_agl.rs", "rank": 18, "score": 89139.80987969796 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct FileSummary {\n\n write_digest: Option<DigestData>,\n\n}\n\n\n\nimpl FileSummary {\n\n fn new() -> FileSummary {\n\n FileSummary { write_digest: None }\n\n }\n\n}\n\n\n", "file_path": "tests/formats.rs", "rank": 19, "score": 89050.6699213537 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct State {\n\n pub h: i32,\n\n pub v: i32,\n\n pub w: i32,\n\n pub x: i32,\n\n pub y: i32,\n\n pub z: i32,\n\n}\n\n\n\nimpl State {\n\n pub fn new() -> Self {\n\n State {\n\n h: 0,\n\n v: 0,\n\n w: 0,\n\n x: 0,\n\n y: 0,\n\n z: 0,\n\n }\n\n }\n\n}\n\n\n\n/// A simple cursor on a buffer.\n", "file_path": "xdv/src/lib.rs", "rank": 20, "score": 89050.6699213537 }, { "content": "struct TestCase {\n\n stem: String,\n\n}\n\n\n\nimpl TestCase {\n\n fn new(stem: &str) -> Self {\n\n TestCase {\n\n stem: stem.to_owned(),\n\n }\n\n }\n\n\n\n fn go(&mut self) {\n\n util::set_test_root();\n\n\n\n let mut p = test_path(&[\"bibtex\"]);\n\n\n\n p.push(&self.stem);\n\n\n\n p.set_extension(\"aux\");\n\n let auxname = p.file_name().unwrap().to_str().unwrap().to_owned();\n", "file_path": "tests/bibtex.rs", "rank": 21, "score": 89043.98910440826 }, { "content": "struct Stats {}\n\n\n\nimpl Stats {\n\n pub fn new() -> Self {\n\n Stats {}\n\n }\n\n}\n\n\n\nimpl tectonic_xdv::XdvEvents for Stats {\n\n type Error = Error;\n\n\n\n fn handle_header(&mut self, filetype: FileType, comment: &[u8]) -> Result<(), Self::Error> {\n\n println!(\"file type: {}\", filetype);\n\n\n\n match str::from_utf8(comment) 
{\n\n Ok(s) => {\n\n println!(\"comment: {}\", s);\n\n }\n\n Err(e) => {\n\n println!(\"cannot parse comment: {}\", e);\n", "file_path": "xdv/examples/xdvdump.rs", "rank": 22, "score": 89043.98910440826 }, { "content": "#[derive(Clone, Debug)]\n\nstruct TarIndex {\n\n tar: Vec<u8>,\n\n index: Vec<u8>,\n\n map: HashMap<(u64, u64), String>,\n\n}\n\n\n\nimpl TarIndex {\n\n fn from_dir<P: AsRef<Path>>(path: P) -> io::Result<TarIndex> {\n\n let path = path.as_ref();\n\n let mut builder = TarIndexBuilder::new();\n\n for de in path.read_dir()? {\n\n let path = de?.path();\n\n let content = fs::read(&path)?;\n\n builder.push(path.file_name().unwrap().to_str().unwrap(), &content);\n\n }\n\n\n\n builder.push(\n\n tectonic::digest::DIGEST_NAME,\n\n b\"0000000000000000000000000000000000000000000000000000000000000000\",\n\n );\n\n\n\n Ok(builder.finish())\n\n }\n\n}\n\n\n", "file_path": "tests/cached_itarbundle.rs", "rank": 23, "score": 87468.12953099453 }, { "content": "#[derive(Clone, Debug)]\n\nstruct VcPkgState {\n\n include_paths: Vec<PathBuf>,\n\n}\n\n\n\n/// State for discovering and managing our dependencies, which may vary\n\n/// depending on the framework that we're using to discover them.\n\n///\n\n/// The basic gameplan is that we probe our dependencies to check that they're\n\n/// available and pull out the C/C++ include directories; then we emit info\n\n/// for building our C/C++ libraries; then we emit info for our dependencies.\n\n/// Building stuff pretty much always requires some level of hackery, though,\n\n/// so we don't try to be purist about the details.\n", "file_path": "engine/build.rs", "rank": 24, "score": 87468.12953099453 }, { "content": "struct TestCase {\n\n stem: String,\n\n expected_result: Result<TexResult>,\n\n check_synctex: bool,\n\n check_pdf: bool,\n\n extra_io: Vec<Box<dyn IoProvider>>,\n\n}\n\n\n\nimpl TestCase {\n\n fn new(stem: &str) -> Self {\n\n TestCase {\n\n stem: stem.to_owned(),\n\n expected_result: Ok(TexResult::Spotless),\n\n check_synctex: false,\n\n check_pdf: false,\n\n extra_io: Vec::new(),\n\n }\n\n }\n\n\n\n fn check_synctex(&mut self, check_synctex: bool) -> &mut Self {\n", "file_path": "tests/tex-outputs.rs", "rank": 25, "score": 87461.3050429533 }, { "content": "struct Colordef {\n\n key: &'static str,\n\n color: PdfColor,\n\n}\n\n\n\nimpl Colordef {\n\n const fn new(key: &'static str, color: PdfColor) -> Self {\n\n Colordef { key, color }\n\n }\n\n}\n\n\n\nconst COLORDEFS: [Colordef; 68] = [\n\n Colordef::new(\"GreenYellow\", PdfColor::Cmyk(0.15, 0.0, 0.69, 0.0)),\n\n Colordef::new(\"Yellow\", PdfColor::Cmyk(0.0, 0.0, 1.0, 0.0)),\n\n Colordef::new(\"Goldenrod\", PdfColor::Cmyk(0.0, 0.1, 0.84, 0.0)),\n\n Colordef::new(\"Dandelion\", PdfColor::Cmyk(0.0, 0.29, 0.84, 0.0)),\n\n Colordef::new(\"Apricot\", PdfColor::Cmyk(0.0, 0.32, 0.52, 0.0)),\n\n Colordef::new(\"Peach\", PdfColor::Cmyk(0.0, 0.5, 0.7, 0.0)),\n\n Colordef::new(\"Melon\", PdfColor::Cmyk(0.0, 0.46, 0.5, 0.0)),\n\n Colordef::new(\"YellowOrange\", PdfColor::Cmyk(0.0, 0.42, 1.0, 0.0)),\n", "file_path": "dpx/src/specials/util.rs", "rank": 26, "score": 87461.3050429533 }, { "content": "#[derive(Debug)]\n\nstruct PkgConfigState {\n\n libs: pkg_config::Library,\n\n}\n\n\n\n// Need a way to check that the vcpkg harfbuzz port has graphite2 and icu options enabled.\n\n#[cfg(not(target_os = \"macos\"))]\n\nconst VCPKG_LIBS: &[&'static str] = &[\"fontconfig\", \"harfbuzz\", \"freetype\", \"graphite2\"];\n\n\n\n#[cfg(target_os = \"macos\")]\n\nconst VCPKG_LIBS: &[&'static str] = &[\"harfbuzz\", 
\"freetype\", \"graphite2\"];\n\n\n\n/// Build-script state when using vcpkg as the backend.\n", "file_path": "engine/build.rs", "rank": 27, "score": 87461.3050429533 }, { "content": "#[derive(Copy, Clone)]\n\n#[repr(C, packed(2))]\n\nstruct FixedPoint {\n\n x: Fixed,\n\n y: Fixed,\n\n}\n\n\n\npub type str_number = int32_t;\n\n/* tectonic/core-strutils.h: miscellaneous C string utilities\n\n Copyright 2016-2018 the Tectonic Project\n\n Licensed under the MIT License.\n\n*/\n\n/* Note that we explicitly do *not* change this on Windows. For maximum\n\n * portability, we should probably accept *either* forward or backward slashes\n\n * as directory separators. */\n\nuse crate::{streq_ptr, strstartswith};\n\n/* ***************************************************************************\\\n\n Part of the XeTeX typesetting system\n\n Copyright (c) 1994-2008 by SIL International\n\n Copyright (c) 2009 by Jonathan Kew\n\n Copyright (c) 2012, 2013 by Jiang Jiang\n\n Copyright (c) 2012-2015 by Khaled Hosny\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 28, "score": 85981.87260048195 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct FileInfo {\n\n offset: u64,\n\n length: u64,\n\n}\n\n\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 29, "score": 85975.570205366 }, { "content": "#[derive(Clone, Debug)]\n\nstruct CacheContent {\n\n digest_text: String,\n\n redirect_url: String,\n\n index: HashMap<String, FileInfo>,\n\n}\n\n\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 30, "score": 85968.85934645252 }, { "content": "#[repr(C)]\n\nstruct TectonicBridgeApi {\n\n context: *const libc::c_void,\n\n issue_warning: *const libc::c_void,\n\n issue_error: *const libc::c_void,\n\n get_file_md5: *const libc::c_void,\n\n get_data_md5: *const libc::c_void,\n\n output_open: *const libc::c_void,\n\n output_open_stdout: *const libc::c_void,\n\n output_putc: *const libc::c_void,\n\n output_write: *const libc::c_void,\n\n output_flush: *const libc::c_void,\n\n output_close: *const libc::c_void,\n\n input_open: *const libc::c_void,\n\n input_open_primary: *const libc::c_void,\n\n input_get_size: *const libc::c_void,\n\n input_seek: *const libc::c_void,\n\n input_read: *const libc::c_void,\n\n input_getc: *const libc::c_void,\n\n input_ungetc: *const libc::c_void,\n\n input_close: *const libc::c_void,\n", "file_path": "src/engines/mod.rs", "rank": 31, "score": 85968.71472211597 }, { "content": "struct TarIndexService {\n\n tar_index: Mutex<TarIndex>,\n\n requests: Mutex<Vec<TectonicRequest>>,\n\n local_addr: Mutex<Option<SocketAddr>>,\n\n}\n\n\n", "file_path": "tests/cached_itarbundle.rs", "rank": 32, "score": 85962.0348584113 }, { "content": "struct MemoryIoItem {\n\n // TODO: smarter buffering structure than Vec<u8>? E.g., linked list of 4k\n\n // chunks or something. 
In the current scheme reallocations will get\n\n // expensive.\n\n files: Rc<RefCell<HashMap<OsString, Vec<u8>>>>,\n\n name: OsString,\n\n state: Cursor<Vec<u8>>,\n\n}\n\n\n\nimpl MemoryIoItem {\n\n pub fn new(\n\n files: &Rc<RefCell<HashMap<OsString, Vec<u8>>>>,\n\n name: &OsStr,\n\n truncate: bool,\n\n ) -> MemoryIoItem {\n\n let cur = match files.borrow_mut().remove(name) {\n\n Some(data) => {\n\n if truncate {\n\n Vec::new()\n\n } else {\n", "file_path": "src/io/memory.rs", "rank": 33, "score": 85962.0348584113 }, { "content": "/// Build a fake tarindex by concatenating files.\n\nstruct TarIndexBuilder {\n\n tar: Vec<u8>,\n\n index: GzEncoder<Vec<u8>>,\n\n /// Map from (offset, length) to file name.\n\n map: HashMap<(u64, u64), String>,\n\n}\n\n\n\nimpl TarIndexBuilder {\n\n fn new() -> TarIndexBuilder {\n\n let tar = Vec::new();\n\n let index = GzBuilder::new()\n\n .filename(\"bundle.tar.index.gz\")\n\n .write(Vec::new(), flate2::Compression::default());\n\n let map = HashMap::new();\n\n\n\n TarIndexBuilder { tar, index, map }\n\n }\n\n\n\n /// Add a file.\n\n fn push(&mut self, name: &str, content: &[u8]) -> &mut Self {\n", "file_path": "tests/cached_itarbundle.rs", "rank": 34, "score": 85962.0348584113 }, { "content": "pub trait DefinitelySame {\n\n fn definitely_same(&self, other: &Self) -> bool;\n\n}\n\n\n\n// Rust currently thinks that this impl conflicts with the one that we\n\n// provide for Result ... I am pretty sure that's not the case since the\n\n// Result PartialEq impl requires that T and E be PartialEq too, whereas\n\n// our definition works for subtypes that are DefinitelySame but\n\n// not PartialEq too.\n\n//\n\n//impl<T: PartialEq> DefinitelySame for T {\n\n// fn definitely_same(&self, other: &T) -> bool {\n\n// self == other\n\n// }\n\n//}\n\n\n\nimpl DefinitelySame for ErrorKind {\n\n fn definitely_same(&self, other: &Self) -> bool {\n\n if let ErrorKind::Msg(ref s) = *self {\n\n return if let ErrorKind::Msg(ref o) = *other {\n", "file_path": "src/errors.rs", "rank": 35, "score": 84754.48322091051 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct LocalCacheItem {\n\n _length: u64,\n\n digest: DigestData,\n\n}\n\n\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 36, "score": 84553.28883610276 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct Error(String);\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(e: io::Error) -> Self {\n\n Error(format!(\"{}\", e)) // note: weirdly, can't use `Self` on this line\n\n }\n\n}\n\n\n\nimpl From<XdvError> for Error {\n\n fn from(e: XdvError) -> Self {\n\n Error(format!(\"{}\", e))\n\n }\n\n}\n\n\n", "file_path": "xdv/examples/xdvdump.rs", "rank": 37, "score": 83327.82565381327 }, { "content": "type scaled_t = int32_t;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 38, "score": 81909.22529051497 }, { "content": "/// Types implementing this trait accept events from the XDV parser.\n\npub trait XdvEvents {\n\n /// An error type returned by the handler functions defined in this trait. 
It\n\n /// must implement `From<XdvError>`.\n\n type Error: Debug + From<XdvError>;\n\n\n\n /// Handle the XDV header\n\n #[allow(unused)] // <= prevents underscore-prefixed names from showing up in docs\n\n fn handle_header(&mut self, filetype: FileType, comment: &[u8]) -> Result<(), Self::Error> {\n\n Ok(())\n\n }\n\n\n\n /// Begin a new page.\n\n #[allow(unused)]\n\n fn handle_begin_page(\n\n &mut self,\n\n counters: &[i32],\n\n previous_bop: i32,\n\n ) -> Result<(), Self::Error> {\n\n Ok(())\n\n }\n", "file_path": "xdv/src/lib.rs", "rank": 39, "score": 81679.4689374995 }, { "content": "pub trait StatusBackend {\n\n /// Report a message to the status backend.\n\n fn report(&mut self, kind: MessageKind, args: Arguments, err: Option<&Error>);\n\n\n\n /// Issue a note-level status, idealy highlighting a particular phrase.\n\n ///\n\n /// This is a bit of a hack. For [`driver::ProcessingSession::run`], I\n\n /// like the UX when we issue notes in this style. It's a bit more\n\n /// high-level than intended for this trait, but we can provide a nice\n\n /// sensible default implementation, so whatever.\n\n fn note_highlighted(&mut self, before: &str, highlighted: &str, after: &str) {\n\n self.report(\n\n MessageKind::Note,\n\n format_args!(\"{}{}{}\", before, highlighted, after),\n\n None,\n\n )\n\n }\n\n}\n\n\n\n/// Report a formatted informational message to the user.\n", "file_path": "src/status/mod.rs", "rank": 40, "score": 81672.89724579843 }, { "content": "pub trait Pack {\n\n fn pack(&mut self, dest: &mut [u8]) -> usize;\n\n}\n\n\n\n/// Rewrittened cff_index\n\n#[derive(Clone)]\n\npub struct CffIndex {\n\n pub count: u16, // ??\n\n pub offsize: c_offsize,\n\n pub offset: Vec<l_offset>,\n\n pub data: Vec<u8>,\n\n}\n\nimpl CffIndex {\n\n // cff_index_new\n\n pub fn new(count: u16) -> Box<Self> {\n\n let offset = if count > 0 {\n\n let mut offset = vec![0; count as usize + 1];\n\n offset[0] = 1;\n\n offset\n\n } else {\n", "file_path": "dpx/src/dpx_cff.rs", "rank": 41, "score": 81672.89724579843 }, { "content": "pub trait FromBEByteSlice {\n\n fn from_be_byte_slice(b: &[u8]) -> Self;\n\n}\n\nimpl FromBEByteSlice for u32 {\n\n fn from_be_byte_slice(b: &[u8]) -> Self {\n\n let mut dst: [u8; 4] = unsafe { MaybeUninit::uninit().assume_init() };\n\n dst.copy_from_slice(b);\n\n u32::from_be_bytes(dst)\n\n }\n\n}\n\nimpl FromBEByteSlice for u16 {\n\n fn from_be_byte_slice(b: &[u8]) -> Self {\n\n let mut dst: [u8; 2] = unsafe { MaybeUninit::uninit().assume_init() };\n\n dst.copy_from_slice(b);\n\n u16::from_be_bytes(dst)\n\n }\n\n}\n\n\n\npub mod dpx_agl;\n\npub mod dpx_bmpimage;\n", "file_path": "dpx/src/lib.rs", "rank": 42, "score": 81672.89724579843 }, { "content": "type Fract = SInt32;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 43, "score": 80486.84197972206 }, { "content": "type Fixed = SInt32;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 44, "score": 80486.84197972206 }, { "content": "/// Set the magic environment variable that enables the testing infrastructure\n\n/// embedded in the main Tectonic crate. 
This function is separated out from\n\n/// the main crate because it embeds `CARGO_MANIFEST_DIR`, which is not\n\n/// something we want to leak into the binary artifacts we produce.\n\npub fn set_test_root() {\n\n ::tectonic::test_util::set_test_root_augmented(env!(\"CARGO_MANIFEST_DIR\"));\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 45, "score": 80250.78582870727 }, { "content": "pub trait FromLEByteSlice {\n\n fn from_le_byte_slice(b: &[u8]) -> Self;\n\n}\n\nimpl FromLEByteSlice for u32 {\n\n fn from_le_byte_slice(b: &[u8]) -> Self {\n\n let mut dst: [u8; 4] = unsafe { MaybeUninit::uninit().assume_init() };\n\n dst.copy_from_slice(b);\n\n u32::from_le_bytes(dst)\n\n }\n\n}\n\nimpl FromLEByteSlice for u16 {\n\n fn from_le_byte_slice(b: &[u8]) -> Self {\n\n let mut dst: [u8; 2] = unsafe { MaybeUninit::uninit().assume_init() };\n\n dst.copy_from_slice(b);\n\n u16::from_le_bytes(dst)\n\n }\n\n}\n", "file_path": "dpx/src/lib.rs", "rank": 46, "score": 80250.78582870727 }, { "content": "pub trait IoEventBackend {\n\n /// This function is called when a file is opened for output.\n\n fn output_opened(&mut self, _name: &OsStr) {}\n\n\n\n /// This function is called when the wrapped \"standard output\"\n\n /// (\"console\", \"terminal\") stream is opened.\n\n fn stdout_opened(&mut self) {}\n\n\n\n /// This function is called when an output file is closed. The \"digest\"\n\n /// argument specifies the cryptographic digest of the data that were\n\n /// written. Note that this function takes ownership of the name and\n\n /// digest.\n\n fn output_closed(&mut self, _name: OsString, _digest: DigestData) {}\n\n\n\n /// This function is called when a file is opened for input.\n\n fn input_opened(&mut self, _name: &OsStr, _origin: InputOrigin) {}\n\n\n\n /// This function is called when the \"primary input\" stream is opened.\n\n fn primary_input_opened(&mut self, _origin: InputOrigin) {}\n\n\n", "file_path": "src/engines/mod.rs", "rank": 47, "score": 80250.78582870727 }, { "content": "struct State<'a, 'b: 'a> {\n\n outname: String,\n\n io: &'a mut IoStack<'b>,\n\n events: &'a mut dyn IoEventBackend,\n\n status: &'a mut dyn StatusBackend,\n\n cur_output: Option<OutputHandle>,\n\n warned_lost_chars: bool,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl<'a, 'b: 'a> State<'a, 'b> {\n\n pub fn new(\n\n outname: String,\n\n io: &'a mut IoStack<'b>,\n\n events: &'a mut dyn IoEventBackend,\n\n status: &'a mut dyn StatusBackend,\n\n ) -> Self {\n\n Self {\n\n outname,\n\n io,\n\n events,\n\n status,\n\n cur_output: None,\n\n warned_lost_chars: false,\n\n buf: Vec::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/engines/spx2html.rs", "rank": 48, "score": 79270.91186823834 }, { "content": "/// Activate this crate's \"test mode\", if-and-only-if the magic testing\n\n/// environment variable has been set. 
This allows testing of the Tectonic\n\n/// executable in a transparent way — notably, we avoid embedding the build\n\n/// prefix in the resulting binary, and we don't need to pass it any magical\n\n/// command line arguments.\n\npub fn maybe_activate_test_mode() {\n\n if env::var_os(TEST_ROOT_ENV_VAR).is_none() {\n\n return;\n\n }\n\n\n\n crate::config::activate_config_test_mode(true);\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 49, "score": 78905.63864394772 }, { "content": "pub fn create() -> DigestComputer {\n\n Default::default()\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct DigestData([u8; N_BYTES]);\n\n\n\nimpl DigestData {\n\n pub fn zeros() -> DigestData {\n\n DigestData([0u8; N_BYTES])\n\n }\n\n\n\n pub fn of_nothing() -> DigestData {\n\n let dc = create();\n\n Self::from(dc)\n\n }\n\n\n\n /// Given a base path, create a child path from this digest's value. The\n\n /// child path has a subdirectory from the hex value of the first byte of\n\n /// the digest, then a name consisting of the rest of the hex data. **The\n", "file_path": "src/digest.rs", "rank": 50, "score": 78537.45326715318 }, { "content": "pub fn _dpx_ensure_output_handle() {\n\n if let Some(handle) = unsafe { ttstub_output_open_stdout() } {\n\n unsafe {\n\n _dpx_message_handle = Some(handle);\n\n }\n\n } else {\n\n panic!(\"xdvipdfmx cannot get output logging handle?!\");\n\n }\n\n}\n\nunsafe extern \"C\" fn _dpx_print_to_stdout(\n\n mut fmt: *const i8,\n\n mut argp: ::std::ffi::VaList,\n\n mut warn: bool,\n\n) {\n\n let mut n: i32 = 0;\n\n n = vsnprintf(\n\n _dpx_message_buf.as_mut_ptr() as *mut i8,\n\n ::std::mem::size_of::<[i8; 1024]>() as u64,\n\n fmt,\n\n argp.as_va_list(),\n", "file_path": "dpx/src/dpx_error.rs", "rank": 51, "score": 77614.94246745805 }, { "content": "type int32_t = libc::c_int;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 52, "score": 77324.47034007822 }, { "content": "type uint16_t = libc::c_ushort;\n\npub type Boolean = libc::c_uchar;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 53, "score": 77324.47034007822 }, { "content": "/// A special IoProvider that can make TeX format files.\n\n///\n\n/// A “bundle” is expected to contain a large number of TeX support files —\n\n/// for instance, a compilation of a TeXLive distribution. 
In terms of the\n\n/// software architecture, though, what is special about a bundle is that one\n\n/// can generate one or more TeX format files from its contents without\n\n/// reference to any other I/O resources.\n\npub trait Bundle: IoProvider {\n\n /// Get a cryptographic digest summarizing this bundle’s contents.\n\n ///\n\n /// The digest summarizes the exact contents of every file in the bundle.\n\n /// It is computed from the sorted names and SHA256 digests of the\n\n /// component files [as implemented in the script\n\n /// builder/make-zipfile.py](https://github.com/tectonic-typesetting/tectonic-staging/blob/master/builder/make-zipfile.py#L138)\n\n /// in the `tectonic-staging` module.\n\n ///\n\n /// The default implementation gets the digest from a file name\n\n /// `SHA256SUM`, which is expected to contain the digest in hex-encoded\n\n /// format.\n\n fn get_digest(&mut self, status: &mut dyn StatusBackend) -> Result<DigestData> {\n\n let digest_text = match self.input_open_name(OsStr::new(digest::DIGEST_NAME), status) {\n\n OpenResult::Ok(h) => {\n\n let mut text = String::new();\n\n h.take(64).read_to_string(&mut text)?;\n\n text\n\n }\n\n\n", "file_path": "src/io/mod.rs", "rank": 54, "score": 77136.61860455177 }, { "content": "pub trait InputFeatures: Read {\n\n fn get_size(&mut self) -> Result<usize>;\n\n fn try_seek(&mut self, pos: SeekFrom) -> Result<u64>;\n\n}\n\n\n\n/// What kind of source an input file ultimately came from. We keep track of\n\n/// this in order to be able to emit Makefile-style dependencies for input\n\n/// files. Right now, we only provide enough options to achieve this goal; we\n\n/// could add more.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum InputOrigin {\n\n /// This file lives on the filesystem and might change under us. (That is\n\n /// it is not a cached bundle file.)\n\n Filesystem,\n\n\n\n /// This file was never used as an input.\n\n NotInput,\n\n\n\n /// This file is none of the above.\n\n Other,\n", "file_path": "src/io/mod.rs", "rank": 55, "score": 77115.34185006203 }, { "content": "type UInt32 = libc::c_uint;\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 56, "score": 75973.29923336842 }, { "content": "type SInt32 = libc::c_int;\n\n\n", "file_path": "engine/src/xetex_aatfont.rs", "rank": 57, "score": 75973.29923336842 }, { "content": "// Duplicated from Cargo's own testing code:\n\n// https://github.com/rust-lang/cargo/blob/19fdb308/tests/cargotest/support/mod.rs#L305-L318\n\npub fn cargo_dir() -> PathBuf {\n\n env::var_os(\"CARGO_BIN_PATH\")\n\n .map(PathBuf::from)\n\n .or_else(|| {\n\n env::current_exe().ok().map(|mut path| {\n\n path.pop();\n\n if path.ends_with(\"deps\") {\n\n path.pop();\n\n }\n\n path\n\n })\n\n })\n\n .unwrap_or_else(|| panic!(\"CARGO_BIN_PATH wasn't set. 
Cannot continue running test\"))\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 58, "score": 75764.42902458424 }, { "content": "#[derive(Debug)]\n\nstruct Cursor<'a, T: XdvEvents> {\n\n /// The *remaining* unprocessed bytes.\n\n buf: &'a [u8],\n\n\n\n /// How many bytes from the *original* chunk buffer have been fully processed.\n\n pub checkpoint: usize,\n\n\n\n /// How many bytes from the *original* chunk buffer we have looked at.\n\n offset: usize,\n\n\n\n /// The offset into the *total stream* that the original chunk buffer was\n\n /// at.\n\n global_offset: u64,\n\n\n\n /// Helps us avoid tedious type annotations.\n\n _events: PhantomData<T>,\n\n}\n\n\n\nimpl<'a, T: XdvEvents> Cursor<'a, T> {\n\n pub fn new(buf: &'a [u8], global_offset: u64) -> Self {\n", "file_path": "xdv/src/lib.rs", "rank": 59, "score": 74766.67625297813 }, { "content": "pub trait Warn<E>: Sized {}\n\n\n", "file_path": "dpx/src/lib.rs", "rank": 60, "score": 74648.21218617905 }, { "content": "fn is_smallcap(glyphname: &[u8]) -> bool {\n\n if glyphname.is_empty() {\n\n return false;\n\n }\n\n let len = glyphname.len();\n\n if len < 6 || glyphname.ends_with(b\"small\") {\n\n return false;\n\n }\n\n let len = len - 5;\n\n let p = &glyphname[..len];\n\n let (p, slen) = skip_modifier(p);\n\n if slen == len {\n\n return true;\n\n } else {\n\n if slen > 0 {\n\n /* ??? */\n\n return false;\n\n }\n\n }\n\n let (mut p, slen) = skip_capital(p);\n", "file_path": "dpx/src/dpx_agl.rs", "rank": 61, "score": 73541.95579379864 }, { "content": "struct ExecutionState<'a, I: 'a + IoProvider> {\n\n io: &'a mut I,\n\n events: &'a mut dyn IoEventBackend,\n\n status: &'a mut dyn StatusBackend,\n\n #[allow(clippy::vec_box)]\n\n input_handles: Vec<Box<InputHandle>>,\n\n #[allow(clippy::vec_box)]\n\n output_handles: Vec<Box<OutputHandle>>,\n\n}\n\n\n\nimpl<'a, I: 'a + IoProvider> ExecutionState<'a, I> {\n\n pub fn new(\n\n io: &'a mut I,\n\n events: &'a mut dyn IoEventBackend,\n\n status: &'a mut dyn StatusBackend,\n\n ) -> ExecutionState<'a, I> {\n\n ExecutionState {\n\n io,\n\n events,\n\n status,\n", "file_path": "src/engines/mod.rs", "rank": 62, "score": 72763.97678490198 }, { "content": "#[doc(hidden)]\n\npub fn activate_config_test_mode(forced: bool) {\n\n CONFIG_TEST_MODE_ACTIVATED.store(forced, Ordering::SeqCst);\n\n}\n\n\n\n#[cfg_attr(feature = \"serde\", derive(Deserialize, Serialize))]\n\npub struct PersistentConfig {\n\n default_bundles: Vec<BundleInfo>,\n\n}\n\n\n\n#[cfg_attr(feature = \"serde\", derive(Deserialize, Serialize))]\n\npub struct BundleInfo {\n\n url: String,\n\n}\n\n\n\nimpl PersistentConfig {\n\n #[cfg(feature = \"serialization\")]\n\n /// Open the per-user configuration file.\n\n ///\n\n /// This file is stored in TOML format. 
If the configuration file does not\n\n /// exist, no error is signaled — instead, a basic default configuration\n", "file_path": "src/config.rs", "rank": 63, "score": 72012.36882492981 }, { "content": "pub fn user_config() -> Result<PathBuf> {\n\n Ok(app_dirs2::app_root(AppDataType::UserConfig, &APP_INFO)?)\n\n}\n\n\n", "file_path": "src/app_dirs.rs", "rank": 64, "score": 72012.36882492981 }, { "content": "pub fn pdf_dev_get_fixed_point() -> pdf_coord {\n\n let mut gss = unsafe { &mut gs_stack };\n\n let gs = gss.top();\n\n gs.pt_fixee.clone()\n\n}\n", "file_path": "dpx/src/dpx_pdfdraw.rs", "rank": 65, "score": 70975.55919812758 }, { "content": "/// Generate a plain.fmt file using local files only -- a variety of tests\n\n/// need such a file to exist.\n\n///\n\n/// Note that because tests are run in parallel, this can get quite racy. At\n\n/// the moment we just let everybody write and overwrite the file, but we\n\n/// could use a locking scheme to get smarter about this.\n\npub fn ensure_plain_format() -> Result<PathBuf> {\n\n use ::tectonic::engines::NoopIoEventBackend;\n\n use ::tectonic::io::{\n\n try_open_file, FilesystemIo, FilesystemPrimaryInputIo, IoStack, MemoryIo,\n\n };\n\n use ::tectonic::status::NoopStatusBackend;\n\n use ::tectonic::TexEngine;\n\n\n\n let fmt_path = test_path(&[\"plain.fmt\"]);\n\n\n\n if try_open_file(&fmt_path).is_not_available() {\n\n let mut mem = MemoryIo::new(true);\n\n\n\n let mut assets_dir = test_path(&[\"assets\"]);\n\n let mut fs_support = FilesystemIo::new(&assets_dir, false, false, HashSet::new());\n\n\n\n assets_dir.push(\"plain\");\n\n assets_dir.set_extension(\"tex\");\n\n let mut fs_primary = FilesystemPrimaryInputIo::new(&assets_dir);\n\n\n", "file_path": "tests/util/mod.rs", "rank": 66, "score": 70806.91468715439 }, { "content": "pub fn get_user_config() -> Result<PathBuf> {\n\n Ok(app_dirs2::get_app_root(AppDataType::UserConfig, &APP_INFO)?)\n\n}\n\n\n", "file_path": "src/app_dirs.rs", "rank": 67, "score": 70788.7016865338 }, { "content": "/// Similarly, a stunted verion of CliIoEvents.\n\nstruct FormatTestEvents(HashMap<OsString, FileSummary>);\n\n\n\nimpl FormatTestEvents {\n\n fn new() -> FormatTestEvents {\n\n FormatTestEvents(HashMap::new())\n\n }\n\n}\n\n\n\nimpl IoEventBackend for FormatTestEvents {\n\n fn output_opened(&mut self, name: &OsStr) {\n\n self.0.insert(name.to_os_string(), FileSummary::new());\n\n }\n\n\n\n fn output_closed(&mut self, name: OsString, digest: DigestData) {\n\n let summ = self\n\n .0\n\n .get_mut(&name)\n\n .expect(\"closing file that wasn't opened?\");\n\n summ.write_digest = Some(digest);\n\n }\n\n}\n\n\n", "file_path": "tests/formats.rs", "rank": 68, "score": 69739.88195587599 }, { "content": "/// Obtain a path to a testing resource file. The environment variable whose\n\n/// name is stored in the constant `TEST_ROOT_ENV_VAR` must be set to an\n\n/// appropriate directory. 
(Note: `TEST_ROOT_ENV_VAR` is a constant giving the\n\n/// *name* of the relevant variable — not the name of the variable itself!)\n\npub fn test_path(parts: &[&str]) -> PathBuf {\n\n let mut path = PathBuf::from(env::var_os(TEST_ROOT_ENV_VAR).expect(\n\n \"Tectonic testing infrastructure cannot be used without \\\n\n setting the magic test-root environment variable\",\n\n ));\n\n path.push(parts.iter().collect::<PathBuf>());\n\n path\n\n}\n\n\n\n/// Utility for being able to treat the \"assets/\" directory as a bundle.\n\npub struct TestBundle(FilesystemIo);\n\n\n\nimpl Default for TestBundle {\n\n fn default() -> Self {\n\n TestBundle(FilesystemIo::new(\n\n &test_path(&[\"assets\"]),\n\n false,\n\n false,\n\n HashSet::new(),\n\n ))\n", "file_path": "src/test_util.rs", "rank": 69, "score": 68804.24439718413 }, { "content": "/// Normalize a TeX path in a system independent™ way by stripping any `.`, `..`,\n\n/// or extra separators '/' so that it is of the form\n\n///\n\n/// ```text\n\n/// path/to/my/file.txt\n\n/// ../../path/to/parent/dir/file.txt\n\n/// /absolute/path/to/file.txt\n\n/// ```\n\n///\n\n/// Does not strip whitespace.\n\n///\n\n/// Returns `None` if the path refers to a parent of the root.\n\nfn try_normalize_tex_path(path: &str) -> Option<String> {\n\n use std::iter::repeat;\n\n if path.is_empty() {\n\n return Some(\"\".into());\n\n }\n\n let mut r = Vec::new();\n\n let mut parent_level = 0;\n\n let mut has_root = false;\n\n\n\n // TODO: We need to handle a prefix on Windows (i.e. \"C:\").\n\n\n\n for (i, c) in path.split('/').enumerate() {\n\n match c {\n\n \"\" if i == 0 => {\n\n has_root = true;\n\n r.push(\"\");\n\n }\n\n \"\" | \".\" => {}\n\n \"..\" => {\n\n match r.pop() {\n", "file_path": "src/io/mod.rs", "rank": 70, "score": 67892.38509119571 }, { "content": "fn format_to_extension(format: FileFormat) -> Vec<&'static str> {\n\n match format {\n\n FileFormat::AFM => vec![\"afm\"],\n\n FileFormat::Bib => vec![\"bib\"],\n\n FileFormat::Bst => vec![\"bst\"],\n\n FileFormat::Cmap => vec![\"cmap\"], /* XXX: kpathsea doesn't define any suffixes for this */\n\n FileFormat::Enc => vec![\"enc\"],\n\n FileFormat::Format => vec![\"fmt\"],\n\n FileFormat::FontMap => vec![\"map\"],\n\n FileFormat::MiscFonts => vec![\"miscfonts\"], /* XXX: no kpathsea suffixes */\n\n FileFormat::Ofm => vec![\"ofm\"],\n\n FileFormat::OpenType => vec![\"otf\", \"OTF\"],\n\n FileFormat::Ovf => vec![\"ovf\", \"vf\"],\n\n FileFormat::Pict => vec![\"pdf\", \"jpg\", \"eps\", \"epsi\"], /* XXX: also .eps, .epsi, ... 
*/\n\n FileFormat::Pk => vec![\"pk\"],\n\n FileFormat::ProgramData => vec![\"programdata\"], /* XXX no suffixes */\n\n FileFormat::Sfd => vec![\"sfd\"],\n\n FileFormat::Tex => vec![\"tex\", \"sty\", \"cls\", \"fd\", \"aux\", \"bbl\", \"def\", \"clo\", \"ldf\"],\n\n FileFormat::TexPsHeader => vec![\"pro\"],\n\n FileFormat::TFM => vec![\"tfm\"],\n\n FileFormat::TrueType => vec![\"ttf\", \"ttc\", \"TTF\", \"TTC\", \"dfont\"],\n\n FileFormat::Type1 => vec![\"pfa\", \"pfb\"],\n\n FileFormat::Vf => vec![\"vf\"],\n\n }\n\n}\n\n\n", "file_path": "src/engines/mod.rs", "rank": 71, "score": 66295.44638203162 }, { "content": "fn c_format_to_rust(format: libc::c_int) -> Option<FileFormat> {\n\n match format {\n\n 1 => Some(FileFormat::Pk),\n\n 3 => Some(FileFormat::TFM),\n\n 4 => Some(FileFormat::AFM),\n\n 6 => Some(FileFormat::Bib),\n\n 7 => Some(FileFormat::Bst),\n\n 10 => Some(FileFormat::Format),\n\n 11 => Some(FileFormat::FontMap),\n\n 20 => Some(FileFormat::Ofm),\n\n 23 => Some(FileFormat::Ovf),\n\n 25 => Some(FileFormat::Pict),\n\n 26 => Some(FileFormat::Tex),\n\n 30 => Some(FileFormat::TexPsHeader),\n\n 32 => Some(FileFormat::Type1),\n\n 33 => Some(FileFormat::Vf),\n\n 36 => Some(FileFormat::TrueType),\n\n 39 => Some(FileFormat::ProgramData),\n\n 40 => Some(FileFormat::ProgramData), // NOTE: kpathsea distinguishes text/binary; we don't\n\n 41 => Some(FileFormat::MiscFonts),\n\n 44 => Some(FileFormat::Enc),\n\n 45 => Some(FileFormat::Cmap),\n\n 46 => Some(FileFormat::Sfd),\n\n 47 => Some(FileFormat::OpenType),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/engines/mod.rs", "rank": 72, "score": 66228.6934092929 }, { "content": "class XeTeXFontInst_Mac : public XeTeXFontInst\n\n{\n\nprotected:\n\n\n\npublic:\n\n XeTeXFontInst_Mac(CTFontDescriptorRef descriptor, float pointSize, int &status);\n\n\n\n virtual ~XeTeXFontInst_Mac();\n\n\n\n virtual void initialize(int &status);\n\n};\n\n*/\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn XeTeXFontInst_Mac_create(\n\n mut descriptor: CTFontDescriptorRef,\n\n mut pointSize: libc::c_float,\n\n mut status: *mut libc::c_int,\n\n) -> *mut XeTeXFontInst_Mac {\n\n let mut value: *mut XeTeXFontInst_Mac =\n\n malloc(::std::mem::size_of::<XeTeXFontInst_Mac>() as libc::c_ulong)\n\n as *mut XeTeXFontInst_Mac;\n\n XeTeXFontInst_Mac_ctor(value, descriptor, pointSize, status);\n\n return value;\n\n}\n", "file_path": "engine/src/xetex_font_info_coretext.rs", "rank": 73, "score": 65638.25920252486 }, { "content": "pub fn user_cache_dir(path: &str) -> Result<PathBuf> {\n\n Ok(app_dirs2::app_dir(AppDataType::UserCache, &APP_INFO, path)?)\n\n}\n", "file_path": "src/app_dirs.rs", "rank": 74, "score": 64874.58254361791 }, { "content": "pub fn get_date_and_time() -> (i32, i32, i32, i32) {\n\n use chrono::prelude::*;\n\n\n\n let tm = Local::now();\n\n\n\n let year = tm.year();\n\n let month = tm.month();\n\n let day = tm.day();\n\n let minutes = tm.hour() * 60 + tm.minute();\n\n\n\n (minutes as _, day as _, month as _, year)\n\n}\n\nunsafe extern \"C\" fn checkpool_pointer(mut pool_ptr_0: pool_pointer, mut len: size_t) {\n\n assert!(\n\n !((pool_ptr_0 as u64).wrapping_add(len) >= pool_size as u64),\n\n \"string pool overflow [{} bytes]\",\n\n pool_size,\n\n );\n\n}\n\n#[no_mangle]\n", "file_path": "engine/src/xetex_texmfmp.rs", "rank": 75, "score": 64874.58254361791 }, { "content": "type InternalResult<T, E> = Result<T, InternalError<E>>;\n\n\n\nimpl<T: Debug + From<XdvError>> From<T> for InternalError<T> {\n\n fn from(e: T) -> Self {\n\n InternalError::Other(e)\n\n 
}\n\n}\n\n\n", "file_path": "xdv/src/lib.rs", "rank": 76, "score": 63641.570567701434 }, { "content": "fn parse_index_line(line: &str) -> Result<Option<(String, FileInfo)>> {\n\n let mut bits = line.split_whitespace();\n\n\n\n if let (Some(name), Some(offset), Some(length)) = (bits.next(), bits.next(), bits.next()) {\n\n Ok(Some((\n\n name.to_owned(),\n\n FileInfo {\n\n offset: offset.parse::<u64>()?,\n\n length: length.parse::<u64>()?,\n\n },\n\n )))\n\n } else {\n\n // TODO: preserve the warning info or something!\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 77, "score": 61637.398339361396 }, { "content": "/// A combination of the two above functions. Set the \"test root\" variable,\n\n/// making it such that the testing infrastructure in this module can work;\n\n/// and then activate \"test mode\", such that certain other parts of the crate\n\n/// alter their behavior for use in the test environment. This makes for\n\n/// convenient doctests — you can secretly run them in the special test setup\n\n/// with a one-liner:\n\n///\n\n/// # tectonic::test_util::activate_test_mode_augmented(env!(\"CARGO_MANIFEST_DIR\"));\n\npub fn activate_test_mode_augmented<V: AsRef<OsStr>>(root: V) {\n\n set_test_root_augmented(root);\n\n maybe_activate_test_mode();\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 78, "score": 61421.23130097095 }, { "content": "/// Set `TEST_ROOT_ENV_VAR` in the current process to the specified value,\n\n/// with a path element \"tests\" appended. If the variable was previously\n\n/// unset, this will make it such that `test_path()` and the other pieces of\n\n/// testing infrastructure in this module will start working.\n\n///\n\n/// The peculiar form of this function makes for easy one-liners in the test\n\n/// code exploiting the environment variable $CARGO_MANIFEST_DIR.\n\npub fn set_test_root_augmented<V: AsRef<OsStr>>(root: V) {\n\n let mut root = PathBuf::from(root.as_ref());\n\n root.push(\"tests\");\n\n env::set_var(TEST_ROOT_ENV_VAR, root);\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 79, "score": 61416.07365595036 }, { "content": "/// A convenience method to provide a better error message when writing to a created file.\n\nfn file_create_write<P, F, E>(path: P, write_fn: F) -> Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n F: FnOnce(&mut File) -> std::result::Result<(), E>,\n\n std::result::Result<(), E>: crate::errors::ResultExt<()>,\n\n{\n\n let path = path.as_ref();\n\n let mut f = ctry!(File::create(path); \"couldn't open {} for writing\",\n\n path.display());\n\n ctry!(write_fn(&mut f); \"couldn't write to {}\", path.display());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 80, "score": 59228.684173818234 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/specials/dvipdfmx.rs", "rank": 81, "score": 58696.28203413873 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 82, "score": 58696.28203413873 }, { "content": "fn cache_dir(path: &str, custom_cache_root: Option<&Path>) -> Result<PathBuf> {\n\n if let Some(root) = custom_cache_root {\n\n if !root.is_dir() {\n\n bail!(\"Custom cache path {} is not a directory\", root.display());\n\n }\n\n let full_path = root.join(path);\n\n ctry!(fs::create_dir_all(&full_path); \"failed to create directory {}\", full_path.display());\n\n Ok(full_path)\n\n } else {\n\n app_dirs::user_cache_dir(path)\n\n }\n\n}\n", "file_path": "src/io/cached_itarbundle.rs", "rank": 83, "score": 58542.215836936506 }, { "content": " (*pi).name\n\n } else {\n\n 0 as *const i8\n\n })\n\n .is_null()\n\n {\n\n paper_width = if !pi.is_null() && !(*pi).name.is_null() {\n\n (*pi).pswidth\n\n } else {\n\n 0.0f64\n\n };\n\n paper_height = if !pi.is_null() && !(*pi).name.is_null() {\n\n (*pi).psheight\n\n } else {\n\n 0.0f64\n\n }\n\n } else {\n\n let mut p: *const i8 = paperspec;\n\n let mut endptr: *const i8 = 0 as *const i8;\n\n let mut comma: *const i8 = 0 as *const i8;\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 84, "score": 58519.54982452946 }, { "content": "*/\n\n#![allow(\n\n unused_mut\n\n)]\n\n\n\nuse std::ffi::CStr;\n\n\n\nuse super::{spc_arg, spc_env, SpcHandler};\n\nuse crate::dpx_dpxutil::parse_c_ident;\n\nuse crate::dpx_pdfparse::skip_white;\n\nuse crate::spc_warn;\n\nuse libc::{free, memcmp, strlen};\n\n\n\npub type size_t = u64;\n\n\n\n/* tectonic/core-strutils.h: miscellaneous C string utilities\n\n Copyright 2016-2018 the Tectonic Project\n\n Licensed under the MIT License.\n\n*/\n\n/* Note that we explicitly do *not* change this on Windows. 
For maximum\n", "file_path": "dpx/src/specials/dvipdfmx.rs", "rank": 85, "score": 58514.589275275495 }, { "content": "/* Encryption */\n\nstatic mut do_encryption: i32 = 0i32;\n\nstatic mut key_bits: i32 = 40i32;\n\nstatic mut permission: i32 = 0x3ci32;\n\n/* Page device */\n\n#[no_mangle]\n\npub static mut paper_width: f64 = 595.0f64;\n\n#[no_mangle]\n\npub static mut paper_height: f64 = 842.0f64;\n\nstatic mut x_offset: f64 = 72.0f64;\n\nstatic mut y_offset: f64 = 72.0f64;\n\n#[no_mangle]\n\npub static mut landscape_mode: i32 = 0i32;\n\n#[no_mangle]\n\npub static mut always_embed: i32 = 0i32;\n\n/* always embed fonts, regardless of licensing flags */\n\n/* XXX: there are four quasi-redundant versions of this; grp for K_UNIT__PT */\n\nunsafe fn read_length(mut vp: *mut f64, mut pp: *mut *const i8, mut endptr: *const i8) -> i32 {\n\n let mut q: *mut i8 = 0 as *mut i8;\n\n let mut p: *const i8 = *pp;\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 86, "score": 58514.02583495871 }, { "content": "use super::dpx_pdfdev::pdf_rect;\n\n#[no_mangle]\n\npub static mut is_xdv: i32 = 0i32;\n\n#[no_mangle]\n\npub static mut translate_origin: i32 = 0i32;\n\nstatic mut ignore_colors: i8 = 0_i8;\n\nstatic mut annot_grow: f64 = 0.0f64;\n\nstatic mut bookmark_open: i32 = 0i32;\n\nstatic mut mag: f64 = 1.0f64;\n\nstatic mut font_dpi: i32 = 600i32;\n\n/*\n\n * Precision is essentially limited to 0.01pt.\n\n * See, dev_set_string() in pdfdev.c.\n\n */\n\nstatic mut pdfdecimaldigits: i32 = 3i32;\n\n/* Image cache life in hours */\n\n/* 0 means erase all old images and leave new images */\n\n/* -1 means erase all old images and also erase new images */\n\n/* -2 means ignore image cache (default) */\n\nstatic mut image_cache_life: i32 = -2i32;\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 87, "score": 58508.34799028795 }, { "content": " warn!(\"Unknown unit of measure: {}\", CStr::from_ptr(q).display(),);\n\n error = -1i32\n\n }\n\n }\n\n free(qq as *mut libc::c_void);\n\n } else {\n\n warn!(\"Missing unit of measure after \\\"true\\\"\");\n\n error = -1i32\n\n }\n\n }\n\n *vp = v * u;\n\n *pp = p;\n\n error\n\n}\n\nunsafe fn select_paper(mut paperspec: *const i8) {\n\n let mut pi: *const paper = 0 as *const paper;\n\n let mut error: i32 = 0i32;\n\n pi = paperinfo(paperspec);\n\n if !pi.is_null()\n\n && !(if !pi.is_null() && !(*pi).name.is_null() {\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 88, "score": 58507.78214729899 }, { "content": "use super::dpx_pdfencrypt::{pdf_enc_compute_id_string, pdf_enc_set_passwd, pdf_enc_set_verbose};\n\nuse super::dpx_pdfobj::{\n\n pdf_files_close, pdf_files_init, pdf_get_version, pdf_obj_reset_global_state,\n\n pdf_obj_set_verbose, pdf_set_compression, pdf_set_use_predictor, pdf_set_version,\n\n};\n\nuse super::dpx_tfm::tfm_reset_global_state;\n\nuse super::dpx_vf::vf_reset_global_state;\n\nuse crate::specials::{\n\n spc_exec_at_begin_document, spc_exec_at_end_document, tpic::tpic_set_fill_mode,\n\n};\n\nuse libc::{atof, atoi, free, memcmp, strchr, strcmp, strlen};\n\nuse std::slice::from_raw_parts;\n\n\n\npub type PageRange = page_range;\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct page_range {\n\n pub first: i32,\n\n pub last: i32,\n\n}\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 89, "score": 58505.273727888605 }, { "content": " b\"dvipdfmx:\\x00\" as *const u8 as *const i8 as *const libc::c_void,\n\n strlen(b\"dvipdfmx:\\x00\" as *const u8 as *const i8),\n\n ) == 0\n\n {\n\n return true;\n\n }\n\n false\n\n}\n\n#[no_mangle]\n\npub unsafe 
extern \"C\" fn spc_dvipdfmx_setup_handler(\n\n mut sph: *mut SpcHandler,\n\n mut spe: *mut spc_env,\n\n mut ap: *mut spc_arg,\n\n) -> i32 {\n\n let mut error: i32 = -1i32;\n\n assert!(!sph.is_null() && !spe.is_null() && !ap.is_null());\n\n skip_white(&mut (*ap).curptr, (*ap).endptr);\n\n if (*ap)\n\n .curptr\n\n .offset(strlen(b\"dvipdfmx:\\x00\" as *const u8 as *const i8) as isize)\n", "file_path": "dpx/src/specials/dvipdfmx.rs", "rank": 90, "score": 58504.39547206472 }, { "content": " * portability, we should probably accept *either* forward or backward slashes\n\n * as directory separators. */\n\n\n\nunsafe fn spc_handler_null(mut _spe: *mut spc_env, mut args: *mut spc_arg) -> i32 {\n\n (*args).curptr = (*args).endptr;\n\n 0i32\n\n}\n\nconst DVIPDFMX_HANDLERS: [SpcHandler; 1] = [SpcHandler {\n\n key: b\"config\",\n\n exec: Some(spc_handler_null),\n\n}];\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn spc_dvipdfmx_check_special(mut buf: *const i8, mut len: i32) -> bool {\n\n let mut p = buf;\n\n let endptr = p.offset(len as isize);\n\n skip_white(&mut p, endptr);\n\n if p.offset(strlen(b\"dvipdfmx:\\x00\" as *const u8 as *const i8) as isize) <= endptr\n\n && memcmp(\n\n p as *const libc::c_void,\n", "file_path": "dpx/src/specials/dvipdfmx.rs", "rank": 91, "score": 58504.15807091915 }, { "content": " pdf_fontmap_set_verbose(verbose as i32);\n\n dpx_file_set_verbose(verbose as i32);\n\n tt_aux_set_verbose(verbose as i32);\n\n }\n\n pdf_set_compression(if compress as i32 != 0 { 9i32 } else { 0i32 });\n\n pdf_font_set_deterministic_unique_tags(if deterministic_tags as i32 != 0 {\n\n 1i32\n\n } else {\n\n 0i32\n\n });\n\n system_default();\n\n pdf_init_fontmaps();\n\n /* We used to read the config file here. It synthesized command-line\n\n * arguments, so we emulate the default TeXLive config file by copying those\n\n * code bits. */\n\n pdf_set_version(5_u32); /* last page */\n\n select_paper(b\"letter\\x00\" as *const u8 as *const i8);\n\n annot_grow = 0i32 as f64;\n\n bookmark_open = 0i32;\n\n key_bits = 40i32;\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 92, "score": 58501.51923867286 }, { "content": " comma = strchr(p, ',' as i32);\n\n endptr = p.offset(strlen(p) as isize);\n\n if comma.is_null() {\n\n panic!(\n\n \"Unrecognized paper format: {}\",\n\n CStr::from_ptr(paperspec).display()\n\n );\n\n }\n\n error = read_length(&mut paper_width, &mut p, comma);\n\n p = comma.offset(1);\n\n error = read_length(&mut paper_height, &mut p, endptr)\n\n }\n\n if error != 0 || paper_width <= 0.0f64 || paper_height <= 0.0f64 {\n\n panic!(\n\n \"Invalid paper size: {} ({:.2}x{:.2}\",\n\n CStr::from_ptr(paperspec).display(),\n\n paper_width,\n\n paper_height,\n\n );\n\n };\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 93, "score": 58500.99759736816 }, { "content": " i = i.wrapping_add(1)\n\n }\n\n if page_count < 1_u32 {\n\n panic!(\"No pages fall in range!\");\n\n }\n\n spc_exec_at_end_document();\n\n}\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn dvipdfmx_main(\n\n mut pdf_filename: *const i8,\n\n mut dvi_filename: *const i8,\n\n mut pagespec: *const i8,\n\n mut opt_flags: i32,\n\n mut translate: bool,\n\n mut compress: bool,\n\n mut deterministic_tags: bool,\n\n mut quiet: bool,\n\n mut verbose: u32,\n\n) -> i32 {\n\n let mut enable_object_stream: bool = true; /* This must come before parsing options... */\n", "file_path": "dpx/src/dpx_dvipdfmx.rs", "rank": 94, "score": 58500.23733540939 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. 
Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/dpx_mpost.rs", "rank": 95, "score": 229.4666557578548 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/specials/dvips.rs", "rank": 96, "score": 229.46665575785474 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/dpx_type0.rs", "rank": 97, "score": 229.46665575785465 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. 
Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/dpx_vf.rs", "rank": 98, "score": 229.46665575785468 }, { "content": "/* This is dvipdfmx, an eXtended version of dvipdfm by Mark A. Wicks.\n\n\n\n Copyright (C) 2002-2016 by Jin-Hwan Cho and Shunsaku Hirata,\n\n the dvipdfmx project team.\n\n\n\n Copyright (C) 1998, 1999 by Mark A. Wicks <mwicks@kettering.edu>\n\n\n\n This program is free software; you can redistribute it and/or modify\n\n it under the terms of the GNU General Public License as published by\n\n the Free Software Foundation; either version 2 of the License, or\n\n (at your option) any later version.\n\n\n\n This program is distributed in the hope that it will be useful,\n\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n GNU General Public License for more details.\n\n\n\n You should have received a copy of the GNU General Public License\n\n along with this program; if not, write to the Free Software\n\n Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.\n", "file_path": "dpx/src/dpx_otl_conf.rs", "rank": 99, "score": 229.46665575785468 } ]
Rust
src/nal/sei/buffering_period.rs
fkaa/h264-reader
24bc61f62599ee3d3fe4df7f725a00002ac8d484
use super::SeiCompletePayloadReader; use std::marker; use crate::nal::{sps, pps}; use crate::rbsp::RbspBitReader; use crate::Context; use crate::nal::sei::HeaderType; use crate::rbsp::RbspBitReaderError; use log::*; #[derive(Debug)] enum BufferingPeriodError { ReaderError(RbspBitReaderError), UndefinedSeqParamSetId(pps::ParamSetId), InvalidSeqParamSetId(pps::ParamSetIdError), } impl From<RbspBitReaderError> for BufferingPeriodError { fn from(e: RbspBitReaderError) -> Self { BufferingPeriodError::ReaderError(e) } } impl From<pps::ParamSetIdError> for BufferingPeriodError { fn from(e: pps::ParamSetIdError) -> Self { BufferingPeriodError::InvalidSeqParamSetId(e) } } #[derive(Debug, Eq, PartialEq)] struct InitialCpbRemoval { initial_cpb_removal_delay: u32, initial_cpb_removal_delay_offset: u32, } fn read_cpb_removal_delay_list(r: &mut RbspBitReader<'_>, count: usize, length: u8) -> Result<Vec<InitialCpbRemoval>,RbspBitReaderError> { let mut res = vec!(); for _ in 0..count { res.push(InitialCpbRemoval { initial_cpb_removal_delay: r.read_u32(length)?, initial_cpb_removal_delay_offset: r.read_u32(length)?, }); } Ok(res) } #[derive(Debug, Eq, PartialEq)] struct BufferingPeriod { nal_hrd_bp: Option<Vec<InitialCpbRemoval>>, vcl_hrd_bp: Option<Vec<InitialCpbRemoval>>, } impl BufferingPeriod { fn read<Ctx>(ctx: &Context<Ctx>, buf: &[u8]) -> Result<BufferingPeriod,BufferingPeriodError> { let mut r = RbspBitReader::new(buf); let seq_parameter_set_id = pps::ParamSetId::from_u32(r.read_ue_named("seq_parameter_set_id")?)?; let sps = ctx.sps_by_id(seq_parameter_set_id) .ok_or_else(|| BufferingPeriodError::UndefinedSeqParamSetId(seq_parameter_set_id))?; let vui = sps.vui_parameters.as_ref(); let mut read = |p: &sps::HrdParameters| read_cpb_removal_delay_list( &mut r, p.cpb_specs.len(), p.initial_cpb_removal_delay_length_minus1 + 1, ); let nal_hrd_bp = vui.and_then(|v| v.nal_hrd_parameters.as_ref()).map(&mut read).transpose()?; let vcl_hrd_bp = vui.and_then(|v| v.vcl_hrd_parameters.as_ref()).map(&mut read).transpose()?; Ok(BufferingPeriod { nal_hrd_bp, vcl_hrd_bp, }) } } pub struct BufferingPeriodPayloadReader<Ctx> { phantom: marker::PhantomData<Ctx>, } impl<Ctx> Default for BufferingPeriodPayloadReader<Ctx> { fn default() -> Self { BufferingPeriodPayloadReader { phantom: marker::PhantomData } } } impl<Ctx> SeiCompletePayloadReader for BufferingPeriodPayloadReader<Ctx> { type Ctx = Ctx; fn header(&mut self, ctx: &mut Context<Ctx>, payload_type: HeaderType, buf: &[u8]) { assert_eq!(payload_type, HeaderType::BufferingPeriod); match BufferingPeriod::read(ctx, buf) { Err(e) => error!("Failure reading buffering_period: {:?}", e), Ok(buffering_period) => { info!("TODO: expose buffering_period {:#?}", buffering_period); } } } } #[cfg(test)] mod test { use hex_literal::hex; use super::*; #[test] fn parse() { let mut ctx = Context::default(); let sps_rbsp = hex!(" 4d 60 15 8d 8d 28 58 9d 08 00 00 0f a0 00 07 53 07 00 00 00 92 7c 00 00 12 4f 80 fb dc 18 00 00 0f 42 40 00 07 a1 20 7d ee 07 c6 0c 62 60 "); ctx.put_seq_param_set(sps::SeqParameterSet::from_bytes(&sps_rbsp[..]).unwrap()); let payload = &hex!("d7 e4 00 00 57 e4 00 00 40")[..]; assert_eq!(BufferingPeriod::read(&ctx, payload).unwrap(), BufferingPeriod { nal_hrd_bp: Some(vec![ InitialCpbRemoval { initial_cpb_removal_delay: 45_000, initial_cpb_removal_delay_offset: 0, }, ]), vcl_hrd_bp: Some(vec![ InitialCpbRemoval { initial_cpb_removal_delay: 45_000, initial_cpb_removal_delay_offset: 0, }, ]), }); } }
use super::SeiCompletePayloadReader; use std::marker; use crate::nal::{sps, pps}; use crate::rbsp::RbspBitReader; use crate::Context; use crate::nal::sei::HeaderType; use crate::rbsp::RbspBitReaderError; use log::*; #[derive(Debug)] enum BufferingPeriodError { ReaderError(RbspBitReaderError), UndefinedSeqParamSetId(pps::ParamSetId), InvalidSeqParamSetId(pps::ParamSetIdError), } impl From<RbspBitReaderError> for BufferingPeriodError { fn from(e: RbspBitReaderError) -> Self { BufferingPeriodError::ReaderError(e) } } impl From<pps::ParamSetIdError> for BufferingPeriodError { fn from(e: pps::ParamSetIdError) -> Self { BufferingPeriodError::InvalidSeqParamSetId(e) } } #[derive(Debug, Eq, PartialEq)] struct InitialCpbRemoval { initial_cpb_removal_delay: u32, initial_cpb_removal_delay_offset: u32, } fn read_cpb_removal_delay_list(r: &mut RbspBitReader<'_>, count: usize, length: u8) -> Result<Vec<InitialCpbRemoval>,RbspBitReaderError> { let mut res = vec!(); for _ in 0..count { res.push(InitialCpbRemoval { initial_cpb_removal_delay: r.read_u32(length)?, initial_cpb_removal_delay_offset: r.read_u32(length)?, }); } Ok(res) } #[derive(Debug, Eq, PartialEq)] struct BufferingPeriod { nal_hrd_bp: Option<Vec<InitialCpbRemoval>>, vcl_hrd_bp: Option<Vec<InitialCpbRemoval>>, } impl BufferingPeriod { fn read<Ctx>(ctx: &Context<Ctx>, buf: &[u8]) -> Result<BufferingPeriod,BufferingPeriodError> { let mut r = RbspBitReader::new(buf); let seq_parameter_set_id = pps::ParamSetId::from_u32(r.read_ue_named("seq_parameter_set_id")?)?; let sps = ctx.sps_by_id(seq_parameter_set_id) .ok_or_else(|| BufferingPeriodError::UndefinedSeqParamSetId(seq_parameter_set_id))?; let vui = sps.vui_parameters.as_ref(); let mut read = |p: &sps::HrdParameters| read_cpb_removal_delay_list( &mut r, p.cpb_specs.len(), p.initial_cpb_removal_delay_length_minus1 + 1, ); let nal_hrd_bp = vui.and_then(|v| v.nal_hrd_parameters.as_ref()).map(&mut read).transpose()?; let vcl_hrd_bp = vui.and_then(|v| v.vcl_hrd_parameters.as_ref()).map(&mut read).transpose()?; Ok(BufferingPeriod { nal_hrd_bp, vcl_hrd_bp, }) } } pub struct BufferingPeriodPayloadReader<Ctx> { phantom: marker::PhantomData<Ctx>, } impl<Ctx> Default for BufferingPeriodPayloadReader<Ctx> { fn default() -> Self { BufferingPeriodPayloadReader { phantom: marker::PhantomData } } } impl<Ctx> SeiCompletePayloadReader for BufferingPeriodPayloadReader<Ctx> { type Ctx = Ctx; fn header(&mut self, ctx: &mut Context<Ctx>, payload_type: HeaderType, buf: &[u8]) { assert_eq!(payload_type, HeaderType::BufferingPeriod);
} } #[cfg(test)] mod test { use hex_literal::hex; use super::*; #[test] fn parse() { let mut ctx = Context::default(); let sps_rbsp = hex!(" 4d 60 15 8d 8d 28 58 9d 08 00 00 0f a0 00 07 53 07 00 00 00 92 7c 00 00 12 4f 80 fb dc 18 00 00 0f 42 40 00 07 a1 20 7d ee 07 c6 0c 62 60 "); ctx.put_seq_param_set(sps::SeqParameterSet::from_bytes(&sps_rbsp[..]).unwrap()); let payload = &hex!("d7 e4 00 00 57 e4 00 00 40")[..]; assert_eq!(BufferingPeriod::read(&ctx, payload).unwrap(), BufferingPeriod { nal_hrd_bp: Some(vec![ InitialCpbRemoval { initial_cpb_removal_delay: 45_000, initial_cpb_removal_delay_offset: 0, }, ]), vcl_hrd_bp: Some(vec![ InitialCpbRemoval { initial_cpb_removal_delay: 45_000, initial_cpb_removal_delay_offset: 0, }, ]), }); } }
match BufferingPeriod::read(ctx, buf) { Err(e) => error!("Failure reading buffering_period: {:?}", e), Ok(buffering_period) => { info!("TODO: expose buffering_period {:#?}", buffering_period); } }
if_condition
[ { "content": "fn count_zero_bits<R: BitRead>(r: &mut R, name: &'static str) -> Result<u8, RbspBitReaderError> {\n\n let mut count = 0;\n\n while !r.read_bit()? {\n\n count += 1;\n\n if count > 31 {\n\n return Err(RbspBitReaderError::ExpGolombTooLarge(name));\n\n }\n\n }\n\n Ok(count)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::rc::Rc;\n\n use std::cell::RefCell;\n\n use hex_literal::*;\n\n\n\n struct State {\n\n started: bool,\n", "file_path": "src/rbsp.rs", "rank": 1, "score": 131124.25740496398 }, { "content": "struct ParamSetIter<'buf>(&'buf[u8], UnitType);\n\n\n\nimpl<'buf> ParamSetIter<'buf> {\n\n pub fn new(buf: &'buf[u8], unit_type: UnitType) -> ParamSetIter<'buf> {\n\n ParamSetIter(buf, unit_type)\n\n }\n\n}\n\nimpl<'buf> Iterator for ParamSetIter<'buf>\n\n{\n\n type Item = Result<&'buf[u8], ParamSetError>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.0.is_empty() {\n\n None\n\n } else {\n\n let len = u16::from(self.0[0]) << 8 | u16::from(self.0[1]);\n\n let data = &self.0[2..];\n\n let res = match NalHeader::new(data[0]) {\n\n Ok(nal_header) => {\n\n if nal_header.nal_unit_type() == self.1 {\n", "file_path": "src/avcc.rs", "rank": 2, "score": 126047.9002822754 }, { "content": "/// Removes _Emulation Prevention_ from the given byte sequence of a single NAL unit, returning the\n\n/// NAL units _Raw Byte Sequence Payload_ (RBSP). Expects to be called without the NAL header byte.\n\npub fn decode_nal<'a>(nal_unit: &'a [u8]) -> Cow<'a, [u8]> {\n\n struct DecoderState<'b> {\n\n data: Cow<'b, [u8]>,\n\n index: usize,\n\n }\n\n\n\n impl<'b> DecoderState<'b> {\n\n pub fn new(data: Cow<'b, [u8]>) -> Self {\n\n DecoderState { \n\n data,\n\n index: 0,\n\n }\n\n }\n\n }\n\n\n\n impl<'b> NalHandler for DecoderState<'b> {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _header: NalHeader) {}\n\n\n", "file_path": "src/rbsp.rs", "rank": 3, "score": 94684.68520967415 }, { "content": "fn h264_reader(c: &mut Criterion) {\n\n let mut f = File::open(\"big_buck_bunny_1080p.h264\").expect(\"file not found\");\n\n let len = f.metadata().unwrap().len();\n\n let mut buf = vec![0; usize::try_from(len).unwrap()];\n\n f.read(&mut buf[..]).unwrap();\n\n let mut ctx = Context::default();\n\n let nal_handler = NullNalHandler {\n\n start: 0,\n\n push: 0,\n\n end: 0,\n\n };\n\n let rbsp_nal_reader = NullRbspNalReader {\n\n decoder: RbspDecoder::new(nal_handler),\n\n decoder_started: false,\n\n };\n\n let nal_reader = NullNalReader {\n\n start: 0,\n\n push: 0,\n\n end: 0,\n\n };\n", "file_path": "benches/bench.rs", "rank": 4, "score": 70504.98575609578 }, { "content": "fn parse_nal(c: &mut Criterion) {\n\n let sps = hex!(\n\n \"64 00 16 AC 1B 1A 80 B0 3D FF FF\n\n 00 28 00 21 6E 0C 0C 0C 80 00 01\n\n F4 00 00 27 10 74 30 07 D0 00 07\n\n A1 25 DE 5C 68 60 0F A0 00 0F 42\n\n 4B BC B8 50\");\n\n let mut group = c.benchmark_group(\"parse_nal\");\n\n group.bench_function(\"sps\", |b| b.iter(|| SeqParameterSet::from_bytes(&sps[..]).unwrap()));\n\n}\n\n\n\ncriterion_group!(benches, h264_reader, parse_nal);\n\ncriterion_main!(benches);\n", "file_path": "benches/bench.rs", "rank": 5, "score": 70504.98575609578 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct SliceType {\n\n family: SliceFamily,\n\n exclusive: SliceExclusive,\n\n}\n\nimpl SliceType {\n\n fn from_id(id: u32) -> Result<SliceType, SliceHeaderError> {\n\n match id {\n\n 0 => Ok(SliceType { family: SliceFamily::P, exclusive: SliceExclusive::NonExclusive }),\n\n 1 => Ok(SliceType { 
family: SliceFamily::B, exclusive: SliceExclusive::NonExclusive }),\n\n 2 => Ok(SliceType { family: SliceFamily::I, exclusive: SliceExclusive::NonExclusive }),\n\n 3 => Ok(SliceType { family: SliceFamily::SP, exclusive: SliceExclusive::NonExclusive }),\n\n 4 => Ok(SliceType { family: SliceFamily::SI, exclusive: SliceExclusive::NonExclusive }),\n\n 5 => Ok(SliceType { family: SliceFamily::P, exclusive: SliceExclusive::Exclusive }),\n\n 6 => Ok(SliceType { family: SliceFamily::B, exclusive: SliceExclusive::Exclusive }),\n\n 7 => Ok(SliceType { family: SliceFamily::I, exclusive: SliceExclusive::Exclusive }),\n\n 8 => Ok(SliceType { family: SliceFamily::SP, exclusive: SliceExclusive::Exclusive }),\n\n 9 => Ok(SliceType { family: SliceFamily::SI, exclusive: SliceExclusive::Exclusive }),\n\n _ => Err(SliceHeaderError::InvalidSliceType(id))\n\n }\n\n }\n", "file_path": "src/nal/slice/mod.rs", "rank": 6, "score": 68348.54420211987 }, { "content": "#[derive(Debug,PartialEq)]\n\nenum PicOrderCountLsb {\n\n Frame(u32),\n\n FieldsAbsolute { top: u32, bottom_delta: i32 },\n\n FieldsDelta([i32; 2]),\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 7, "score": 63611.10520938069 }, { "content": "pub trait Register: Default {\n\n type Ctx;\n\n fn handle(&mut self, ctx: &mut Context<Self::Ctx>, country_code: ItuTT35, payload: &[u8]);\n\n}\n\n\n\npub struct UserDataRegisteredItuTT35Reader<R: Register> {\n\n register: R,\n\n}\n\nimpl<R: Register> UserDataRegisteredItuTT35Reader<R> {\n\n pub fn new(register: R) -> UserDataRegisteredItuTT35Reader<R> {\n\n UserDataRegisteredItuTT35Reader {\n\n register,\n\n }\n\n }\n\n}\n\nimpl<R: Register> SeiCompletePayloadReader for UserDataRegisteredItuTT35Reader<R> {\n\n type Ctx = R::Ctx;\n\n\n\n fn header(&mut self, ctx: &mut Context<Self::Ctx>, payload_type: HeaderType, buf: &[u8]) {\n\n assert_eq!(payload_type, HeaderType::UserDataRegisteredItuTT35);\n", "file_path": "src/nal/sei/user_data_registered_itu_t_t35.rs", "rank": 8, "score": 63384.80617952558 }, { "content": "struct InUnitState {\n\n /// The number of bytes to backtrack if the current sequence of `0x00`s\n\n /// doesn't end the NAL unit.\n\n backtrack_bytes: usize,\n\n}\n\n\n", "file_path": "src/annexb.rs", "rank": 11, "score": 45138.13767259798 }, { "content": "#[derive(Debug)]\n\nenum ParseState {\n\n Start,\n\n OneZero,\n\n TwoZero,\n\n}\n\n\n\n/// Push parser which removes _emulation prevention_ as it calls\n\n/// an inner [NalHandler]. 
Expects to be called without the NAL header byte.\n\npub struct RbspDecoder<R>\n\n where\n\n R: NalHandler\n\n{\n\n state: ParseState,\n\n nal_reader: R,\n\n}\n\nimpl<R> RbspDecoder<R>\n\n where\n\n R: NalHandler\n\n{\n\n pub fn new(nal_reader: R) -> Self {\n", "file_path": "src/rbsp.rs", "rank": 12, "score": 45138.13767259798 }, { "content": "#[derive(Debug)]\n\nenum ParseState {\n\n Start,\n\n StartOneZero,\n\n StartTwoZero,\n\n StartOne,\n\n InUnit,\n\n InUnitOneZero,\n\n InUnitTwoZero,\n\n Error,\n\n End,\n\n}\n\nimpl ParseState {\n\n /// If in a NAL unit (`NalReader`'s `start` has been called but not its `end`),\n\n /// returns an object describing the state.\n\n fn in_unit(&self) -> Option<InUnitState> {\n\n match *self {\n\n ParseState::Start => None,\n\n ParseState::StartOneZero => None,\n\n ParseState::StartTwoZero => None,\n\n ParseState::StartOne => None,\n\n ParseState::InUnit => Some(InUnitState { backtrack_bytes: 0 }),\n\n ParseState::InUnitOneZero => Some(InUnitState { backtrack_bytes: 1 }),\n\n ParseState::InUnitTwoZero => Some(InUnitState { backtrack_bytes: 2 }),\n\n ParseState::Error => None,\n\n ParseState::End => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/annexb.rs", "rank": 13, "score": 45138.13767259798 }, { "content": "#[derive(Debug,PartialEq)]\n\nenum Field {\n\n Top,\n\n Bottom,\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 14, "score": 43630.44616959907 }, { "content": "struct NullNalHandler {\n\n start: u64,\n\n push: u64,\n\n end: u64,\n\n}\n\nimpl NalHandler for NullNalHandler {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _header: NalHeader) {\n\n self.start += 1;\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, _buf: &[u8]) {\n\n self.push += 1;\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n self.end += 1;\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 15, "score": 43626.495102454035 }, { "content": "struct NullNalReader {\n\n start: u64,\n\n push: u64,\n\n end: u64,\n\n}\n\nimpl NalReader for NullNalReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n self.start += 1;\n\n }\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, _buf: &[u8]) {\n\n self.push += 1;\n\n }\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n self.end += 1;\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 16, "score": 43626.495102454035 }, { "content": "#[derive(Default)]\n\nstruct AvcBuilder {\n\n started: bool,\n\n cur: Vec<u8>,\n\n}\n\n\n\nimpl h264_reader::annexb::NalReader for AvcBuilder {\n\n type Ctx = Vec<u8>;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n assert!(!self.started);\n\n self.started = true;\n\n }\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n assert!(self.started);\n\n assert!(!buf.is_empty()); // useless empty push.\n\n self.cur.extend_from_slice(buf);\n\n }\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>) {\n\n assert!(self.started);\n\n self.started = false;\n", "file_path": "fuzz/fuzz_targets/annexb.rs", "rank": 17, "score": 42270.27088462817 }, { "content": "#[derive(Debug,PartialEq)]\n\nenum SliceFamily {\n\n P,\n\n B,\n\n I,\n\n SP,\n\n SI\n\n}\n", "file_path": "src/nal/slice/mod.rs", "rank": 18, "score": 42270.08246166817 }, { "content": "#[derive(Debug,PartialEq)]\n\nenum FieldPic {\n\n Frame,\n\n Field(Field),\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 19, "score": 42270.08246166817 }, { "content": "#[derive(Debug,PartialEq)]\n\nenum 
SliceExclusive {\n\n /// All slices in the picture have the same type\n\n Exclusive,\n\n /// Other slices in the picture may have a different type than the current slice\n\n NonExclusive,\n\n}\n", "file_path": "src/nal/slice/mod.rs", "rank": 20, "score": 42270.08246166817 }, { "content": "#[derive(Debug)]\n\nenum ColourPlane {\n\n /// Indicates the _chroma_ colour plane\n\n Y,\n\n /// Indicates the _blue-difference_ colour plane\n\n Cb,\n\n /// Indicates the _red-difference_ colour plane\n\n Cr,\n\n}\n\n#[derive(Debug)]\n\npub enum ColourPlaneError {\n\n InvalidId(u8),\n\n}\n\nimpl ColourPlane {\n\n fn from_id(id: u8) -> Result<ColourPlane, ColourPlaneError> {\n\n match id {\n\n 0 => Ok(ColourPlane::Y),\n\n 1 => Ok(ColourPlane::Cb),\n\n 2 => Ok(ColourPlane::Cr),\n\n _ => Err(ColourPlaneError::InvalidId(id))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 21, "score": 42266.13139452314 }, { "content": "#[derive(Debug)]\n\nstruct PredWeight {\n\n weight: i32,\n\n offset: i32,\n\n}\n", "file_path": "src/nal/slice/mod.rs", "rank": 22, "score": 42266.13139452314 }, { "content": "#[derive(Debug)]\n\nenum NalSwitchState {\n\n Start,\n\n Handling(UnitType),\n\n Ignoring,\n\n}\n\n// TODO: generate enum at compile time rather than Vec<Box<>>\n\npub struct NalSwitch<Ctx> {\n\n readers_by_id: Vec<Option<Box<RefCell<dyn NalHandler<Ctx=Ctx>>>>>,\n\n state: NalSwitchState,\n\n}\n\nimpl<Ctx> Default for NalSwitch<Ctx> {\n\n fn default() -> Self {\n\n NalSwitch {\n\n readers_by_id: Vec::new(),\n\n state: NalSwitchState::Start,\n\n }\n\n }\n\n}\n\nimpl<Ctx> NalSwitch<Ctx> {\n\n pub fn put_handler(&mut self, unit_type: UnitType, handler: Box<RefCell<dyn NalHandler<Ctx=Ctx>>>) {\n", "file_path": "src/nal/mod.rs", "rank": 23, "score": 42266.13139452314 }, { "content": "#[derive(Debug, Clone)]\n\nstruct PicScalingMatrix {\n\n // TODO\n\n}\n\nimpl PicScalingMatrix {\n\n fn read(r: &mut RbspBitReader<'_>, sps: &sps::SeqParameterSet, transform_8x8_mode_flag: bool) -> Result<Option<PicScalingMatrix>,PpsError> {\n\n let pic_scaling_matrix_present_flag = r.read_bool()?;\n\n Ok(if pic_scaling_matrix_present_flag {\n\n let mut scaling_list4x4 = vec!();\n\n let mut scaling_list8x8 = vec!();\n\n\n\n let count = if transform_8x8_mode_flag {\n\n if sps.chroma_info.chroma_format == sps::ChromaFormat::YUV444 { 12 } else { 8 }\n\n } else {\n\n 0\n\n };\n\n for i in 0..6+count {\n\n let seq_scaling_list_present_flag = r.read_bool()?;\n\n if seq_scaling_list_present_flag {\n\n if i < 6 {\n\n scaling_list4x4.push(sps::ScalingList::read(r, 16).map_err(PpsError::ScalingMatrix)?);\n", "file_path": "src/nal/pps.rs", "rank": 24, "score": 42266.13139452314 }, { "content": "struct NullRbspNalReader {\n\n decoder: RbspDecoder<NullNalHandler>,\n\n decoder_started: bool,\n\n}\n\nimpl NalReader for NullRbspNalReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n assert!(!self.decoder_started);\n\n }\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, mut buf: &[u8]) {\n\n if !self.decoder_started && !buf.is_empty() {\n\n let hdr = NalHeader::new(buf[0]).unwrap();\n\n self.decoder.start(ctx, hdr);\n\n buf = &buf[1..];\n\n self.decoder_started = true;\n\n }\n\n if self.decoder_started {\n\n self.decoder.push(ctx, buf);\n\n }\n\n }\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>) {\n\n assert!(self.decoder_started);\n\n self.decoder.end(ctx);\n\n self.decoder_started = false;\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 25, "score": 42266.13139452314 }, { 
"content": "enum ParseState {\n\n Unstarted,\n\n Start(NalHeader),\n\n Continue(NalHeader),\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 26, "score": 42266.13139452314 }, { "content": "#[derive(Default)]\n\nstruct NalCapture {\n\n buf: Vec<u8>,\n\n}\n\nimpl NalHandler for NalCapture {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, header: NalHeader) {\n\n self.buf.clear();\n\n }\n\n\n\n fn push(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, buf: &[u8]) {\n\n self.buf.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>) {\n\n }\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 27, "score": 41039.55916907769 }, { "content": "#[derive(Debug)]\n\nstruct PredWeightTable {\n\n luma_log2_weight_denom: u32,\n\n chroma_log2_weight_denom: Option<u32>,\n\n luma_weights: Vec<Option<PredWeight>>,\n\n chroma_weights: Vec<Vec<PredWeight>>,\n\n}\n\nimpl PredWeightTable {\n\n fn read(r: &mut RbspBitReader<'_>, slice_type: &SliceType, pps: &pps::PicParameterSet, sps: &sps::SeqParameterSet, num_ref_active: &Option<NumRefIdxActive>) -> Result<PredWeightTable, SliceHeaderError> {\n\n let chroma_array_type = if sps.chroma_info.separate_colour_plane_flag {\n\n // TODO: \"Otherwise (separate_colour_plane_flag is equal to 1), ChromaArrayType is\n\n // set equal to 0.\" ...does this mean ChromaFormat::Monochrome then?\n\n sps::ChromaFormat::Monochrome\n\n } else {\n\n sps.chroma_info.chroma_format\n\n };\n\n let luma_log2_weight_denom = r.read_ue_named(\"luma_log2_weight_denom\")?;\n\n let chroma_log2_weight_denom = if chroma_array_type != sps::ChromaFormat::Monochrome {\n\n Some(r.read_ue_named(\"chroma_log2_weight_denom\")?)\n\n } else {\n\n None\n", "file_path": "src/nal/slice/mod.rs", "rank": 28, "score": 41035.41967897267 }, { "content": "struct InProgressSlice {\n\n header: h264_reader::nal::NalHeader,\n\n buf: Vec<u8>,\n\n}\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 29, "score": 41035.41967897267 }, { "content": "#[derive(Debug)]\n\nenum SeiHeaderState {\n\n Begin,\n\n PayloadType { payload_type: u32 },\n\n PayloadSize { payload_type: HeaderType, payload_size: u32 },\n\n Payload { payload_type: HeaderType, payload_size: u32, consumed_size: u32 },\n\n End,\n\n}\n\n\n", "file_path": "src/nal/sei/mod.rs", "rank": 30, "score": 41035.41967897267 }, { "content": "#[derive(Debug)]\n\nenum ModificationOfPicNums {\n\n Subtract(u32),\n\n Add(u32),\n\n LongTermRef(u32),\n\n}\n", "file_path": "src/nal/slice/mod.rs", "rank": 31, "score": 41035.41967897267 }, { "content": "struct SliceFuzz {\n\n current_slice: Option<InProgressSlice>,\n\n}\n\nimpl SliceFuzz {\n\n pub fn new() -> SliceFuzz {\n\n SliceFuzz {\n\n current_slice: None,\n\n }\n\n }\n\n}\n\nimpl h264_reader::nal::NalHandler for SliceFuzz {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, header: h264_reader::nal::NalHeader) {\n\n let mut buf = Vec::new();\n\n buf.push(header.into());\n\n self.current_slice = Some(InProgressSlice {\n\n header,\n\n buf,\n\n });\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 32, "score": 41035.41967897267 }, { "content": "pub trait NalReader {\n\n type Ctx;\n\n\n\n /// Starts a NAL unit.\n\n fn start(&mut self, ctx: &mut Context<Self::Ctx>);\n\n\n\n /// Pushes a non-empty buffer as part of a NAL unit.\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, buf: &[u8]);\n\n\n\n /// Ends a NAL unit, which could be empty.\n\n fn 
end(&mut self, ctx: &mut Context<Self::Ctx>);\n\n}\n\n\n\n/// Push parser for Annex B format which delegates to a [NalReader].\n\n///\n\n/// Guarantees that the bytes supplied to `NalReader`—the concatenation of all\n\n/// `buf`s supplied to `NalReader::push`—will be exactly the same for a given\n\n/// Annex B stream, regardless of boundaries of `AnnexBReader::push` calls.\n\npub struct AnnexBReader<R, Ctx>\n\n where\n", "file_path": "src/annexb.rs", "rank": 33, "score": 40493.85262945283 }, { "content": "#[derive(Debug)]\n\nenum DecRefPicMarking {\n\n Idr {\n\n no_output_of_prior_pics_flag: bool,\n\n long_term_reference_flag: bool,\n\n },\n\n /// `adaptive_ref_pic_marking_mode_flag` equal to `0`\n\n SlidingWindow,\n\n /// `adaptive_ref_pic_marking_mode_flag` equal to `1`\n\n Adaptive(Vec<MemoryManagementControlOperation>),\n\n}\n\nimpl DecRefPicMarking {\n\n fn read(r: &mut RbspBitReader<'_>, header: NalHeader) -> Result<DecRefPicMarking, SliceHeaderError> {\n\n Ok(if header.nal_unit_type() == crate::nal::UnitType::SliceLayerWithoutPartitioningIdr {\n\n DecRefPicMarking::Idr {\n\n no_output_of_prior_pics_flag: r.read_bool_named(\"no_output_of_prior_pics_flag\")?,\n\n long_term_reference_flag: r.read_bool_named(\"long_term_reference_flag\")?,\n\n }\n\n } else if r.read_bool_named(\"adaptive_ref_pic_marking_mode_flag\")? {\n\n let mut ctl = vec![];\n\n loop {\n", "file_path": "src/nal/slice/mod.rs", "rank": 35, "score": 39916.66795408563 }, { "content": "#[derive(Debug)]\n\nenum MemoryManagementControlOperation {\n\n /// `memory_management_control_operation` value of `1`\n\n ShortTermUnusedForRef { difference_of_pic_nums_minus1: u32 },\n\n /// `memory_management_control_operation` value of `2`\n\n LongTermUnusedForRef { long_term_pic_num: u32 },\n\n /// `memory_management_control_operation` value of `3`\n\n ShortTermUsedForLongTerm { difference_of_pic_nums_minus1: u32, long_term_frame_idx: u32 },\n\n /// `memory_management_control_operation` value of `4`\n\n MaxUsedLongTermFrameRef { max_long_term_frame_idx_plus1: u32 },\n\n /// `memory_management_control_operation` value of `5`\n\n AllRefPicturesUnused,\n\n /// `memory_management_control_operation` value of `6`\n\n CurrentUsedForLongTerm { long_term_frame_idx: u32 },\n\n}\n\n\n\n/// Decoded reference picture marking\n", "file_path": "src/nal/slice/mod.rs", "rank": 36, "score": 39916.66795408563 }, { "content": "#[derive(Debug)]\n\nenum RefPicListModifications {\n\n I,\n\n P {\n\n ref_pic_list_modification_l0: Vec<ModificationOfPicNums>,\n\n },\n\n B {\n\n ref_pic_list_modification_l0: Vec<ModificationOfPicNums>,\n\n ref_pic_list_modification_l1: Vec<ModificationOfPicNums>,\n\n },\n\n}\n\nimpl RefPicListModifications {\n\n fn read(slice_family: &SliceFamily, r: &mut RbspBitReader<'_>) -> Result<RefPicListModifications, SliceHeaderError> {\n\n Ok(match slice_family {\n\n SliceFamily::I | SliceFamily::SI => RefPicListModifications::I,\n\n SliceFamily::B => RefPicListModifications::B {\n\n ref_pic_list_modification_l0: Self::read_list(r)?,\n\n ref_pic_list_modification_l1: Self::read_list(r)?,\n\n },\n\n SliceFamily::P | SliceFamily::SP => RefPicListModifications::P {\n\n ref_pic_list_modification_l0: Self::read_list(r)?,\n", "file_path": "src/nal/slice/mod.rs", "rank": 37, "score": 39916.66795408563 }, { "content": "struct PicTimingFuzz;\n\nimpl nal::sei::pic_timing::PicTimingHandler for PicTimingFuzz {\n\n type Ctx = ();\n\n\n\n fn handle(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, pic_timing: nal::sei::pic_timing::PicTiming) {\n\n 
println!(\" {:?}\", pic_timing);\n\n }\n\n}\n\n\n\nh264_reader::sei_switch!{\n\n SeiSwitch<()> {\n\n //BufferingPeriod: h264_reader::nal::sei::buffering_period::BufferingPeriodPayloadReader\n\n // => h264_reader::nal::sei::buffering_period::BufferingPeriodPayloadReader::new(),\n\n //UserDataRegisteredItuTT35: h264_reader::nal::sei::user_data_registered_itu_t_t35::UserDataRegisteredItuTT35Reader<TT35Switch>\n\n // => h264_reader::nal::sei::user_data_registered_itu_t_t35::UserDataRegisteredItuTT35Reader::new(TT35Switch::default()),\n\n PicTiming: h264_reader::nal::sei::pic_timing::PicTimingReader<PicTimingFuzz>\n\n => h264_reader::nal::sei::pic_timing::PicTimingReader::new(PicTimingFuzz),\n\n }\n\n}\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 38, "score": 39916.66795408563 }, { "content": "#[derive(Debug)]\n\nenum NumRefIdxActive {\n\n P{ num_ref_idx_l0_active_minus1: u32 },\n\n B{ num_ref_idx_l0_active_minus1: u32, num_ref_idx_l1_active_minus1: u32 }\n\n}\n\nimpl NumRefIdxActive {\n\n fn num_ref_idx_l0_active_minus1(&self) -> u32 {\n\n match *self {\n\n NumRefIdxActive::P { num_ref_idx_l0_active_minus1 } => num_ref_idx_l0_active_minus1,\n\n NumRefIdxActive::B { num_ref_idx_l0_active_minus1, .. } => num_ref_idx_l0_active_minus1,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/nal/slice/mod.rs", "rank": 39, "score": 39916.66795408563 }, { "content": "// TODO: rename to 'RbspHandler' or something, to indicate it's only for post-emulation-prevention-bytes data\n\npub trait NalHandler {\n\n type Ctx;\n\n\n\n fn start(&mut self, ctx: &mut Context<Self::Ctx>, header: NalHeader);\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, buf: &[u8]);\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use hex_literal::*;\n\n\n\n #[test]\n\n fn header() {\n\n let h = NalHeader::new(0b0101_0001).unwrap();\n\n assert_eq!(0b10, h.nal_ref_idc());\n\n assert_eq!(UnitType::Reserved(17), h.nal_unit_type());\n\n }\n\n\n", "file_path": "src/nal/mod.rs", "rank": 40, "score": 39124.61995870987 }, { "content": "struct FuzzSeiPayoadReader {\n\n switch: SeiSwitch,\n\n}\n\nimpl h264_reader::nal::sei::SeiIncrementalPayloadReader for FuzzSeiPayoadReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, payload_type: h264_reader::nal::sei::HeaderType, payload_size: u32) {\n\n //println!(\" SEI: {:?} size={}\", payload_type, payload_size);\n\n self.switch.start(ctx, payload_type, payload_size)\n\n }\n\n\n\n fn push(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, buf: &[u8]) {\n\n self.switch.push(ctx, buf)\n\n }\n\n\n\n fn end(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>) {\n\n self.switch.end(ctx)\n\n }\n\n\n\n fn reset(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>) {\n\n self.switch.reset(ctx)\n\n }\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 41, "score": 38895.263116268536 }, { "content": "pub trait PicTimingHandler {\n\n type Ctx;\n\n fn handle(&mut self, ctx: &mut Context<Self::Ctx>, pic_timing: PicTiming);\n\n}\n\npub struct PicTimingReader<H: PicTimingHandler> {\n\n handler: H,\n\n}\n\nimpl<H: PicTimingHandler> PicTimingReader<H> {\n\n pub fn new(handler: H) -> Self {\n\n PicTimingReader {\n\n handler,\n\n }\n\n }\n\n}\n\nimpl<H: PicTimingHandler> SeiCompletePayloadReader for PicTimingReader<H> {\n\n type Ctx = H::Ctx;\n\n\n\n fn header(&mut self, ctx: &mut Context<Self::Ctx>, payload_type: HeaderType, buf: &[u8]) {\n\n assert_eq!(payload_type, 
HeaderType::PicTiming);\n\n match PicTiming::read(ctx, buf) {\n\n Err(e) => error!(\"Failure reading pic_timing: {:?}\", e),\n\n Ok(pic_timing) => {\n\n self.handler.handle(ctx, pic_timing);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 42, "score": 35731.775127251196 }, { "content": "pub trait SeiIncrementalPayloadReader {\n\n type Ctx;\n\n fn start(&mut self, ctx: &mut Context<Self::Ctx>, payload_type: HeaderType, payload_size: u32);\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, buf: &[u8]);\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>);\n\n fn reset(&mut self, ctx: &mut Context<Self::Ctx>);\n\n}\n\n\n\npub struct SeiBuffer<R: SeiCompletePayloadReader> {\n\n payload_type: Option<HeaderType>,\n\n buf: Vec<u8>,\n\n reader: R,\n\n}\n\nimpl<R: SeiCompletePayloadReader> SeiBuffer<R> {\n\n pub fn new(reader: R) -> Self {\n\n SeiBuffer {\n\n payload_type: None,\n\n buf: Vec::new(),\n\n reader,\n\n }\n", "file_path": "src/nal/sei/mod.rs", "rank": 43, "score": 35731.775127251196 }, { "content": "pub trait SeiCompletePayloadReader {\n\n type Ctx;\n\n fn header(&mut self, ctx: &mut Context<Self::Ctx>, payload_type: HeaderType, buf: &[u8]);\n\n}\n\n\n", "file_path": "src/nal/sei/mod.rs", "rank": 44, "score": 35731.775127251196 }, { "content": " use std::cell::RefCell;\n\n\n\n #[derive(Default)]\n\n struct State {\n\n started: u32,\n\n ended: u32,\n\n data: Vec<u8>,\n\n }\n\n struct MockReader {\n\n state: Rc<RefCell<State>>\n\n }\n\n impl SeiIncrementalPayloadReader for MockReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _payload_type: HeaderType, _payload_size: u32) {\n\n self.state.borrow_mut().started += 1;\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n self.state.borrow_mut().data.extend_from_slice(buf);\n", "file_path": "src/nal/sei/mod.rs", "rank": 46, "score": 33.28320486776622 }, { "content": "impl<Ctx> Default for SeqParameterSetNalHandler<Ctx> {\n\n fn default() -> Self {\n\n SeqParameterSetNalHandler {\n\n buf: Vec::new(),\n\n phantom: marker::PhantomData,\n\n }\n\n }\n\n}\n\nimpl<Ctx> NalHandler for SeqParameterSetNalHandler<Ctx> {\n\n type Ctx = Ctx;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Ctx>, header: NalHeader) {\n\n assert_eq!(header.nal_unit_type(), super::UnitType::SeqParameterSet);\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Ctx>, buf: &[u8]) {\n\n self.buf.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut Context<Ctx>) {\n", "file_path": "src/nal/sps.rs", "rank": 47, "score": 32.02088936010842 }, { "content": " }\n\n}\n\nimpl<R: SeiCompletePayloadReader> SeiIncrementalPayloadReader for SeiBuffer<R> {\n\n type Ctx = R::Ctx;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, payload_type: HeaderType, _payload_size: u32) {\n\n self.payload_type = Some(payload_type);\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n self.buf.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>) {\n\n self.reader.header(ctx, self.payload_type.unwrap(), &self.buf[..]);\n\n self.buf.clear();\n\n self.payload_type = None;\n\n }\n\n\n\n fn reset(&mut self, _ctx: &mut Context<Self::Ctx>) {\n", "file_path": "src/nal/sei/mod.rs", "rank": 48, "score": 30.880319225076715 }, { "content": " type Ctx = R::Ctx;\n\n\n\n fn start(&mut self, ctx: &mut Context<Self::Ctx>, header: NalHeader) {\n\n assert_eq!(header.nal_unit_type(), super::UnitType::SEI);\n\n self.reader.start(ctx, header);\n\n 
}\n\n\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n self.reader.push(ctx, buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut Context<Self::Ctx>) {\n\n self.reader.end(ctx);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::rc::Rc;\n", "file_path": "src/nal/sei/mod.rs", "rank": 49, "score": 29.340421098541746 }, { "content": "\n\n fn read_slice_groups(r: &mut RbspBitReader<'_>) -> Result<Option<SliceGroup>,PpsError> {\n\n let num_slice_groups_minus1 = r.read_ue_named(\"num_slice_groups_minus1\")?;\n\n Ok(if num_slice_groups_minus1 > 0 {\n\n Some(SliceGroup::read(r, num_slice_groups_minus1)?)\n\n } else {\n\n None\n\n })\n\n }\n\n}\n\n\n\npub struct PicParameterSetNalHandler<Ctx> {\n\n buf: Vec<u8>,\n\n phantom: marker::PhantomData<Ctx>\n\n}\n\n\n\nimpl<Ctx> Default for PicParameterSetNalHandler<Ctx> {\n\n fn default() -> Self {\n\n PicParameterSetNalHandler {\n\n buf: Vec::new(),\n", "file_path": "src/nal/pps.rs", "rank": 50, "score": 29.03557866204224 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct PicTiming {\n\n pub delays: Option<Delays>,\n\n pub pic_struct: Option<PicStruct>,\n\n}\n\nimpl PicTiming {\n\n pub fn read<Ctx>(ctx: &mut Context<Ctx>, buf: &[u8]) -> Result<PicTiming, PicTimingError> {\n\n let mut r = RbspBitReader::new(buf);\n\n let seq_parameter_set_id = ParamSetId::from_u32(0).unwrap();\n\n match ctx.sps_by_id(seq_parameter_set_id) {\n\n None => Err(PicTimingError::UndefinedSeqParamSetId(seq_parameter_set_id)),\n\n Some(sps) => {\n\n Ok(PicTiming {\n\n delays: Self::read_delays(&mut r, sps)?,\n\n pic_struct: Self::read_pic_struct(&mut r, sps)?,\n\n })\n\n }\n\n }\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 51, "score": 28.9535557126694 }, { "content": " ExpGolombTooLarge(&'static str),\n\n}\n\n\n\npub struct RbspBitReader<'buf> {\n\n reader: bitstream_io::read::BitReader<std::io::Cursor<&'buf [u8]>, bitstream_io::BigEndian>,\n\n}\n\nimpl<'buf> RbspBitReader<'buf> {\n\n pub fn new(buf: &'buf [u8]) -> Self {\n\n RbspBitReader {\n\n reader: bitstream_io::read::BitReader::new(std::io::Cursor::new(buf)),\n\n }\n\n }\n\n\n\n pub fn read_ue_named(&mut self, name: &'static str) -> Result<u32,RbspBitReaderError> {\n\n let count = count_zero_bits(&mut self.reader, name)?;\n\n if count > 0 {\n\n let val = self.read_u32(count)?;\n\n Ok((1 << count) -1 + val)\n\n } else {\n\n Ok(0)\n", "file_path": "src/rbsp.rs", "rank": 52, "score": 27.71757594178394 }, { "content": " struct MockHandler;\n\n impl NalHandler for MockHandler {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, header: NalHeader) {\n\n assert_eq!(header.nal_unit_type(), UnitType::SeqParameterSet);\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n let expected = hex!(\n\n \"64 00 0A AC 72 84 44 26 84 00 00\n\n 00 04 00 00 00 CA 3C 48 96 11 80\");\n\n assert_eq!(buf, &expected[..])\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/nal/mod.rs", "rank": 53, "score": 27.03847138165554 }, { "content": "\n\n pub fn into_handler(self) -> R {\n\n self.nal_reader\n\n }\n\n}\n\nimpl<R> NalHandler for RbspDecoder<R>\n\n where\n\n R: NalHandler\n\n{\n\n type Ctx = R::Ctx;\n\n\n\n fn start(&mut self, ctx: &mut Context<Self::Ctx>, header: NalHeader) {\n\n self.state = ParseState::Start;\n\n self.nal_reader.start(ctx, header);\n\n }\n\n\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, mut buf: &[u8]) {\n\n // buf[0..i] hasn't yet been emitted and 
is RBSP (has no emulation_prevention_three_bytes).\n\n // self.state describes the state before buf[i].\n\n // buf[i..] has yet to be examined.\n", "file_path": "src/rbsp.rs", "rank": 54, "score": 26.396961617475856 }, { "content": " phantom: marker::PhantomData,\n\n }\n\n }\n\n}\n\nimpl<Ctx> NalHandler for PicParameterSetNalHandler<Ctx> {\n\n type Ctx = Ctx;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Ctx>, header: NalHeader) {\n\n assert_eq!(header.nal_unit_type(), super::UnitType::PicParameterSet);\n\n }\n\n\n\n fn push(&mut self, _ctx: &mut Context<Ctx>, buf: &[u8]) {\n\n self.buf.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut Context<Ctx>) {\n\n let pps = PicParameterSet::from_bytes(ctx, &self.buf[..]);\n\n self.buf.clear();\n\n match pps {\n\n Ok(pps) => {\n", "file_path": "src/nal/pps.rs", "rank": 55, "score": 25.544270486607264 }, { "content": " }\n\n }\n\n impl $crate::nal::sei::SeiIncrementalPayloadReader for $struct_name {\n\n type Ctx = $ctx;\n\n\n\n fn start(&mut self, ctx: &mut $crate::Context<Self::Ctx>, payload_type: $crate::nal::sei::HeaderType, payload_size: u32) {\n\n self.current_type = Some(payload_type);\n\n match payload_type {\n\n $(\n\n $crate::nal::sei::HeaderType::$name => self.$name.start(ctx, payload_type, payload_size),\n\n )*\n\n _ => (),\n\n }\n\n }\n\n\n\n fn push(&mut self, ctx: &mut $crate::Context<Self::Ctx>, buf: &[u8]) {\n\n match self.current_type {\n\n $(\n\n Some($crate::nal::sei::HeaderType::$name) => self.$name.push(ctx, buf),\n\n )*\n", "file_path": "src/nal/sei/mod.rs", "rank": 56, "score": 24.791724835067846 }, { "content": " }),\n\n _ => Err(PpsError::InvalidSliceGroupMapType(slice_group_map_type))\n\n }\n\n }\n\n\n\n fn read_run_lengths(r: &mut RbspBitReader<'_>, num_slice_groups_minus1: u32) -> Result<Vec<u32>,PpsError> {\n\n let mut run_length_minus1 = Vec::with_capacity(num_slice_groups_minus1 as usize + 1);\n\n for _ in 0..num_slice_groups_minus1+1 {\n\n run_length_minus1.push(r.read_ue_named(\"run_length_minus1\")?);\n\n }\n\n Ok(run_length_minus1)\n\n }\n\n\n\n fn read_rectangles(r: &mut RbspBitReader<'_>, num_slice_groups_minus1: u32) -> Result<Vec<SliceRect>,PpsError> {\n\n let mut run_length_minus1 = Vec::with_capacity(num_slice_groups_minus1 as usize + 1);\n\n for _ in 0..num_slice_groups_minus1+1 {\n\n run_length_minus1.push(SliceRect::read(r)?);\n\n }\n\n Ok(run_length_minus1)\n\n }\n", "file_path": "src/nal/pps.rs", "rank": 57, "score": 23.349672581793925 }, { "content": "impl Default for SeqScalingMatrix {\n\n fn default() -> Self {\n\n SeqScalingMatrix { }\n\n }\n\n}\n\nimpl SeqScalingMatrix {\n\n fn read(r: &mut RbspBitReader<'_>, chroma_format_idc: u32) -> Result<SeqScalingMatrix,ScalingMatrixError> {\n\n let mut scaling_list4x4 = vec!();\n\n let mut scaling_list8x8 = vec!();\n\n\n\n let count = if chroma_format_idc == 3 { 12 } else { 8 };\n\n for i in 0..count {\n\n let seq_scaling_list_present_flag = r.read_bool()?;\n\n if seq_scaling_list_present_flag {\n\n if i < 6 {\n\n scaling_list4x4.push(ScalingList::read(r, 16)?);\n\n } else {\n\n scaling_list8x8.push(ScalingList::read(r, 64)?);\n\n }\n\n }\n", "file_path": "src/nal/sps.rs", "rank": 58, "score": 23.23908463793292 }, { "content": " BadSeqParamSetId(ParamSetIdError),\n\n /// A field in the bitstream had a value too large for a subsequent calculation\n\n FieldValueTooLarge { name: &'static str, value: u32 },\n\n /// The frame-cropping values are too large vs. 
the coded picture size,\n\n CroppingError(FrameCropping),\n\n /// The `cpb_cnt_minus1` field must be between 0 and 31 inclusive.\n\n CpbCountOutOfRange(u32),\n\n}\n\n\n\nimpl From<RbspBitReaderError> for SpsError {\n\n fn from(e: RbspBitReaderError) -> Self {\n\n SpsError::RbspReaderError(e)\n\n }\n\n}\n\n\n\npub struct SeqParameterSetNalHandler<Ctx> {\n\n buf: Vec<u8>,\n\n phantom: marker::PhantomData<Ctx>\n\n}\n\n\n", "file_path": "src/nal/sps.rs", "rank": 59, "score": 23.084181561752622 }, { "content": " self.buf.clear();\n\n }\n\n}\n\n\n\npub struct SeiHeaderReader<R: SeiIncrementalPayloadReader> {\n\n state: SeiHeaderState,\n\n reader: R,\n\n}\n\nimpl<R: SeiIncrementalPayloadReader> SeiHeaderReader<R> {\n\n pub fn new(reader: R) -> Self {\n\n SeiHeaderReader {\n\n state: SeiHeaderState::Begin,\n\n reader,\n\n }\n\n }\n\n}\n\nimpl<R: SeiIncrementalPayloadReader> NalHandler for SeiHeaderReader<R> {\n\n type Ctx = R::Ctx;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, header: NalHeader) {\n", "file_path": "src/nal/sei/mod.rs", "rank": 60, "score": 22.91769134407182 }, { "content": " self.nal_reader.end(ctx);\n\n }\n\n self.to(ParseState::End);\n\n }\n\n\n\n fn to(&mut self, new_state: ParseState) {\n\n self.state = new_state;\n\n }\n\n\n\n /// count must be 2 or less\n\n fn emit_fake(&mut self, ctx: &mut Context<Ctx>, count: usize) {\n\n let fake = [0u8; 2];\n\n self.nal_reader.push(ctx, &fake[..count]);\n\n }\n\n\n\n fn maybe_emit(&mut self, ctx: &mut Context<Ctx>, buf:&[u8], start: Option<usize>, end: usize, backtrack: usize) {\n\n match start {\n\n Some(s) if s + backtrack < end => {\n\n self.nal_reader.push(ctx, &buf[s..end - backtrack]);\n\n },\n", "file_path": "src/annexb.rs", "rank": 61, "score": 22.84474805313168 }, { "content": " } else {\n\n Some(r.read_i32(time_offset_length)?)\n\n };\n\n Ok(ClockTimestamp {\n\n ct_type,\n\n nuit_field_based_flag,\n\n counting_type,\n\n discontinuity_flag,\n\n cnt_dropped_flag,\n\n n_frames,\n\n smh,\n\n time_offset,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PicStruct {\n\n pub pic_struct: PicStructType,\n\n pub clock_timestamps: Vec<Option<ClockTimestamp>>,\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 62, "score": 22.51140150982888 }, { "content": " pub constraint_flags: ConstraintFlags,\n\n pub level_idc: u8,\n\n pub seq_parameter_set_id: ParamSetId,\n\n pub chroma_info: ChromaInfo,\n\n pub log2_max_frame_num_minus4: u8,\n\n pub pic_order_cnt: PicOrderCntType,\n\n pub max_num_ref_frames: u32,\n\n pub gaps_in_frame_num_value_allowed_flag: bool,\n\n pub pic_width_in_mbs_minus1: u32,\n\n pub pic_height_in_map_units_minus1: u32,\n\n pub frame_mbs_flags: FrameMbsFlags,\n\n pub direct_8x8_inference_flag: bool,\n\n pub frame_cropping: Option<FrameCropping>,\n\n pub vui_parameters: Option<VuiParameters>,\n\n}\n\nimpl SeqParameterSet {\n\n pub fn from_bytes(buf: &[u8]) -> Result<SeqParameterSet, SpsError> {\n\n let mut r = RbspBitReader::new(buf);\n\n let profile_idc = r.read_u8(8)?.into();\n\n let sps = SeqParameterSet {\n", "file_path": "src/nal/sps.rs", "rank": 63, "score": 22.12114087759747 }, { "content": " }\n\n }\n\n\n\n pub fn read_se_named(&mut self, name: &'static str) -> Result<i32, RbspBitReaderError> {\n\n Ok(Self::golomb_to_signed(self.read_ue_named(name)?))\n\n }\n\n\n\n pub fn read_bool(&mut self) -> Result<bool, RbspBitReaderError> {\n\n self.reader.read_bit().map_err( |e| RbspBitReaderError::ReaderError(e) )\n\n }\n\n\n\n pub fn read_bool_named(&mut self, name: &'static str) -> Result<bool, 
RbspBitReaderError> {\n\n self.reader.read_bit().map_err( |e| RbspBitReaderError::ReaderErrorFor(name, e) )\n\n }\n\n\n\n pub fn read_u8(&mut self, bit_count: u32) -> Result<u8, RbspBitReaderError> {\n\n self.reader.read(u32::from(bit_count)).map_err( |e| RbspBitReaderError::ReaderError(e) )\n\n }\n\n\n\n pub fn read_u16(&mut self, bit_count: u8) -> Result<u16, RbspBitReaderError> {\n", "file_path": "src/rbsp.rs", "rank": 64, "score": 21.87909299467812 }, { "content": " fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n let dest = self.index..(self.index + buf.len());\n\n\n\n if &self.data[dest.clone()] != buf {\n\n self.data.to_mut()[dest].copy_from_slice(buf);\n\n }\n\n\n\n self.index += buf.len();\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n if let Cow::Owned(vec) = &mut self.data {\n\n vec.truncate(self.index);\n\n }\n\n }\n\n }\n\n\n\n let state = DecoderState::new(Cow::Borrowed(nal_unit));\n\n\n\n let mut decoder = RbspDecoder::new(state);\n", "file_path": "src/rbsp.rs", "rank": 65, "score": 21.54844165742293 }, { "content": " let pic_struct = PicStructType::from_id(r.read_u8(4)?)?;\n\n let clock_timestamps = Self::read_clock_timestamps(r, &pic_struct, sps)?;\n\n\n\n Some(PicStruct {\n\n pic_struct,\n\n clock_timestamps,\n\n })\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n })\n\n }\n\n\n\n fn read_clock_timestamps(r: &mut RbspBitReader<'_>, pic_struct: &PicStructType, sps: &sps::SeqParameterSet) -> Result<Vec<Option<ClockTimestamp>>,PicTimingError> {\n\n let mut res = Vec::new();\n\n for _ in 0..pic_struct.num_clock_timestamps() {\n\n res.push(if r.read_bool_named(\"clock_timestamp_flag\")? {\n\n Some(ClockTimestamp::read(r, sps)?)\n\n } else {\n\n None\n\n });\n\n }\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 66, "score": 21.493947284946298 }, { "content": " self.reader.read(u32::from(bit_count)).map_err( |e| RbspBitReaderError::ReaderError(e) )\n\n }\n\n\n\n pub fn read_u32(&mut self, bit_count: u8) -> Result<u32, RbspBitReaderError> {\n\n self.reader.read(u32::from(bit_count)).map_err( |e| RbspBitReaderError::ReaderError(e) )\n\n }\n\n\n\n pub fn read_i32(&mut self, bit_count: u8) -> Result<i32, RbspBitReaderError> {\n\n self.reader.read(u32::from(bit_count)).map_err( |e| RbspBitReaderError::ReaderError(e) )\n\n }\n\n\n\n /// Returns true if positioned before the RBSP trailing bits.\n\n ///\n\n /// This matches the definition of `more_rbsp_data()` in Rec. 
ITU-T H.264\n\n /// (03/2010) section 7.2.\n\n pub fn has_more_rbsp_data(&mut self, name: &'static str) -> Result<bool, RbspBitReaderError> {\n\n // BitReader returns its reader iff at an aligned position.\n\n //self.reader.reader().map(|r| (r.position() as usize) < r.get_ref().len()).unwrap_or(true)\n\n let mut throwaway = self.reader.clone();\n\n let r = (move || {\n", "file_path": "src/rbsp.rs", "rank": 67, "score": 21.46925713796741 }, { "content": " pub num_ref_idx_l0_default_active_minus1: u32,\n\n pub num_ref_idx_l1_default_active_minus1: u32,\n\n pub weighted_pred_flag: bool,\n\n pub weighted_bipred_idc: u8,\n\n pub pic_init_qp_minus26: i32,\n\n pub pic_init_qs_minus26: i32,\n\n pub chroma_qp_index_offset: i32,\n\n pub deblocking_filter_control_present_flag: bool,\n\n pub constrained_intra_pred_flag: bool,\n\n pub redundant_pic_cnt_present_flag: bool,\n\n pub extension: Option<PicParameterSetExtra>,\n\n}\n\nimpl PicParameterSet {\n\n pub fn from_bytes<Ctx>(ctx: &Context<Ctx>, buf: &[u8]) -> Result<PicParameterSet, PpsError> {\n\n let mut r = RbspBitReader::new(buf);\n\n let pic_parameter_set_id = ParamSetId::from_u32(r.read_ue_named(\"pic_parameter_set_id\")?)\n\n .map_err(PpsError::BadPicParamSetId)?;\n\n let seq_parameter_set_id = ParamSetId::from_u32(r.read_ue_named(\"seq_parameter_set_id\")?)\n\n .map_err(PpsError::BadSeqParamSetId)?;\n\n let seq_parameter_set = ctx.sps_by_id(seq_parameter_set_id)\n", "file_path": "src/nal/pps.rs", "rank": 68, "score": 21.396312618848018 }, { "content": " SecMinHour::SMH(_, _, h) => *h,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ClockTimestamp {\n\n pub ct_type: CtType,\n\n pub nuit_field_based_flag: bool,\n\n pub counting_type: CountingType,\n\n pub discontinuity_flag: bool,\n\n pub cnt_dropped_flag: bool,\n\n pub n_frames: u8,\n\n pub smh: SecMinHour,\n\n pub time_offset: Option<i32>,\n\n}\n\nimpl ClockTimestamp {\n\n fn read(r: &mut RbspBitReader<'_>, sps: &sps::SeqParameterSet) -> Result<ClockTimestamp, PicTimingError> {\n\n let ct_type = CtType::from_id(r.read_u8(2)?);\n\n let nuit_field_based_flag = r.read_bool_named(\"nuit_field_based_flag\")?;\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 69, "score": 21.168082207138035 }, { "content": " data: Vec<u8>,\n\n }\n\n struct MockReader {\n\n state: Rc<RefCell<State>>\n\n }\n\n impl MockReader {\n\n fn new(state: Rc<RefCell<State>>) -> MockReader {\n\n MockReader {\n\n state\n\n }\n\n }\n\n }\n\n impl NalReader for MockReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n let mut state = self.state.borrow_mut();\n\n assert_eq!(state.started, state.ended);\n\n state.started += 1;\n\n }\n", "file_path": "src/annexb.rs", "rank": 70, "score": 20.891910047608246 }, { "content": "use crate::nal::sei::HeaderType;\n\nuse crate::Context;\n\nuse crate::nal::sei::SeiCompletePayloadReader;\n\nuse log::*;\n\n\n\n#[derive(Debug)]\n\npub enum ItuTT35Error {\n\n NotEnoughData { expected: usize, actual: usize }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum ItuTT35 {\n\n Japan,\n\n Albania,\n\n Algeria,\n\n AmericanSamoa,\n\n GermanyFederalRepublicOf(u8),\n\n Anguilla,\n\n AntiguaandBarbuda,\n\n Argentina,\n", "file_path": "src/nal/sei/user_data_registered_itu_t_t35.rs", "rank": 71, "score": 20.756190355592793 }, { "content": " assert_eq!(header.nal_unit_type(), crate::nal::UnitType::SEI);\n\n self.state = SeiHeaderState::Begin;\n\n }\n\n\n\n fn push(&mut self, ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n assert!(!buf.is_empty());\n\n let mut 
input = &buf[..];\n\n loop {\n\n if input.is_empty() {\n\n break;\n\n }\n\n let b = input[0];\n\n let mut exit = false;\n\n self.state = match self.state {\n\n SeiHeaderState::End => {\n\n panic!(\"SeiHeaderReader no preceding call to start()\");\n\n },\n\n SeiHeaderState::Begin => {\n\n match b {\n\n 0xff => {\n", "file_path": "src/nal/sei/mod.rs", "rank": 72, "score": 20.71638163762347 }, { "content": " Ok((header, sps, pps))\n\n }\n\n}\n\n\n\npub struct SliceLayerWithoutPartitioningRbsp<Ctx> {\n\n state: ParseState,\n\n phantom: marker::PhantomData<Ctx>\n\n}\n\nimpl<Ctx> super::NalHandler for SliceLayerWithoutPartitioningRbsp<Ctx> {\n\n type Ctx = Ctx;\n\n\n\n fn start(&mut self, _ctx: &mut Context<Ctx>, header: NalHeader) {\n\n self.state = ParseState::Start(header);\n\n }\n\n\n\n fn push(&mut self, ctx: &mut Context<Ctx>, buf: &[u8]) {\n\n match self.state {\n\n ParseState::Unstarted => panic!(\"start() not yet called\"),\n\n ParseState::Start(header) => {\n\n let mut r = RbspBitReader::new(buf);\n", "file_path": "src/nal/slice/mod.rs", "rank": 73, "score": 20.629525010704963 }, { "content": "\n\n fn read_group_ids(r: &mut RbspBitReader<'_>, num_slice_groups_minus1: u32) -> Result<Vec<u32>,PpsError> {\n\n let pic_size_in_map_units_minus1 = r.read_ue_named(\"pic_size_in_map_units_minus1\")?;\n\n // TODO: avoid any panics due to failed conversions\n\n let size = ((1f64+f64::from(pic_size_in_map_units_minus1)).log2()) as u8;\n\n let mut run_length_minus1 = Vec::with_capacity(num_slice_groups_minus1 as usize + 1);\n\n for _ in 0..num_slice_groups_minus1+1 {\n\n run_length_minus1.push(r.read_u32(size)?);\n\n }\n\n Ok(run_length_minus1)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/nal/pps.rs", "rank": 74, "score": 20.505094031460636 }, { "content": " R: NalReader<Ctx=Ctx>\n\n{\n\n state: ParseState,\n\n nal_reader: R,\n\n}\n\nimpl<R, Ctx> AnnexBReader<R, Ctx>\n\n where\n\n R: NalReader<Ctx=Ctx>\n\n{\n\n pub fn new(nal_reader: R) -> Self {\n\n AnnexBReader {\n\n state: ParseState::End,\n\n nal_reader,\n\n }\n\n }\n\n\n\n pub fn start(&mut self, ctx: &mut Context<Ctx>) {\n\n if self.state.in_unit().is_some() {\n\n // TODO: or reset()?\n\n self.nal_reader.end(ctx);\n", "file_path": "src/annexb.rs", "rank": 75, "score": 20.434018540515666 }, { "content": "}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SliceRect {\n\n top_left: u32,\n\n bottom_right: u32,\n\n}\n\nimpl SliceRect {\n\n fn read(r: &mut RbspBitReader<'_>) -> Result<SliceRect,PpsError> {\n\n Ok(SliceRect {\n\n top_left: r.read_ue_named(\"top_left\")?,\n\n bottom_right: r.read_ue_named(\"bottom_right\")?,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum SliceGroup {\n\n Interleaved {\n\n run_length_minus1: Vec<u32>,\n", "file_path": "src/nal/pps.rs", "rank": 76, "score": 20.292519190874703 }, { "content": " fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n self.state.borrow_mut().data.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n self.state.borrow_mut().ended = true;\n\n }\n\n }\n\n\n\n #[test]\n\n fn it_works() {\n\n let data = hex!(\n\n \"67 64 00 0A AC 72 84 44 26 84 00 00 03\n\n 00 04 00 00 03 00 CA 3C 48 96 11 80\");\n\n for i in 1..data.len()-1 {\n\n let state = Rc::new(RefCell::new(State {\n\n started: false,\n\n ended: false,\n\n data: Vec::new(),\n\n }));\n", "file_path": "src/rbsp.rs", "rank": 77, "score": 20.000386782526142 }, { "content": " pub fn sequence_parameter_sets(&self) -> impl Iterator<Item = Result<&'buf[u8], 
ParamSetError>> {\n\n let num = self.num_of_sequence_parameter_sets();\n\n let data = &self.data[Self::MIN_CONF_SIZE..];\n\n ParamSetIter::new(data, UnitType::SeqParameterSet)\n\n .take(num)\n\n }\n\n pub fn picture_parameter_sets(&self) -> impl Iterator<Item = Result<&'buf[u8], ParamSetError>> + 'buf {\n\n let offset = self.seq_param_sets_end().unwrap();\n\n let num = self.data[offset];\n\n let data = &self.data[offset+1..];\n\n ParamSetIter::new(data, UnitType::PicParameterSet)\n\n .take(num as usize)\n\n }\n\n\n\n /// Creates an H264 parser context from the given user context, using the settings encoded into\n\n /// this `AvcDecoderConfigurationRecord`.\n\n ///\n\n /// In particular, the _sequence parameter set_ and _picture parameter set_ values of this\n\n /// configuration record will be inserted into the resulting context.\n\n pub fn create_context<C>(&self, ctx: C) -> Result<Context<C>, AvccError> {\n", "file_path": "src/avcc.rs", "rank": 78, "score": 19.936413036479305 }, { "content": "\n\n fn push(&mut self, _ctx: &mut Context<Self::Ctx>, buf: &[u8]) {\n\n let mut state = self.state.borrow_mut();\n\n assert!(state.started > state.ended);\n\n assert!(!buf.is_empty());\n\n state.data.extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Self::Ctx>) {\n\n let mut state = self.state.borrow_mut();\n\n state.ended += 1;\n\n assert_eq!(state.started, state.ended);\n\n }\n\n }\n\n\n\n #[test]\n\n fn simple_nal() {\n\n let state = Rc::new(RefCell::new(State {\n\n started: 0,\n\n ended: 0,\n", "file_path": "src/annexb.rs", "rank": 79, "score": 19.902528126227953 }, { "content": " match SliceHeader::read(ctx, &mut r, header) {\n\n Ok(header) => info!(\"TODO: expose to caller: {:#?}\", header),\n\n Err(e) => error!(\"slice_header() error: SliceHeaderError::{:?}\", e),\n\n }\n\n self.state = ParseState::Continue(header);\n\n },\n\n ParseState::Continue(_header) => {\n\n // TODO\n\n }\n\n }\n\n }\n\n\n\n fn end(&mut self, _ctx: &mut Context<Ctx>) {\n\n // TODO\n\n }\n\n}\n\nimpl<Ctx> Default for SliceLayerWithoutPartitioningRbsp<Ctx> {\n\n fn default() -> Self {\n\n SliceLayerWithoutPartitioningRbsp {\n\n state: ParseState::Unstarted,\n\n phantom: marker::PhantomData,\n\n }\n\n }\n\n}", "file_path": "src/nal/slice/mod.rs", "rank": 80, "score": 19.895843532401493 }, { "content": " None\n\n })\n\n }\n\n fn read_cpb_specs(r: &mut RbspBitReader<'_>, cpb_cnt: u32) -> Result<Vec<CpbSpec>,RbspBitReaderError> {\n\n let mut cpb_specs = Vec::with_capacity(cpb_cnt as usize);\n\n for _ in 0..cpb_cnt {\n\n cpb_specs.push(CpbSpec::read(r)?);\n\n }\n\n Ok(cpb_specs)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct BitstreamRestrictions {\n\n motion_vectors_over_pic_boundaries_flag: bool,\n\n max_bytes_per_pic_denom: u32,\n\n max_bits_per_mb_denom: u32,\n\n log2_max_mv_length_horizontal: u32,\n\n log2_max_mv_length_vertical: u32,\n\n max_num_reorder_frames: u32,\n", "file_path": "src/nal/sps.rs", "rank": 81, "score": 19.851300337526553 }, { "content": "impl ScalingList {\n\n pub fn read(r: &mut RbspBitReader<'_>, size: u8) -> Result<ScalingList,ScalingMatrixError> {\n\n let mut scaling_list = vec!();\n\n let mut last_scale = 8;\n\n let mut next_scale = 8;\n\n let mut _use_default_scaling_matrix_flag = false;\n\n for j in 0..size {\n\n if next_scale != 0 {\n\n let delta_scale = r.read_se_named(\"delta_scale\")?;\n\n if delta_scale < -128 || delta_scale > 127 {\n\n return Err(ScalingMatrixError::DeltaScaleOutOfRange(delta_scale));\n\n }\n\n next_scale = (last_scale + delta_scale + 
256) % 256;\n\n _use_default_scaling_matrix_flag = j == 0 && next_scale == 0;\n\n }\n\n let new_value = if next_scale == 0 { last_scale } else { next_scale };\n\n scaling_list.push(new_value);\n\n last_scale = new_value;\n\n }\n\n Ok(ScalingList { })\n", "file_path": "src/nal/sps.rs", "rank": 82, "score": 19.4696990862634 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub struct VuiParameters {\n\n pub aspect_ratio_info: Option<AspectRatioInfo>,\n\n pub overscan_appropriate: OverscanAppropriate,\n\n pub video_signal_type: Option<VideoSignalType>,\n\n pub chroma_loc_info: Option<ChromaLocInfo>,\n\n pub timing_info: Option<TimingInfo>,\n\n pub nal_hrd_parameters: Option<HrdParameters>,\n\n pub vcl_hrd_parameters: Option<HrdParameters>,\n\n pub low_delay_hrd_flag: Option<bool>,\n\n pub pic_struct_present_flag: bool,\n\n pub bitstream_restrictions: Option<BitstreamRestrictions>,\n\n}\n\nimpl VuiParameters {\n\n fn read(r: &mut RbspBitReader<'_>) -> Result<Option<VuiParameters>, SpsError> {\n\n let vui_parameters_present_flag = r.read_bool()?;\n\n Ok(if vui_parameters_present_flag {\n\n let mut hrd_parameters_present = false;\n\n Some(VuiParameters {\n", "file_path": "src/nal/sps.rs", "rank": 83, "score": 19.428270588077993 }, { "content": " }\n\n\n\n fn push(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>, buf: &[u8]) {\n\n self.current_slice\n\n .as_mut()\n\n .unwrap()\n\n .buf\n\n .extend_from_slice(buf);\n\n }\n\n\n\n fn end(&mut self, ctx: &mut h264_reader::Context<Self::Ctx>) {\n\n let current_slice = self.current_slice.take().unwrap();\n\n let capture = NalCapture::default();\n\n let mut decode = RbspDecoder::new(capture);\n\n decode.start(ctx, current_slice.header);\n\n decode.push(ctx, &current_slice.buf[..]);\n\n decode.end(ctx);\n\n let capture = decode.into_handler();\n\n let mut r = rbsp::RbspBitReader::new(&capture.buf[1..]);\n\n match nal::slice::SliceHeader::read(ctx, &mut r, current_slice.header) {\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 84, "score": 19.403873009223496 }, { "content": " }\n\n\n\n fn read_delays(r: &mut RbspBitReader<'_>, sps: &sps::SeqParameterSet) -> Result<Option<Delays>,PicTimingError> {\n\n Ok(if let Some(ref vui_params) = sps.vui_parameters {\n\n if let Some(ref hrd) = vui_params.nal_hrd_parameters.as_ref().or_else(|| vui_params.nal_hrd_parameters.as_ref() ) {\n\n Some(Delays {\n\n cpb_removal_delay: r.read_u32(hrd.cpb_removal_delay_length_minus1+1)?,\n\n dpb_output_delay: r.read_u32(hrd.dpb_output_delay_length_minus1+1)?,\n\n })\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n })\n\n }\n\n\n\n fn read_pic_struct(r: &mut RbspBitReader<'_>, sps: &sps::SeqParameterSet) -> Result<Option<PicStruct>,PicTimingError> {\n\n Ok(if let Some(ref vui_params) = sps.vui_parameters {\n\n if vui_params.pic_struct_present_flag {\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 85, "score": 19.2889646752151 }, { "content": " 2 => CountingType::DroppingIndividualZero,\n\n 3 => CountingType::DroppingIndividualMax,\n\n 4 => CountingType::DroppingTwoLowest,\n\n 5 => CountingType::DroppingIndividual,\n\n 6 => CountingType::Dropping,\n\n 7..=31 => CountingType::Reserved(id),\n\n _ => panic!(\"unexpected counting_type {}\", id),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum SecMinHour {\n\n None,\n\n S(u8),\n\n SM(u8, u8),\n\n SMH(u8, u8, u8)\n\n}\n\nimpl SecMinHour {\n\n pub fn seconds(&self) -> u8 {\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 86, "score": 19.124869507199037 }, { "content": " let 
video_signal_type_present_flag = r.read_bool()?;\n\n Ok(if video_signal_type_present_flag {\n\n Some(VideoSignalType {\n\n video_format: VideoFormat::from(r.read_u8(3)?),\n\n video_full_range_flag: r.read_bool()?,\n\n colour_description: ColourDescription::read(r)?,\n\n })\n\n } else {\n\n None\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ChromaLocInfo {\n\n chroma_sample_loc_type_top_field: u32,\n\n chroma_sample_loc_type_bottom_field: u32,\n\n}\n\nimpl ChromaLocInfo {\n\n fn read(r: &mut RbspBitReader<'_>) -> Result<Option<ChromaLocInfo>, RbspBitReaderError> {\n", "file_path": "src/nal/sps.rs", "rank": 87, "score": 19.084052683589945 }, { "content": " fn read(r: &mut RbspBitReader<'_>) -> Result<CpbSpec,RbspBitReaderError> {\n\n Ok(CpbSpec {\n\n bit_rate_value_minus1: r.read_ue_named(\"bit_rate_value_minus1\")?,\n\n cpb_size_value_minus1: r.read_ue_named(\"cpb_size_value_minus1\")?,\n\n cbr_flag: r.read_bool_named(\"cbr_flag\")?,\n\n })\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub struct HrdParameters {\n\n pub bit_rate_scale: u8,\n\n pub cpb_size_scale: u8,\n\n pub cpb_specs: Vec<CpbSpec>,\n\n pub initial_cpb_removal_delay_length_minus1: u8,\n\n pub cpb_removal_delay_length_minus1: u8,\n\n pub dpb_output_delay_length_minus1: u8,\n\n pub time_offset_length: u8,\n\n}\n", "file_path": "src/nal/sps.rs", "rank": 88, "score": 19.045138176181034 }, { "content": " slice_type: SliceType,\n\n colour_plane: Option<ColourPlane>,\n\n frame_num: u16,\n\n field_pic: FieldPic,\n\n idr_pic_id: Option<u32>,\n\n pic_order_cnt_lsb: Option<PicOrderCountLsb>,\n\n redundant_pic_cnt: Option<u32>,\n\n direct_spatial_mv_pred_flag: Option<bool>,\n\n num_ref_idx_active: Option<NumRefIdxActive>,\n\n ref_pic_list_modification: Option<RefPicListModifications>, // may become an enum rather than Option in future (for ref_pic_list_mvc_modification)\n\n pred_weight_table: Option<PredWeightTable>,\n\n dec_ref_pic_marking: Option<DecRefPicMarking>,\n\n cabac_init_idc: Option<u32>,\n\n slice_qp_delta: i32,\n\n sp_for_switch_flag: Option<bool>,\n\n slice_qs: Option<u32>,\n\n disable_deblocking_filter_idc: u8,\n\n}\n\nimpl SliceHeader {\n\n pub fn read<'a, Ctx>(ctx: &'a mut Context<Ctx>, r: &mut RbspBitReader<'_>, header: NalHeader) -> Result<(SliceHeader, &'a SeqParameterSet, &'a PicParameterSet), SliceHeaderError> {\n", "file_path": "src/nal/slice/mod.rs", "rank": 89, "score": 19.02441606870664 }, { "content": " }\n\n}\n\nimpl<Ctx> Context<Ctx> {\n\n pub fn new(user_context: Ctx) -> Self {\n\n let mut seq_param_sets = vec!();\n\n for _ in 0..32 { seq_param_sets.push(None); }\n\n let mut pic_param_sets = vec!();\n\n for _ in 0..32 { pic_param_sets.push(None); }\n\n Context {\n\n seq_param_sets,\n\n pic_param_sets,\n\n user_context,\n\n }\n\n }\n\n}\n\nimpl<Ctx> Context<Ctx> {\n\n pub fn sps_by_id(&self, id: nal::pps::ParamSetId) -> Option<&nal::sps::SeqParameterSet> {\n\n if id.id() > 31 {\n\n None\n\n } else {\n", "file_path": "src/lib.rs", "rank": 90, "score": 18.908585276818265 }, { "content": " data: Vec::new(),\n\n }));\n\n let mock = MockReader::new(Rc::clone(&state));\n\n let mut r = AnnexBReader::new(mock);\n\n let data = vec!(\n\n 0, 0, 0, 1, // start-code\n\n 3, 0 // NAL data\n\n );\n\n let mut ctx = Context::default();\n\n r.start(&mut ctx);\n\n r.push(&mut ctx, &data[..]);\n\n r.end_units(&mut ctx);\n\n {\n\n let s = state.borrow();\n\n assert_eq!(1, s.started);\n\n assert_eq!(&s.data[..], &[3u8, 0u8][..]);\n\n assert_eq!(1, s.ended);\n\n }\n\n }\n\n\n", "file_path": "src/annexb.rs", 
"rank": 91, "score": 18.850646725067858 }, { "content": "impl HrdParameters {\n\n fn read(r: &mut RbspBitReader<'_>, hrd_parameters_present: &mut bool) -> Result<Option<HrdParameters>, SpsError> {\n\n let hrd_parameters_present_flag = r.read_bool_named(\"hrd_parameters_present_flag\")?;\n\n *hrd_parameters_present |= hrd_parameters_present_flag;\n\n Ok(if hrd_parameters_present_flag {\n\n let cpb_cnt_minus1 = r.read_ue_named(\"cpb_cnt_minus1\")?;\n\n if cpb_cnt_minus1 > 31 {\n\n return Err(SpsError::CpbCountOutOfRange(cpb_cnt_minus1));\n\n }\n\n let cpb_cnt = cpb_cnt_minus1 + 1;\n\n Some(HrdParameters {\n\n bit_rate_scale: r.read_u8(4)?,\n\n cpb_size_scale: r.read_u8(4)?,\n\n cpb_specs: Self::read_cpb_specs(r, cpb_cnt)?,\n\n initial_cpb_removal_delay_length_minus1: r.read_u8(5)?,\n\n cpb_removal_delay_length_minus1: r.read_u8(5)?,\n\n dpb_output_delay_length_minus1: r.read_u8(5)?,\n\n time_offset_length: r.read_u8(5)?,\n\n })\n\n } else {\n", "file_path": "src/nal/sps.rs", "rank": 92, "score": 18.79237561821609 }, { "content": " use super::*;\n\n\n\n #[derive(Default)]\n\n struct NullRegister {\n\n handled: bool,\n\n }\n\n impl crate::nal::sei::user_data_registered_itu_t_t35::Register for NullRegister {\n\n type Ctx = ();\n\n\n\n fn handle(&mut self, _ctx: &mut crate::Context<Self::Ctx>, country_code: crate::nal::sei::user_data_registered_itu_t_t35::ItuTT35, _payload: &[u8]) {\n\n assert_eq!(country_code, ItuTT35::UnitedKingdom);\n\n self.handled = true;\n\n }\n\n }\n\n #[test]\n\n fn macro_usage() {\n\n tt_35_switch!{\n\n TestTT35Switch<()> {\n\n UnitedKingdom => NullRegister,\n\n }\n\n }\n\n\n\n let mut sw = TestTT35Switch::default();\n\n let mut ctx = crate::Context::new(());\n\n let data = [ 0x00u8 ];\n\n sw.handle(&mut ctx, ItuTT35::UnitedKingdom, &data[..]);\n\n assert!(sw.UnitedKingdom.handled);\n\n }\n\n}", "file_path": "src/nal/sei/user_data_registered_itu_t_t35.rs", "rank": 93, "score": 18.58922861577487 }, { "content": " ended: bool,\n\n data: Vec<u8>,\n\n }\n\n struct MockReader {\n\n state: Rc<RefCell<State>>\n\n }\n\n impl MockReader {\n\n fn new(state: Rc<RefCell<State>>) -> MockReader {\n\n MockReader {\n\n state\n\n }\n\n }\n\n }\n\n impl NalHandler for MockReader {\n\n type Ctx = ();\n\n\n\n fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _header: NalHeader) {\n\n self.state.borrow_mut().started = true;\n\n }\n\n\n", "file_path": "src/rbsp.rs", "rank": 94, "score": 18.455797033680764 }, { "content": " error!(\"End of SEI data encountered having read {} bytes of payloadSize={} for header type {:?}\", consumed_size, payload_size, payload_type);\n\n self.reader.reset(ctx);\n\n },\n\n }\n\n self.state = SeiHeaderState::End;\n\n }\n\n}\n\n\n\npub struct SeiNalHandler<R: SeiIncrementalPayloadReader> {\n\n reader: RbspDecoder<SeiHeaderReader<R>>,\n\n}\n\nimpl<R: SeiIncrementalPayloadReader> SeiNalHandler<R> {\n\n pub fn new(r: R) -> Self {\n\n SeiNalHandler {\n\n reader: RbspDecoder::new(SeiHeaderReader::new(r)),\n\n }\n\n }\n\n}\n\n\n\nimpl<R: SeiIncrementalPayloadReader> NalHandler for SeiNalHandler<R> {\n", "file_path": "src/nal/sei/mod.rs", "rank": 95, "score": 18.28286318665216 }, { "content": " fn read(r: &mut RbspBitReader<'_>) -> Result<FrameMbsFlags, RbspBitReaderError> {\n\n let frame_mbs_only_flag = r.read_bool()?;\n\n if frame_mbs_only_flag {\n\n Ok(FrameMbsFlags::Frames)\n\n } else {\n\n Ok(FrameMbsFlags::Fields {\n\n mb_adaptive_frame_field_flag: r.read_bool()?\n\n })\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct FrameCropping {\n\n 
pub left_offset: u32,\n\n pub right_offset: u32,\n\n pub top_offset: u32,\n\n pub bottom_offset: u32,\n\n}\n\nimpl FrameCropping {\n", "file_path": "src/nal/sps.rs", "rank": 96, "score": 18.07512202569022 }, { "content": " Ok(if colour_description_present_flag {\n\n Some(ColourDescription {\n\n colour_primaries: r.read_u8(8)?,\n\n transfer_characteristics: r.read_u8(8)?,\n\n matrix_coefficients: r.read_u8(8)?,\n\n })\n\n } else {\n\n None\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct VideoSignalType {\n\n video_format: VideoFormat,\n\n video_full_range_flag: bool,\n\n colour_description: Option<ColourDescription>,\n\n}\n\nimpl VideoSignalType {\n\n fn read(r: &mut RbspBitReader<'_>) -> Result<Option<VideoSignalType>, RbspBitReaderError> {\n", "file_path": "src/nal/sps.rs", "rank": 97, "score": 17.8599525981965 }, { "content": "//! Parser for H264 bitstream syntax. Not a video decoder.\n\n\n\n#![forbid(unsafe_code)]\n\n#![deny(rust_2018_idioms)]\n\n\n\npub mod rbsp;\n\npub mod annexb;\n\npub mod nal;\n\npub mod avcc;\n\n\n\n/// Contextual data that needs to be tracked between evaluations of different portions of H264\n\n/// syntax.\n\npub struct Context<Ctx> {\n\n seq_param_sets: Vec<Option<nal::sps::SeqParameterSet>>,\n\n pic_param_sets: Vec<Option<nal::pps::PicParameterSet>>,\n\n pub user_context: Ctx,\n\n}\n\nimpl Default for Context<()> {\n\n fn default() -> Self {\n\n Self::new(())\n", "file_path": "src/lib.rs", "rank": 98, "score": 17.85760462886614 }, { "content": " Progressive,\n\n Interlaced,\n\n Unknown,\n\n Reserved,\n\n}\n\nimpl CtType {\n\n fn from_id(id: u8) -> CtType {\n\n match id {\n\n 0 => CtType::Progressive,\n\n 1 => CtType::Interlaced,\n\n 2 => CtType::Unknown,\n\n 3 => CtType::Reserved,\n\n _ => panic!(\"unexpected ct_type {}\", id),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum CountingType {\n\n /// no dropping of `n_frames` values, and no use of `time_offset`\n", "file_path": "src/nal/sei/pic_timing.rs", "rank": 99, "score": 17.77957558613651 } ]
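The `src/nal/sps.rs` snippets above repeatedly use the same shape: read a presence flag, parse the rest of the structure only if the flag is set, and surface the result as an `Option`. Below is a minimal, self-contained sketch of that pattern; `BitSource` is a made-up stand-in for the reader, and the `Result`/`RbspBitReaderError` handling of the real code is deliberately dropped.

```rust
// Sketch of the presence-flag pattern from the SPS snippets above.
// BitSource is hypothetical; it only exists to make the example runnable.
#[derive(Debug, PartialEq)]
struct ColourDescription {
    colour_primaries: u8,
    transfer_characteristics: u8,
    matrix_coefficients: u8,
}

struct BitSource {
    data: Vec<u8>,
    pos: usize,
}

impl BitSource {
    fn read_bool(&mut self) -> bool {
        let v = self.data[self.pos] != 0;
        self.pos += 1;
        v
    }
    fn read_u8(&mut self) -> u8 {
        let v = self.data[self.pos];
        self.pos += 1;
        v
    }
}

// Mirrors the shape of ColourDescription::read(): None when the flag is unset.
fn read_colour_description(r: &mut BitSource) -> Option<ColourDescription> {
    if r.read_bool() {
        Some(ColourDescription {
            colour_primaries: r.read_u8(),
            transfer_characteristics: r.read_u8(),
            matrix_coefficients: r.read_u8(),
        })
    } else {
        None
    }
}

fn main() {
    let mut present = BitSource { data: vec![1, 0, 1, 5], pos: 0 };
    println!("{:?}", read_colour_description(&mut present));

    let mut absent = BitSource { data: vec![0], pos: 0 };
    assert_eq!(read_colour_description(&mut absent), None);
}
```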
Rust
src/main.rs
gfarrell/netctl-tray
58b7ad1684bc9f949839fb2dd08cd9ef2ad0bc74
#![feature(exclusive_range_pattern)] mod state; use notify_rust::{Notification, Timeout}; use qt_gui::QIcon; use qt_widgets::cpp_utils::{CppBox, MutPtr}; use qt_widgets::qt_core::{QString, QTimer, Slot}; use qt_widgets::{QActionGroup, QApplication, QMenu, QSystemTrayIcon, SlotOfActivationReason}; use state::{inotify_watch, scan_profiles, update_state, State}; use std::ffi::OsStr; use std::net::SocketAddr; use std::process::Command; use std::sync::{Arc, Mutex}; use structopt::StructOpt; use users::{get_current_gid, get_current_username, get_user_groups}; #[derive(Debug, StructOpt)] #[structopt( name = "netctl-tray", about = "A lightweight netctl tray app with notifications." )] pub struct Opt { #[structopt(short, long, default_value = "2")] pub interval: f32, #[structopt(long, default_value = "1.1.1.1:53")] pub host: SocketAddr, #[structopt(short)] pub disable_notifications: bool, } fn main() { let args = Opt::from_args(); let (in_wheel, in_network) = is_user_in_wheel_and_network(); if !in_wheel { eprintln!("Warning! You are not in group 'wheel', netctl-tray might not work or work only partially."); } if !in_network { eprintln!("Warning! You are not in group 'network', netctl-tray might not work or work only partially."); } let mut state = State { link_quality: 0, ping: 0.0, all_profiles: Arc::new(Mutex::new(Vec::new())), active_profile: None, }; if let Err(e) = scan_profiles(&mut *state.all_profiles.lock().unwrap()) { eprintln!("Error while scanning profiles: {:?}", e); return; } let all_profiles_clone = state.all_profiles.clone(); if let Err(e) = inotify_watch(all_profiles_clone, "/etc/netctl") { eprintln!("Error watching /etc/netctl: {:?}", e); return; } if let Err(e) = update_state(&mut state, &args) { eprintln!("Can't update tray state: {:?}", e); } let state_ptr: *mut State = &mut state; QApplication::init(|_app| { unsafe { let mut tray = QSystemTrayIcon::from_q_icon(get_status_icon(&mut state).as_ref()); let tray_click = SlotOfActivationReason::new(|reason| { let reason = reason.to_int(); if reason == 3 || reason == 4 { if let Err(e) = Notification::new() .summary("netctl") .body(&format!( "Profile: <b>{}</b>, Ping: <b>{} ms</b>, Quality: <b>{}/70</b>", (*state_ptr) .active_profile .as_ref() .unwrap_or(&"<i>{none}</i>".to_string()), if (*state_ptr).ping == f32::INFINITY { "∞".to_string() } else { (*state_ptr).ping.round().to_string() }, (*state_ptr).link_quality )) .icon("network-wireless") .timeout(Timeout::Milliseconds(5000)) .show() { eprintln!("Error sending desktop notification: {:?}", e); } } }); tray.activated().connect(&tray_click); let mut menu = QMenu::new(); tray.set_context_menu(menu.as_mut_ptr()); let profiles_submenu = menu.add_menu_q_string(QString::from_std_str("Profiles").as_mut_ref()); let mut profile_actions_group = QActionGroup::new(profiles_submenu); let group_ptr = profile_actions_group.as_mut_ptr(); let click = Slot::new(|| { #[cfg(not(feature = "auto"))] { if let Some(current_active_profile) = &(*state_ptr).active_profile { if let Err(e) = Command::new("netctl") .arg("stop") .arg(current_active_profile) .spawn() { eprintln!("Couldn't run netctl stop command: {:?}", e); } } if let Err(e) = Command::new("netctl") .arg("start") .arg((*group_ptr).checked_action().text().to_std_string()) .spawn() { eprintln!("Couldn't run netctl start command: {:?}", e); } } #[cfg(feature = "auto")] { if let Err(e) = Command::new("netctl-auto") .arg("switch-to") .arg((*group_ptr).checked_action().text().to_std_string()) .spawn() { eprintln!("Couldn't run netctl-auto switch-to 
command: {:?}", e); } } }); let generate_profiles_submenu = Slot::new(|| { gen_profile_submenu( state_ptr, profiles_submenu, &mut profile_actions_group, &click, ); }); profiles_submenu .about_to_show() .connect(&generate_profiles_submenu); let exit_app = Slot::new(|| { std::process::exit(0); }); menu.add_action_q_icon_q_string( QIcon::from_q_string( QString::from_std_str("/usr/share/netctl-tray/exit.svg").as_mut_ref(), ) .as_mut_ref(), QString::from_std_str("Exit").as_mut_ref(), ) .triggered() .connect(&exit_app); tray.show(); let update_state = Slot::new(|| { let old_active_profile = (*state_ptr).active_profile.clone(); if let Err(e) = update_state(&mut (*state_ptr), &args) { eprintln!("Can't update tray state: {:?}", e); } if !args.disable_notifications { if let Err(e) = profile_notification(&mut (*state_ptr), old_active_profile) { eprintln!("Error sending desktop notification: {:?}", e); } } tray.set_icon(get_status_icon(&mut (*state_ptr)).as_ref()); }); let mut update_timer = QTimer::new_0a(); update_timer.set_interval((args.interval * 1000.0) as i32); update_timer.timeout().connect(&update_state); update_timer.start_0a(); QApplication::exec() } }); } unsafe fn gen_profile_submenu( state_ptr: *mut State, mut profiles_submenu: MutPtr<QMenu>, profile_actions_group: &mut CppBox<QActionGroup>, click: &Slot, ) { profiles_submenu.clear(); for profile in &(*(*state_ptr).all_profiles.lock().unwrap()) { let mut item = profiles_submenu.add_action_q_string(QString::from_std_str(profile).as_mut_ref()); item.set_checkable(true); item.set_checked(false); if let Some(active_profile) = &(*state_ptr).active_profile { if active_profile == profile { item.set_checked(true); } } item.set_action_group(profile_actions_group.as_mut_ptr()); item.triggered().connect(click); } } fn profile_notification( state: &mut State, old_active_profile: Option<String>, ) -> Result<(), notify_rust::error::Error> { let text = match (&old_active_profile, &state.active_profile) { (None, Some(new)) => { format!("Profile <b>{}</b> started.", new) } (Some(old), None) => { format!("Profile <b>{}</b> stopped.", old) } (Some(old), Some(new)) => { if old != new { format!("Profile switched: from <b>{}</b> to <b>{}</b>.", old, new) } else { return Ok(()); } } _ => { return Ok(()); } }; Notification::new() .summary("netctl") .body(&text) .icon("network-wireless") .timeout(Timeout::Milliseconds(5000)) .show()?; Ok(()) } fn is_user_in_wheel_and_network() -> (bool, bool) { let username = match get_current_username() { Some(s) => s, None => { eprintln!("Can't get current user!"); return (false, false); } }; let groups = match get_user_groups(&username, get_current_gid()) { Some(g) => g, None => { eprintln!("Couldn't get the list of groups the user is in."); return (false, false); } }; let mut in_wheel = false; let mut in_network = false; for group in groups { if group.name() == OsStr::new("network") { in_network = true; } else if group.name() == OsStr::new("wheel") { in_wheel = true; } } (in_wheel, in_network) } fn get_status_icon(state: &mut State) -> CppBox<QIcon> { let icon_path = if state.active_profile.is_none() { "/usr/share/netctl-tray/no_profile.svg" } else { if state.ping == f32::INFINITY { match state.link_quality { 0 => "/usr/share/netctl-tray/no_signal_no_internet.svg", 1..23 => "/usr/share/netctl-tray/bad_no_internet.svg", 23..47 => "/usr/share/netctl-tray/medium_no_internet.svg", _ => "/usr/share/netctl-tray/good_no_internet.svg", } } else { match state.link_quality { 0 => "/usr/share/netctl-tray/no_signal.svg", 1..23 => 
"/usr/share/netctl-tray/bad.svg", 23..47 => "/usr/share/netctl-tray/medium.svg", _ => "/usr/share/netctl-tray/good.svg", } } }; unsafe { QIcon::from_q_string(QString::from_std_str(&icon_path).as_mut_ref()) } }
#![feature(exclusive_range_pattern)] mod state; use notify_rust::{Notification, Timeout}; use qt_gui::QIcon; use qt_widgets::cpp_utils::{CppBox, MutPtr}; use qt_widgets::qt_core::{QString, QTimer, Slot}; use qt_widgets::{QActionGroup, QApplication, QMenu, QSystemTrayIcon, SlotOfActivationReason}; use state::{inotify_watch, scan_profiles, update_state, State}; use std::ffi::OsStr; use std::net::SocketAddr; use std::process::Command; use std::sync::{Arc, Mutex}; use structopt::StructOpt; use users::{get_current_gid, get_current_username, get_user_groups}; #[derive(Debug, StructOpt)] #[structopt( name = "netctl-tray", about = "A lightweight netctl tray app with notifications." )] pub struct Opt { #[structopt(short, long, default_value = "2")] pub interval: f32, #[structopt(long, default_value = "1.1.1.1:53")] pub host: SocketAddr, #[structopt(short)] pub disable_notifications: bool, } fn main() { let args = Opt::from_args(); let (in_wheel, in_network) = is_user_in_wheel_and_network(); if !in_wheel { eprintln!("Warning! You are not in group 'wheel', netctl-tray might not work or work only partially."); } if !in_network { eprintln!("Warning! You are not in group 'network', netctl-tray might not work or work only partially."); } let mut state = State { link_quality: 0, ping: 0.0, all_profiles: Arc::new(Mutex::new(Vec::new())), active_profile: None, }; if let Err(e) = scan_profiles(&mut *state.all_profiles.lock().unwrap()) { eprintln!("Error while scanning profiles: {:?}", e); return; } let all_profiles_clone = state.all_profiles.clone(); if let Err(e) = inotify_watch(all_profiles_clone, "/etc/netctl") { eprintln!("Error watching /etc/netctl: {:?}", e); return; } if let Err(e) = update_state(&mut state, &args) { eprintln!("Can't update tray state: {:?}", e); } let state_ptr: *mut State = &mut state; QApplication::init(|_app| { unsafe { let mut tray = QSystemTrayIcon::from_q_icon(get_status_icon(&mut state).as_ref()); let tray_click = SlotOfActivationReason::new(|reason| { let reason = reason.to_int(); if reason == 3 || reason == 4 { if let Err(e) = Notification::new() .summary("netctl") .body(&format!( "Profile: <b>{}</b>, Ping: <b>{} ms</b>, Quality: <b>{}/70</b>", (*state_ptr) .active_profile .as_ref() .unwrap_or(&"<i>{none}</i>".to_string()), if (*state_ptr).ping == f32::INFINITY { "∞".to_string() } else { (*state_ptr).ping.round().to_string() }, (*state_ptr).link_quality )) .icon("network-wireless") .timeout(Timeout::Milliseconds(5000)) .show() { eprintln!("Error sending desktop notification: {:?}", e); } } }); tray.activated().connect(&tray_click); let mut menu = QMenu::new(); tray.set_context_menu(menu.as_mut_ptr()); let profiles_submenu = menu.add_menu_q_string(QString::from_std_str("Profiles").as_mut_ref()); let mut profile_actions_group = QActionGroup::new(profiles_submenu); let group_ptr = profile_actions_group.as_mut_ptr(); let click = Slot::new(|| { #[cfg(not(feature = "auto"))] { if let Some(current_active_profile) = &(*state_ptr).active_profile { if let Err(e) = Command::new("netctl") .arg("stop") .arg(current_active_profile) .spawn() { eprintln!("Couldn't run netctl stop command: {:?}", e); } } if let Err(e) = Command::new("netctl") .arg("start") .arg((*group_ptr).checked_action().text().to_std_string()) .spawn() { eprintln!("Couldn't run netctl start command: {:?}", e); } } #[cfg(feature = "auto")] { if let Err(e) = Command::new("netctl-auto") .arg("switch-to") .arg((*group_ptr).checked_action().text().to_std_string()) .spawn() { eprintln!("Couldn't run netctl-auto switch-to 
command: {:?}", e); } } }); let generate_profiles_submenu = Slot::new(|| { gen_profile_submenu( state_ptr, profiles_submenu, &mut profile_actions_group, &click, ); }); profiles_submenu .about_to_show() .connect(&generate_profiles_submenu); let exit_app = Slot::new(|| { std::process::exit(0); }); menu.add_action_q_icon_q_string( QIcon::from_q_string( QString::from_std_str("/usr/share/netctl-tray/exit.svg").as_mut_ref(), ) .as_mut_ref(), QString::from_std_str("Exit").as_mut_ref(), ) .triggered() .connect(&exit_app); tray.show(); let update_state = Slot::new(|| { let old_active_profile = (*state_ptr).active_profile.clone(); if let Err(e) = update_state(&mut (*state_ptr), &args) { eprintln!("Can't update tray state: {:?}", e); } if !args.disable_notifications { if let Err(e) = profile_notification(&mut (*state_ptr), old_active_profile) { eprintln!("Error sending desktop notification: {:?}", e); } } tray.set_icon(get_status_icon(&mut (*state_ptr)).as_ref()); }); let mut update_timer = QTimer::new_0a(); update_timer.set_interval((args.interval * 1000.0) as i32); update_timer.timeout().connect(&update_state); update_timer.start_0a(); QApplication::exec() } }); }
fn profile_notification( state: &mut State, old_active_profile: Option<String>, ) -> Result<(), notify_rust::error::Error> { let text = match (&old_active_profile, &state.active_profile) { (None, Some(new)) => { format!("Profile <b>{}</b> started.", new) } (Some(old), None) => { format!("Profile <b>{}</b> stopped.", old) } (Some(old), Some(new)) => { if old != new { format!("Profile switched: from <b>{}</b> to <b>{}</b>.", old, new) } else { return Ok(()); } } _ => { return Ok(()); } }; Notification::new() .summary("netctl") .body(&text) .icon("network-wireless") .timeout(Timeout::Milliseconds(5000)) .show()?; Ok(()) } fn is_user_in_wheel_and_network() -> (bool, bool) { let username = match get_current_username() { Some(s) => s, None => { eprintln!("Can't get current user!"); return (false, false); } }; let groups = match get_user_groups(&username, get_current_gid()) { Some(g) => g, None => { eprintln!("Couldn't get the list of groups the user is in."); return (false, false); } }; let mut in_wheel = false; let mut in_network = false; for group in groups { if group.name() == OsStr::new("network") { in_network = true; } else if group.name() == OsStr::new("wheel") { in_wheel = true; } } (in_wheel, in_network) } fn get_status_icon(state: &mut State) -> CppBox<QIcon> { let icon_path = if state.active_profile.is_none() { "/usr/share/netctl-tray/no_profile.svg" } else { if state.ping == f32::INFINITY { match state.link_quality { 0 => "/usr/share/netctl-tray/no_signal_no_internet.svg", 1..23 => "/usr/share/netctl-tray/bad_no_internet.svg", 23..47 => "/usr/share/netctl-tray/medium_no_internet.svg", _ => "/usr/share/netctl-tray/good_no_internet.svg", } } else { match state.link_quality { 0 => "/usr/share/netctl-tray/no_signal.svg", 1..23 => "/usr/share/netctl-tray/bad.svg", 23..47 => "/usr/share/netctl-tray/medium.svg", _ => "/usr/share/netctl-tray/good.svg", } } }; unsafe { QIcon::from_q_string(QString::from_std_str(&icon_path).as_mut_ref()) } }
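The `get_status_icon` match just above buckets link quality with half-open range patterns (`1..23`, `23..47`), which is why the file starts with `#![feature(exclusive_range_pattern)]`. The same bucketing can be written with inclusive ranges (`..=`) and no feature gate. The sketch below keeps the icon paths from the file but collapses the profile/ping checks into a single `online` flag for brevity, so the function shape is an assumption, not the original API.

```rust
// Equivalent bucketing with inclusive range patterns; 1..=22 covers exactly
// the same values as the half-open 1..23 above, and 23..=46 matches 23..47.
fn status_icon(link_quality: u8, online: bool) -> &'static str {
    match (online, link_quality) {
        (false, 0) => "/usr/share/netctl-tray/no_signal_no_internet.svg",
        (false, 1..=22) => "/usr/share/netctl-tray/bad_no_internet.svg",
        (false, 23..=46) => "/usr/share/netctl-tray/medium_no_internet.svg",
        (false, _) => "/usr/share/netctl-tray/good_no_internet.svg",
        (true, 0) => "/usr/share/netctl-tray/no_signal.svg",
        (true, 1..=22) => "/usr/share/netctl-tray/bad.svg",
        (true, 23..=46) => "/usr/share/netctl-tray/medium.svg",
        (true, _) => "/usr/share/netctl-tray/good.svg",
    }
}

fn main() {
    assert_eq!(status_icon(30, true), "/usr/share/netctl-tray/medium.svg");
    assert_eq!(status_icon(10, false), "/usr/share/netctl-tray/bad_no_internet.svg");
}
```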
unsafe fn gen_profile_submenu( state_ptr: *mut State, mut profiles_submenu: MutPtr<QMenu>, profile_actions_group: &mut CppBox<QActionGroup>, click: &Slot, ) { profiles_submenu.clear(); for profile in &(*(*state_ptr).all_profiles.lock().unwrap()) { let mut item = profiles_submenu.add_action_q_string(QString::from_std_str(profile).as_mut_ref()); item.set_checkable(true); item.set_checked(false); if let Some(active_profile) = &(*state_ptr).active_profile { if active_profile == profile { item.set_checked(true); } } item.set_action_group(profile_actions_group.as_mut_ptr()); item.triggered().connect(click); } }
function_block-full_function
[ { "content": "// Updates the netctl-tray state: ping, quality and current active profile\n\npub fn update_state(state: &mut State, args: &Opt) -> Result<(), std::io::Error> {\n\n // get the current active profile\n\n #[cfg(not(feature = \"auto\"))]\n\n let raw_profiles = Command::new(\"netctl\").arg(\"list\").output()?;\n\n #[cfg(feature = \"auto\")]\n\n let raw_profiles = Command::new(\"netctl-auto\").arg(\"list\").output()?;\n\n // Iterate through each line\n\n let mut active_profile = None;\n\n for line in raw_profiles.stdout.split(|c| *c == '\\n' as u8) {\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n // If the line starts with an asterisk, then the profile is active\n\n // and we need it's name\n\n if line[0] == '*' as u8 {\n\n active_profile = match std::str::from_utf8(&line[2..]) {\n\n Ok(s) => Some(s.to_owned()),\n\n Err(e) => {\n\n eprintln!(\"Can't read profile name from netctl list: {:?}\", e);\n\n break;\n", "file_path": "src/state.rs", "rank": 0, "score": 105797.13816760368 }, { "content": "// Scans the files in /etc/netctl and adds the profiles to the vector\n\npub fn scan_profiles(all_profiles: &mut Vec<String>) -> Result<(), std::io::Error> {\n\n // for every file or folder in /etc/netcl\n\n for entry in fs::read_dir(\"/etc/netctl/\")? {\n\n let path = entry?.path();\n\n let metadata = path.metadata()?;\n\n if metadata.is_file() {\n\n // the file name of the profile configuration\n\n // is the name of the profile.\n\n let profile_name = match path.file_name().unwrap().to_str() {\n\n Some(f) => f,\n\n None => {\n\n eprintln!(\n\n \"Can't convert OsStr to str: {:?}\",\n\n path.file_name().unwrap()\n\n );\n\n continue;\n\n }\n\n };\n\n // add the profile to the vector\n\n all_profiles.push(profile_name.to_owned());\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 2, "score": 78609.13901440593 }, { "content": "pub fn inotify_watch(\n\n all_profiles: Arc<Mutex<Vec<String>>>,\n\n dir: &str,\n\n) -> Result<(), notify::Error> {\n\n // initialize the inotify watcher\n\n let mut watcher = immediate_watcher(move |res: Result<NEvent, _>| match res {\n\n Ok(event) => {\n\n match event.kind {\n\n NEventKind::Create(_) => {\n\n // Add the new profile\n\n for path in event.paths {\n\n match path.file_name().unwrap().to_str() {\n\n Some(p) => all_profiles.lock().unwrap().push(p.to_owned()),\n\n None => {\n\n eprintln!(\n\n \"Can't convert OsStr to str: {:?}\",\n\n path.file_name().unwrap()\n\n );\n\n continue;\n\n }\n", "file_path": "src/state.rs", "rank": 3, "score": 71283.71152549019 }, { "content": "# netctl-tray\n\nA lightweight netctl tray app with notifications written in Rust.\n\n\n\n## Screenshots\n\n\n\n![](https://i.imgur.com/5PavZiO.png) ![](https://i.imgur.com/mwWpkA4.png) ![](https://i.imgur.com/yghZ4Gt.png)\n\n\n\n## Usage\n\n\n\nTo launch the tray app:\n\n```\n\n$ netctl-tray\n\n```\n\nYou have to be in groups `wheel` and `network` for it to work properly. 
\n\nTo add an user to them, use:\n\n```\n\n# usermod -a -G wheel,network <user>\n\n```\n\n\n\n## Compiling\n\n\n\nThis application needs to be compiled for netctl and netctl-auto separately.\n\nFor `netctl`:\n\n```\n\ncargo build --release\n\n```\n\nFor `netctl-auto`:\n\n```\n\ncargo build --release --features \"auto\"\n\n```\n\n\n\n## Installation\n\n\n\nThis app is available on the AUR: [netctl-tray](https://aur.archlinux.org/packages/netctl-tray/) and [netctl-tray-auto](https://aur.archlinux.org/packages/netctl-tray-auto/)\n\n\n\n## Contributing\n\n\n\nAll contributions are welcome!\n", "file_path": "README.md", "rank": 7, "score": 20602.048414822417 }, { "content": "use crate::Opt;\n\nuse notify::{\n\n event::{Event as NEvent, EventKind as NEventKind},\n\n immediate_watcher, RecursiveMode, Watcher,\n\n};\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::net::TcpStream;\n\nuse std::process::Command;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::{Duration, Instant};\n\n\n\n#[derive(Debug)]\n\npub struct State {\n\n pub link_quality: u8,\n\n pub ping: f32,\n\n pub all_profiles: Arc<Mutex<Vec<String>>>,\n\n pub active_profile: Option<String>,\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 8, "score": 15910.016670316909 }, { "content": " // check ping\n\n // try connecting to the given IP\n\n let now = Instant::now();\n\n if TcpStream::connect_timeout(&args.host, Duration::from_nanos(500_000_000)).is_ok() {\n\n state.ping = now.elapsed().as_millis() as f32;\n\n } else {\n\n state.ping = f32::INFINITY;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/state.rs", "rank": 9, "score": 15907.953359735882 }, { "content": " }\n\n };\n\n break;\n\n }\n\n }\n\n state.active_profile = active_profile;\n\n\n\n if let Some(active_profile) = &state.active_profile {\n\n // Now we need to get the interface the current profile uses\n\n let mut current_profile_file = File::open(&format!(\"/etc/netctl/{}\", active_profile))?;\n\n let mut current_profile_contents = String::new();\n\n current_profile_file.read_to_string(&mut current_profile_contents)?;\n\n // iterate over lines to find the one specifying the interface\n\n let mut profile_interface = \"\";\n\n for line in current_profile_contents.split('\\n') {\n\n if line.starts_with(\"Interface\") {\n\n let mut interface = match line.split('=').nth(1) {\n\n Some(i) => i,\n\n None => {\n\n eprintln!(\n", "file_path": "src/state.rs", "rank": 10, "score": 15906.003367149719 }, { "content": " // Now, as we know the used interface we can check the link quality\n\n // It can be found in /proc/net/wireless\n\n let mut file = File::open(\"/proc/net/wireless\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n // iterate over lines and find the one describing our needed interface\n\n for line in contents.split('\\n').skip(2) {\n\n if line.starts_with(profile_interface) {\n\n // Found the line\n\n // find the right column\n\n let mut columns = line.split(' ').filter(|x| !x.is_empty());\n\n let mut link_quality = columns.nth(2).unwrap();\n\n // remove the last char which is a dot apparently\n\n link_quality = &link_quality[..link_quality.len() - 1];\n\n let link_quality: u8 = link_quality.parse().unwrap();\n\n state.link_quality = link_quality;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/state.rs", "rank": 11, "score": 15904.817481790798 }, { "content": " };\n\n }\n\n }\n\n NEventKind::Remove(_) => {\n\n // Remove the profile\n\n for path in event.paths {\n\n match path.file_name().unwrap().to_str() 
{\n\n Some(p) => {\n\n all_profiles.lock().unwrap().retain(|x| *x != p);\n\n }\n\n None => {\n\n eprintln!(\n\n \"Can't convert OsStr to str: {:?}\",\n\n path.file_name().unwrap()\n\n );\n\n continue;\n\n }\n\n };\n\n }\n\n }\n", "file_path": "src/state.rs", "rank": 12, "score": 15902.439555900575 }, { "content": " \"Profile not properly configured! Corrupted file: /etc/netctl/{}\",\n\n active_profile\n\n );\n\n continue;\n\n }\n\n }\n\n .trim();\n\n // Remove quotes if there\n\n if interface.starts_with('\"') && interface.ends_with('\"') {\n\n // double quotes\n\n interface = &interface[1..interface.len() - 1];\n\n } else if interface.starts_with('\\'') && interface.ends_with('\\'') {\n\n // single quotes\n\n interface = &interface[1..interface.len() - 1];\n\n }\n\n profile_interface = interface;\n\n break;\n\n }\n\n }\n\n\n", "file_path": "src/state.rs", "rank": 13, "score": 15902.432835868602 }, { "content": " _ => {}\n\n }\n\n }\n\n Err(e) => eprintln!(\"watch error: {:?}\", e),\n\n })?;\n\n\n\n watcher.watch(dir, RecursiveMode::Recursive)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/state.rs", "rank": 14, "score": 15902.140953337595 } ]
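The `state.rs` snippets in the entry above share the profile list between the tray state and the inotify callback through `Arc<Mutex<Vec<String>>>`, pushing on `Create` events and calling `retain` on `Remove`. Here is a small standalone sketch of that sharing pattern, with a plain thread standing in for the watcher callback; the profile names are invented.

```rust
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // One handle stays with the caller, a clone moves into the "watcher".
    let all_profiles: Arc<Mutex<Vec<String>>> =
        Arc::new(Mutex::new(vec!["wired".to_string()]));

    let for_watcher = Arc::clone(&all_profiles);
    let watcher = thread::spawn(move || {
        // Stand-in for a Create event: add a profile.
        for_watcher.lock().unwrap().push("wifi-home".to_string());
        // Stand-in for a Remove event: drop a profile by name.
        for_watcher.lock().unwrap().retain(|p| p != "wired");
    });
    watcher.join().unwrap();

    // Both sides observed the same list because they share the Arc<Mutex<..>>.
    let profiles = all_profiles.lock().unwrap();
    assert_eq!(profiles.len(), 1);
    assert_eq!(profiles[0], "wifi-home");
}
```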
Rust
examples/rpg_engine/src/character.rs
arn-the-long-beard/ntnu_rust_lecture
1c7d6aba84e9e5ea99f18a5e46a11a674a08aa34
use crate::dice::SkillDice; use crate::item::Weapon; use crate::item::*; use crate::stuff::{Stuff, StuffConfig}; use colored::*; #[derive(Clone)] pub struct Character { name: String, health: f32, max_health: f32, stuff: Stuff, } #[allow(unused)] impl Character { pub fn name(&self) -> &str { &self.name } pub fn health(&self) -> f32 { self.health } pub fn set_health(&mut self, health: f32) { self.health = health; } } #[allow(unused)] impl Character { pub fn new(name: &str, health: f32) -> Self { Character { name: name.to_string(), health, max_health: health, stuff: Default::default(), } } pub fn roll_dice(&self, skill: SkillDice) -> u8 { skill.dices_roll_result(&self.name) } #[must_use] pub fn grab_weapon<W: Weapon + Send + Sync + 'static>(mut self, new_weapon: W) -> Self { self.stuff = self.stuff.equip_weapon(new_weapon); self } #[must_use] pub fn drop_first_weapon(mut self) -> Self { self.stuff.unset_first_weapon(); self } #[must_use] pub fn drop_second_weapon(mut self) -> Self { self.stuff.unset_second_weapon(); self } #[must_use] pub fn grab_armor<A: Armor + Send + Sync + 'static>(mut self, armor: A) -> Self { self.stuff = self.stuff.equip_armor(armor); self } fn check_blocking_damages(&self) -> Option<BlockedDamages> { match self.stuff.get_weapon_settings() { StuffConfig::DualWeapons => { None } StuffConfig::ShieldAndWeapon => self.stuff.get_second_weapon_blocking_damage(), StuffConfig::TwoHandsWeapon => self.stuff.get_first_weapon_blocking_damage(), StuffConfig::OnlyShied => self.stuff.get_second_weapon_blocking_damage(), StuffConfig::OneSingleHandWeapon => self.stuff.get_first_weapon_blocking_damage(), StuffConfig::OneWeaponAsSecondary => None, } } pub fn can_block(&self) -> Option<BlockedDamages> { self.check_blocking_damages() } pub fn gets_hit(&mut self, raw_damages: RawDamages) -> RawDamages { let mut damage_taken = raw_damages - self.get_armor(); if damage_taken < 0.0 { damage_taken = raw_damages * 0.1; } self.update_health_from_taken_damage(&damage_taken); damage_taken } fn update_health_from_taken_damage(&mut self, damages: &RawDamages) { self.health -= *damages; if self.health < 0.0 { self.health = 0.0 } } #[deprecated] pub fn get_attacked_by(&mut self, damages: RawDamages, attack_dice: u8, def_dice: Option<u8>) { let mut receive_damage = damages - self.get_armor(); if let Some(def_result) = def_dice { if def_result > attack_dice { let blocking_damage = self.can_block().unwrap_or(0.0); receive_damage -= blocking_damage; println!( "{} {} {} with its weapon", self.name().bold(), "blocked".blue(), blocking_damage.to_string().red() ) } else { println!( "{} {} {} the attack ", self.name().bold(), "failed".red(), "blocking".underline() ); } } else { println!("{} Will not block the attack", self.name.bold()); } if receive_damage < 0.0 { receive_damage = damages * 0.1; } println!( "{} received {} damages", self.name.bold(), receive_damage.to_string().red().bold() ); self.health -= receive_damage; if self.health < 0.0 { self.health = 0.0 } } fn get_armor(&self) -> BlockedDamages { self.stuff.get_armor_rating() } pub fn get_health_status(&self) -> HealthStatus { let percentage: u8 = ((self.health / self.max_health) * 100.0) as u8; match percentage { 0 => HealthStatus::Dead, 1..=10 => HealthStatus::AlmostDead, 11..=30 => HealthStatus::SeriouslyHurt, 31..=50 => HealthStatus::VeryHurt, 51..=75 => HealthStatus::LightlyHurt, 76..=99 => HealthStatus::SlightlyHurt, 100 => HealthStatus::Healthy, _ => { println!( "{} % of maximum health, Did you get some magic ?", percentage ); 
HealthStatus::Healthy } } } pub fn deal_damages(&self) -> RawDamages { self.stuff.calculate_damages() } } #[derive(PartialEq, Debug)] pub enum HealthStatus { Dead, AlmostDead, SeriouslyHurt, VeryHurt, LightlyHurt, SlightlyHurt, Healthy, } #[cfg(test)] mod test { use super::*; fn get_test_player() -> Character { Character::new("test character", 1000.0) } fn get_long_iron_sword() -> RegularWeapon { RegularWeapon::new("Long Iron Sword", 25.0, HandheldType::SingleHand) } fn get_long_steel_sword() -> RegularWeapon { RegularWeapon::new("Long Steel Sword", 30.0, HandheldType::SingleHand) } fn get_steel_battle_axe() -> RegularWeapon { RegularWeapon::new("Steal battle Axe", 65.0, HandheldType::TwoHands) } fn get_iron_shield() -> Shield { Shield::new("Iron Shield", 25.0, 5.0) } fn get_steel_shield() -> Shield { Shield::new("Steel Shield", 35.0, 7.0) } fn get_daedric_mail() -> BodyArmor { BodyArmor::new("Daedric Shield", 45.0) } #[test] fn test_blocking_for_weapon_config() { let mut guard = get_test_player().grab_weapon(get_steel_battle_axe()); assert_eq!( guard.check_blocking_damages().unwrap(), get_steel_battle_axe().damages() * 0.5 ); guard = guard.grab_weapon(get_iron_shield()); assert_eq!( guard.check_blocking_damages().unwrap(), get_iron_shield().can_block_if_possible().unwrap() ); guard = guard.grab_weapon(get_long_iron_sword()); assert_eq!( guard.check_blocking_damages().unwrap(), get_iron_shield().can_block_if_possible().unwrap() ); guard = guard.grab_weapon(get_long_iron_sword()); assert!(guard.check_blocking_damages().is_none()); guard = guard.drop_second_weapon(); assert_eq!( guard.check_blocking_damages().unwrap(), get_long_iron_sword().damages() * 0.3 ); } #[test] fn check_status() { let mut guard_test = get_test_player(); assert_eq!(&guard_test.get_health_status(), &HealthStatus::Healthy); guard_test.set_health(850.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::SlightlyHurt); guard_test.set_health(550.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::LightlyHurt); guard_test.set_health(350.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::VeryHurt); guard_test.set_health(250.00); assert_eq!( &guard_test.get_health_status(), &HealthStatus::SeriouslyHurt ); guard_test.set_health(50.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::AlmostDead); guard_test.set_health(0.0); assert_eq!(&guard_test.get_health_status(), &HealthStatus::Dead); } #[test] fn kill_naked_character() { let mut guard = get_test_player(); guard.get_attacked_by(1800.2, 3, None); assert_eq!(&guard.get_health_status(), &HealthStatus::Dead); } #[test] fn defense_armored_character() { let mut guard = get_test_player().grab_armor(get_daedric_mail()); guard.get_attacked_by(100.0, 3, None); assert_eq!(guard.health, 945.0); } #[test] fn defense_armored_character_with_block() { let mut guard = get_test_player() .grab_armor(get_daedric_mail()) .grab_weapon(get_iron_shield()); guard.get_attacked_by(100.0, 3, Some(5)); assert_eq!(guard.health, 970.0); } }
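In `gets_hit` above, armor is subtracted from the incoming damage, but a hit is never absorbed completely: when armor exceeds the raw value, 10% of the raw damage still lands. The same rule as a small standalone function — the function name is hypothetical, only the arithmetic mirrors the code above.

```rust
// Hypothetical standalone version of the rule used by gets_hit():
// subtract armor, but never drop below 10% of the raw damage.
fn damage_taken(raw: f32, armor: f32) -> f32 {
    let reduced = raw - armor;
    if reduced < 0.0 {
        raw * 0.1
    } else {
        reduced
    }
}

fn main() {
    // Armor lower than the hit: plain subtraction (100 - 45 = 55).
    assert_eq!(damage_taken(100.0, 45.0), 55.0);
    // Armor higher than the hit: the 10% floor applies (20 * 0.1 = 2).
    assert_eq!(damage_taken(20.0, 45.0), 2.0);
}
```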
use crate::dice::SkillDice; use crate::item::Weapon; use crate::item::*; use crate::stuff::{Stuff, StuffConfig}; use colored::*; #[derive(Clone)] pub struct Character { name: String, health: f32, max_health: f32, stuff: Stuff, } #[allow(unused)] impl Character { pub fn name(&self) -> &str { &self.name } pub fn health(&self) -> f32 { self.health } pub fn set_health(&mut self, health: f32) { self.health = health; } } #[allow(unused)] impl Character { pub fn new(name: &str, health: f32) -> Self { Character { name: name.to_string(), health, max_health: health, stuff: Default::default(), } } pub fn roll_dice(&self, skill: SkillDice) -> u8 { skill.dices_roll_result(&self.name) } #[must_use] pub fn grab_weapon<W: Weapon + Send + Sync + 'static>(mut self, new_weapon: W) -> Self { self.stuff = self.stuff.equip_weapon(new_weapon); self } #[must_use] pub fn drop_first_weapon(mut self) -> Self { self.stuff.unset_first_weapon(); self } #[must_use] pub fn drop_second_weapon(mut self) -> Self { self.stuff.unset_second_weapon(); self } #[must_use] pub fn grab_armor<A: Armor + Send + Sync + 'static>(mut self, armor: A) -> Self { self.stuff = self.stuff.equip_armor(armor); self } fn check_blocking_damages(&self) -> Option<BlockedDamages> { match self.stuff.get_weapon_settings() { StuffConfig::DualWeapons => { None } StuffConfig::ShieldAndWeapon => self.stuff.get_second_weapon_blocking_damage(), StuffConfig::TwoHandsWeapon => self.stuff.get_first_weapon_blocking_damage(), StuffConfig::OnlyShied => self.stuff.get_second_weapon_blocking_damage(), StuffConfig::OneSingleHandWeapon => self.stuff.get_first_weapon_blocking_damage(), StuffConfig::OneWeaponAsSecondary => None, } } pub fn can_block(&self) -> Option<BlockedDamages> { self.check_blocking_damages() } pub fn gets_hit(&mut s
fn update_health_from_taken_damage(&mut self, damages: &RawDamages) { self.health -= *damages; if self.health < 0.0 { self.health = 0.0 } } #[deprecated] pub fn get_attacked_by(&mut self, damages: RawDamages, attack_dice: u8, def_dice: Option<u8>) { let mut receive_damage = damages - self.get_armor(); if let Some(def_result) = def_dice { if def_result > attack_dice { let blocking_damage = self.can_block().unwrap_or(0.0); receive_damage -= blocking_damage; println!( "{} {} {} with its weapon", self.name().bold(), "blocked".blue(), blocking_damage.to_string().red() ) } else { println!( "{} {} {} the attack ", self.name().bold(), "failed".red(), "blocking".underline() ); } } else { println!("{} Will not block the attack", self.name.bold()); } if receive_damage < 0.0 { receive_damage = damages * 0.1; } println!( "{} received {} damages", self.name.bold(), receive_damage.to_string().red().bold() ); self.health -= receive_damage; if self.health < 0.0 { self.health = 0.0 } } fn get_armor(&self) -> BlockedDamages { self.stuff.get_armor_rating() } pub fn get_health_status(&self) -> HealthStatus { let percentage: u8 = ((self.health / self.max_health) * 100.0) as u8; match percentage { 0 => HealthStatus::Dead, 1..=10 => HealthStatus::AlmostDead, 11..=30 => HealthStatus::SeriouslyHurt, 31..=50 => HealthStatus::VeryHurt, 51..=75 => HealthStatus::LightlyHurt, 76..=99 => HealthStatus::SlightlyHurt, 100 => HealthStatus::Healthy, _ => { println!( "{} % of maximum health, Did you get some magic ?", percentage ); HealthStatus::Healthy } } } pub fn deal_damages(&self) -> RawDamages { self.stuff.calculate_damages() } } #[derive(PartialEq, Debug)] pub enum HealthStatus { Dead, AlmostDead, SeriouslyHurt, VeryHurt, LightlyHurt, SlightlyHurt, Healthy, } #[cfg(test)] mod test { use super::*; fn get_test_player() -> Character { Character::new("test character", 1000.0) } fn get_long_iron_sword() -> RegularWeapon { RegularWeapon::new("Long Iron Sword", 25.0, HandheldType::SingleHand) } fn get_long_steel_sword() -> RegularWeapon { RegularWeapon::new("Long Steel Sword", 30.0, HandheldType::SingleHand) } fn get_steel_battle_axe() -> RegularWeapon { RegularWeapon::new("Steal battle Axe", 65.0, HandheldType::TwoHands) } fn get_iron_shield() -> Shield { Shield::new("Iron Shield", 25.0, 5.0) } fn get_steel_shield() -> Shield { Shield::new("Steel Shield", 35.0, 7.0) } fn get_daedric_mail() -> BodyArmor { BodyArmor::new("Daedric Shield", 45.0) } #[test] fn test_blocking_for_weapon_config() { let mut guard = get_test_player().grab_weapon(get_steel_battle_axe()); assert_eq!( guard.check_blocking_damages().unwrap(), get_steel_battle_axe().damages() * 0.5 ); guard = guard.grab_weapon(get_iron_shield()); assert_eq!( guard.check_blocking_damages().unwrap(), get_iron_shield().can_block_if_possible().unwrap() ); guard = guard.grab_weapon(get_long_iron_sword()); assert_eq!( guard.check_blocking_damages().unwrap(), get_iron_shield().can_block_if_possible().unwrap() ); guard = guard.grab_weapon(get_long_iron_sword()); assert!(guard.check_blocking_damages().is_none()); guard = guard.drop_second_weapon(); assert_eq!( guard.check_blocking_damages().unwrap(), get_long_iron_sword().damages() * 0.3 ); } #[test] fn check_status() { let mut guard_test = get_test_player(); assert_eq!(&guard_test.get_health_status(), &HealthStatus::Healthy); guard_test.set_health(850.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::SlightlyHurt); guard_test.set_health(550.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::LightlyHurt); 
guard_test.set_health(350.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::VeryHurt); guard_test.set_health(250.00); assert_eq!( &guard_test.get_health_status(), &HealthStatus::SeriouslyHurt ); guard_test.set_health(50.00); assert_eq!(&guard_test.get_health_status(), &HealthStatus::AlmostDead); guard_test.set_health(0.0); assert_eq!(&guard_test.get_health_status(), &HealthStatus::Dead); } #[test] fn kill_naked_character() { let mut guard = get_test_player(); guard.get_attacked_by(1800.2, 3, None); assert_eq!(&guard.get_health_status(), &HealthStatus::Dead); } #[test] fn defense_armored_character() { let mut guard = get_test_player().grab_armor(get_daedric_mail()); guard.get_attacked_by(100.0, 3, None); assert_eq!(guard.health, 945.0); } #[test] fn defense_armored_character_with_block() { let mut guard = get_test_player() .grab_armor(get_daedric_mail()) .grab_weapon(get_iron_shield()); guard.get_attacked_by(100.0, 3, Some(5)); assert_eq!(guard.health, 970.0); } }
elf, raw_damages: RawDamages) -> RawDamages { let mut damage_taken = raw_damages - self.get_armor(); if damage_taken < 0.0 { damage_taken = raw_damages * 0.1; } self.update_health_from_taken_damage(&damage_taken); damage_taken }
function_block-function_prefixed
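`grab_weapon`, `grab_armor`, and the `drop_*_weapon` methods in the entry above take `self` by value, return the updated `Character`, and are marked `#[must_use]` so a discarded return value produces a warning. Below is a toy type in the same consuming-builder style; it is not the actual `Character`/`Stuff` API, just the pattern.

```rust
// Toy consuming-builder: each method takes `self` by value and returns the
// updated value, and #[must_use] warns if the caller ignores the result.
#[derive(Debug, Default, PartialEq)]
struct Loadout {
    weapon: Option<String>,
    armor: Option<String>,
}

impl Loadout {
    #[must_use]
    fn grab_weapon(mut self, name: &str) -> Self {
        self.weapon = Some(name.to_string());
        self
    }

    #[must_use]
    fn grab_armor(mut self, name: &str) -> Self {
        self.armor = Some(name.to_string());
        self
    }
}

fn main() {
    // Calls chain naturally because each step hands ownership to the next.
    let loadout = Loadout::default()
        .grab_weapon("Iron Long Sword")
        .grab_armor("Iron Plate");
    assert_eq!(loadout.weapon.as_deref(), Some("Iron Long Sword"));
    assert_eq!(loadout.armor.as_deref(), Some("Iron Plate"));
}
```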
[ { "content": "pub fn roll_dice(actor: &str, dices: &str, action: &str) -> i64 {\n\n let result = Roller::new(&format!(\"{} : {} \", dices, action))\n\n .unwrap()\n\n .roll()\n\n .unwrap();\n\n println!(\n\n \"{} rolls {} for {} \",\n\n actor.bold(),\n\n dices.underline(),\n\n action.magenta()\n\n );\n\n result.as_single().unwrap().get_total()\n\n}\n\n\n\npub enum SkillDice {\n\n Initiative,\n\n Blocking,\n\n Attack,\n\n Dodge,\n\n}\n", "file_path": "examples/rpg_engine/src/dice.rs", "rank": 0, "score": 112005.73445544412 }, { "content": "fn update_name(company: &mut Company, new_name: &str) {\n\n company.name = new_name.to_string();\n\n}\n\n\n", "file_path": "examples/ownership/src/main.rs", "rank": 1, "score": 100676.02401084735 }, { "content": "// Let's start with armor;\n\npub trait Armor: Item {\n\n fn set_armor_rating(self, armor_rating: ArmorRating) -> Self\n\n where\n\n Self: Sized;\n\n fn armor_rating(&self) -> &ArmorRating;\n\n}\n\n\n\n/// NB : We could define many different armor type.\n\npub struct BodyArmor {\n\n armor_rating: f32,\n\n name: String,\n\n}\n\n\n\nimpl Default for BodyArmor {\n\n fn default() -> Self {\n\n BodyArmor {\n\n armor_rating: 0 as f32,\n\n name: \"\".to_string(),\n\n }\n\n }\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 2, "score": 82039.43523088138 }, { "content": "pub trait Weapon: Item {\n\n /// Describe how much damage a weapon can deal.\n\n /// More damage a weapon deals, better quality it is .\n\n fn damages(&self) -> &RawDamages;\n\n fn set_damages(self, amount: RawDamages) -> Self\n\n where\n\n Self: Sized;\n\n // Block attack and make calculation if possible\n\n fn can_block_if_possible(&self) -> Option<BlockedDamages> {\n\n match self.handheld_type() {\n\n HandheldType::SingleHand => Some(self.damages() * 0.3),\n\n HandheldType::TwoHands => Some(self.damages() * 0.5),\n\n // A bit dummy here because we have different implementation later.\n\n HandheldType::OnlyLeft => None,\n\n }\n\n }\n\n fn set_handheld_type(self, handheld: HandheldType) -> Self\n\n where\n\n Self: Sized;\n\n fn handheld_type(&self) -> &HandheldType;\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 3, "score": 82036.01231102912 }, { "content": "fn rate_business(value: &u32) -> &'static str {\n\n match value {\n\n 0 => \"Bankrupt\",\n\n 1..=100 => \"Weak\",\n\n 101..=350 => \"Ok\",\n\n 351..=700 => \"Nice\",\n\n 701..=1000 => \"Maybe a bubble\",\n\n _ => \"Like seven sisters\",\n\n }\n\n}\n\n\n", "file_path": "examples/ownership/src/main.rs", "rank": 4, "score": 72352.89180362954 }, { "content": "struct Company {\n\n pub name: String,\n\n /// In Billion\n\n pub value: u32,\n\n}\n\n\n", "file_path": "examples/ownership/src/main.rs", "rank": 5, "score": 48413.4046604762 }, { "content": "fn main() {\n\n let mut facebook = Company {\n\n name: \"FaceMash\".to_string(),\n\n value: 0,\n\n };\n\n // 1 - immutability\n\n facebook.name = \"Facebook\".to_string();\n\n facebook.value = 900;\n\n\n\n println!(\"{} new name is cool \", facebook.name);\n\n\n\n // 2 - Reference with & to read data\n\n // Try to assign value with `&` and see what happens:D\n\n display_data(&facebook);\n\n\n\n println!(\"{} is super old, we need rebranding. 
\", facebook.name); // <- Macro stuff does auto reference https://stackoverflow.com/questions/30450399/does-println-borrow-or-own-the-variable\n\n\n\n // <- if we do not use mut here, compiler will say NO\n\n facebook.name = \"Meta\".to_string();\n\n facebook.value -= 231;\n", "file_path": "examples/ownership/src/main.rs", "rank": 6, "score": 46285.67051924822 }, { "content": "fn main() {\n\n let mut line = String::new();\n\n print!(\"Enter total number of students:\");\n\n std::io::stdin().read_line(&mut line).unwrap();\n\n\n\n let students_amount: u32 = line.trim().parse().expect(\"We need an integer here\");\n\n\n\n let mut class = Vec::new();\n\n\n\n println!(\"Enter grades for students\");\n\n\n\n for student in 0..students_amount {\n\n print!(\"student{} :\", student + 1);\n\n let mut line = String::new();\n\n std::io::stdin().read_line(&mut line).unwrap();\n\n println!();\n\n\n\n let grade: u32 = line.trim().parse().expect(\"We need an integer here\");\n\n\n\n class.push(grade);\n\n }\n\n\n\n println!(\"Display grades for students\");\n\n\n\n for (index, grade) in class.iter().enumerate() {\n\n println!(\"student{} : {}\", index + 1, grade);\n\n }\n\n}\n", "file_path": "examples/rust_allocation/src/main.rs", "rank": 7, "score": 45320.39136510334 }, { "content": "//\n\npub trait Item {\n\n fn name(&self) -> &str;\n\n fn set_name(self, name: &str) -> Self\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 8, "score": 43446.805758282906 }, { "content": "fn main() {\n\n let iron_plate = BodyArmor::new(\"Iron Plate\", 32.0);\n\n let steel_plate = BodyArmor::new(\"Steel Plate\", 54.0);\n\n let daedric_armor = BodyArmor::new(\"Daedric Armor\", 25.0);\n\n let daedric_armor_2 = BodyArmor::new(\"Daedric Armor 2\", 25.0);\n\n\n\n // Lets put some shields\n\n let steel_shield = Shield::new(\"steal Shield\", 55.0, 20.0);\n\n let iron_shield = Shield::new(\"Iron Shield\", 25.0, 15.0);\n\n\n\n // Lets put some weapons.\n\n let steel_long_sword = RegularWeapon::new(\"Steel Long Sword\", 40.0, HandheldType::SingleHand);\n\n let iron_long_sword = RegularWeapon::new(\"Iron Long Sword\", 35.0, HandheldType::SingleHand);\n\n let steel_battle_axe = RegularWeapon::new(\"Steel battle Axe\", 65.0, HandheldType::TwoHands);\n\n let daedric_battle_axe = RegularWeapon::new(\"Daedric battle Axe\", 85.0, HandheldType::TwoHands);\n\n\n\n let grand_ma_skyrim = Character::new(\"Skyrim Grandma\", 300.00)\n\n .grab_weapon(steel_battle_axe)\n\n .grab_armor(daedric_armor);\n\n\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 9, "score": 42767.386336336785 }, { "content": "#[allow(unused)]\n\nfn main() {\n\n println!(\"Hello and Fight\");\n\n\n\n // Lets put some armors.\n\n let iron_plate = BodyArmor::new(\"Iron Plate\", 32.0);\n\n let steel_plate = BodyArmor::new(\"Steel Plate\", 54.0);\n\n let daedric_armor = BodyArmor::new(\"Daedric Armor\", 25.0);\n\n let daedric_armor_2 = BodyArmor::new(\"Daedric Armor 2\", 25.0);\n\n\n\n // Lets put some shields\n\n let steel_shield = Shield::new(\"steal Shield\", 55.0, 20.0);\n\n let iron_shield = Shield::new(\"Iron Shield\", 25.0, 15.0);\n\n\n\n // Lets put some weapons.\n\n let iron_long_sword = RegularWeapon::new(\"Iron Long Sword\", 35.0, HandheldType::SingleHand);\n\n let steel_battle_axe = RegularWeapon::new(\"Steel battle Axe\", 65.0, HandheldType::TwoHands);\n\n let daedric_battle_axe = RegularWeapon::new(\"Daedric battle Axe\", 85.0, HandheldType::TwoHands);\n\n\n\n let grand_ma_skyrim = 
Character::new(\"Skyrim Grandma\", 300.00)\n\n .grab_weapon(steel_battle_axe)\n", "file_path": "examples/rpg_engine/examples/simple_fight/src/main.rs", "rank": 10, "score": 42767.386336336785 }, { "content": "#[cfg(feature = \"song\")]\n\nfn add_song() {\n\n println!(\n\n \"Dovahkiin, Dovahkiin, naal ok zin los vahriin,\n\n Wah dein vokul mahfaeraak ahst vaal!\n\n Ahr fin norok paal graan fod nust hon zindro zaan,\n\n Dovahkiin fah hin kogaan mu draal!\"\n\n );\n\n thread::sleep(Duration::new(2, 0));\n\n println!(\n\n \"\n\n Huzrah nu, kul do od, wah aan bok lingrah vod,\n\n Ahrk fin tey, boziik fun, do fin gein!\n\n Wo lost fron wah ney dov, ahrk fin reyliik do jul,\n\n Voth aan suleyk wah ronit faal krein!\"\n\n );\n\n thread::sleep(Duration::new(2, 0));\n\n println!(\n\n \" Ahrk fin zul, rok drey kod, nau tol morokei frod,\n\n Rul lot Taazokaan motaad voth kein!\n\n Sahrot Thu'um, med aan tuz, bey zeim hokoron pah,\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 11, "score": 42014.23692119663 }, { "content": "fn display_data(company: &Company) {\n\n println!(\"Name : {} \", company.name);\n\n println!(\"Market Cap : {} \", company.name);\n\n println!(\"Rating {}\", rate_business(&company.value))\n\n}\n\n\n", "file_path": "examples/ownership/src/main.rs", "rank": 12, "score": 39611.71752658159 }, { "content": "fn rebuild_business(_: Company) -> Company {\n\n Company {\n\n name: \"Facebook 2.0\".to_string(),\n\n value: 850,\n\n }\n\n}\n\n\n", "file_path": "examples/ownership/src/main.rs", "rank": 13, "score": 38266.86489439207 }, { "content": "#![allow(unused)]\n\n\n\nuse crate::item::*;\n\nuse std::sync::Arc;\n\n\n\n/// https://stackoverflow.com/questions/49377231/when-to-use-rc-vs-box\n\n/// Option on Arc is maybe useless, I could use default value for Weapon & Armor as well.\n\n#[derive(Clone, Default)]\n\npub struct Stuff {\n\n armor: Option<Arc<dyn Armor + Send + Sync>>,\n\n first_weapon: Option<Arc<dyn Weapon + Send + Sync>>,\n\n second_weapon: Option<Arc<dyn Weapon + Send + Sync>>,\n\n}\n\n\n\nimpl Stuff {\n\n fn set_armor<A: 'static + Armor + Send + Sync>(&mut self, armor: A) {\n\n self.armor = Some(Arc::new(armor));\n\n }\n\n\n\n fn set_first_weapon<W: 'static + Weapon + Send + Sync>(&mut self, first_weapon: W) {\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 30, "score": 29351.77110957809 }, { "content": " self.first_weapon = Some(Arc::new(first_weapon));\n\n }\n\n\n\n fn set_second_weapon<W: 'static + Weapon + Send + Sync>(&mut self, second_weapon: W) {\n\n self.second_weapon = Some(Arc::new(second_weapon))\n\n }\n\n\n\n pub fn unset_first_weapon(&mut self) {\n\n self.first_weapon = None;\n\n }\n\n\n\n pub fn unset_second_weapon(&mut self) {\n\n self.second_weapon = None;\n\n }\n\n\n\n fn armor(&self) -> &Option<Arc<dyn Armor + Send + Sync>> {\n\n &self.armor\n\n }\n\n\n\n fn first_weapon(&self) -> &Option<Arc<dyn Weapon + Send + Sync>> {\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 31, "score": 29348.57049955516 }, { "content": " &self.first_weapon\n\n }\n\n\n\n fn second_weapon(&self) -> &Option<Arc<dyn Weapon + Send + Sync>> {\n\n &self.second_weapon\n\n }\n\n\n\n /// Will panic if you have equipped a two hand weapon as a second Weapon.\n\n /// We could have specific trait for weapons to be used with both Hands.\n\n /// Ex : SingleHand Item could have a trait \"BothHand\", and restrict this trait for second hand.\n\n #[must_use]\n\n pub fn equip_weapon<W: 'static + Weapon + Send + Sync>(mut self, weapon: W) 
-> Self {\n\n match weapon.handheld_type() {\n\n HandheldType::SingleHand => {\n\n if let Some(current_weapon) = self.first_weapon() {\n\n if current_weapon.handheld_type() == &HandheldType::SingleHand {\n\n self.second_weapon = Some(current_weapon.clone())\n\n }\n\n }\n\n self.set_first_weapon(weapon);\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 32, "score": 29345.060491359443 }, { "content": " pub fn get_second_weapon_blocking_damage(&self) -> Option<f32> {\n\n self.second_weapon.as_ref()?.can_block_if_possible()\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum StuffConfig {\n\n DualWeapons,\n\n ShieldAndWeapon,\n\n TwoHandsWeapon,\n\n OnlyShied,\n\n OneSingleHandWeapon,\n\n OneWeaponAsSecondary,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::item::{Armor, BodyArmor, HandheldType, Item, RegularWeapon, Shield, Weapon};\n\n use crate::stuff::{Stuff, StuffConfig};\n\n\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 33, "score": 29343.86166966887 }, { "content": " #[must_use]\n\n pub fn equip_armor<A: 'static + Armor + Send + Sync>(mut self, armor: A) -> Self {\n\n self.set_armor(armor);\n\n self\n\n }\n\n\n\n /// Calculate armor rating.\n\n pub fn get_armor_rating(&self) -> ArmorRating {\n\n if let Some(armor) = self.armor() {\n\n *armor.armor_rating()\n\n } else {\n\n 0.0\n\n }\n\n }\n\n\n\n pub fn calculate_blocked_damage_armor(&self) -> BlockedDamages {\n\n let armor_damages = if let Some(armor) = self.armor() {\n\n *armor.armor_rating()\n\n } else {\n\n 0.0\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 34, "score": 29343.16950207902 }, { "content": " get_long_iron_sword().name()\n\n );\n\n assert!(stuff.second_weapon.is_none());\n\n\n\n assert!(stuff.get_weapon_settings() == StuffConfig::OneSingleHandWeapon);\n\n }\n\n\n\n #[test]\n\n fn use_two_hands_weapons() {\n\n let steel_battle_axe = get_steel_battle_axe();\n\n let long_iron_sword = get_long_iron_sword();\n\n let steel_shield = get_steel_shield();\n\n let mut stuff = Stuff::default()\n\n .equip_weapon(long_iron_sword)\n\n .equip_weapon(steel_shield);\n\n\n\n stuff = stuff.equip_weapon(steel_battle_axe);\n\n\n\n assert_eq!(\n\n stuff.first_weapon.as_ref().unwrap().name().to_string(),\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 35, "score": 29341.46277957638 }, { "content": " assert_eq!(\n\n stuff.second_weapon.as_ref().unwrap().name().to_string(),\n\n get_iron_shield().name()\n\n );\n\n\n\n stuff = stuff\n\n .equip_weapon(get_iron_shield())\n\n .equip_weapon(get_steel_shield());\n\n\n\n assert_eq!(\n\n stuff.second_weapon.as_ref().unwrap().name().to_string(),\n\n get_steel_shield().name()\n\n );\n\n assert!(stuff.get_weapon_settings() == StuffConfig::ShieldAndWeapon);\n\n }\n\n\n\n #[test]\n\n fn test_damages_with_shield_and_sword() {\n\n let damages = Stuff::default()\n\n .equip_weapon(get_long_steel_sword())\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 36, "score": 29340.37914674709 }, { "content": "\n\n assert_eq!(\n\n stuff.first_weapon.as_ref().unwrap().name().to_string(),\n\n get_long_steel_sword().name()\n\n );\n\n\n\n assert_eq!(\n\n stuff.second_weapon.as_ref().unwrap().name().to_string(),\n\n get_long_iron_sword().name()\n\n );\n\n }\n\n\n\n #[test]\n\n fn replace_two_hands_weapon_with_single() {\n\n let steel_battle_axe = get_steel_battle_axe();\n\n let mut stuff = Stuff::default().equip_weapon(steel_battle_axe);\n\n let long_iron_sword = get_long_iron_sword();\n\n stuff = stuff.equip_weapon(long_iron_sword);\n\n assert_eq!(\n\n 
stuff.first_weapon.as_ref().unwrap().name().to_string(),\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 37, "score": 29340.355432618802 }, { "content": " assert_eq!(\n\n stuff.second_weapon.as_ref().unwrap().name().to_string(),\n\n get_long_iron_sword().name()\n\n );\n\n assert!(stuff.get_weapon_settings() == StuffConfig::DualWeapons);\n\n }\n\n\n\n #[test]\n\n /// Let's see when we move the first weapon as second\n\n /// Let's see if using just one weapon works\n\n fn replace_single_hand_weapons() {\n\n let long_iron_sword = get_long_iron_sword();\n\n let long_steel_sword = get_long_steel_sword();\n\n let steel_shield = get_steel_shield();\n\n\n\n let mut stuff = Stuff::default()\n\n .equip_weapon(long_iron_sword)\n\n .equip_weapon(steel_shield);\n\n\n\n stuff = stuff.equip_weapon(long_steel_sword);\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 38, "score": 29339.605343629457 }, { "content": " }\n\n\n\n #[test]\n\n fn use_single_hand_weapon_in_place_of_shield() {\n\n let long_iron_sword = get_long_iron_sword();\n\n let another_long_iron_sword = get_long_iron_sword();\n\n let steel_shield = get_steel_shield();\n\n\n\n let mut stuff = Stuff::default()\n\n .equip_weapon(long_iron_sword)\n\n .equip_weapon(steel_shield);\n\n\n\n // because of mut, need &mut self if we want to update partialy the object;\n\n stuff = stuff.equip_weapon(another_long_iron_sword);\n\n\n\n assert_eq!(\n\n stuff.first_weapon.as_ref().unwrap().name().to_string(),\n\n get_long_iron_sword().name()\n\n );\n\n\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 39, "score": 29339.480350668233 }, { "content": " get_steel_battle_axe().name()\n\n );\n\n\n\n assert!(stuff.second_weapon.is_none());\n\n assert!(stuff.get_weapon_settings() == StuffConfig::TwoHandsWeapon);\n\n }\n\n\n\n #[test]\n\n fn use_shields() {\n\n let long_iron_sword = get_long_iron_sword();\n\n let another_long_iron_sword = get_long_iron_sword();\n\n let steel_shield = get_steel_shield();\n\n let iron_shield = get_iron_shield();\n\n\n\n let mut stuff = Stuff::default()\n\n .equip_weapon(long_iron_sword)\n\n .equip_weapon(steel_shield);\n\n\n\n stuff = stuff.equip_weapon(iron_shield);\n\n\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 40, "score": 29339.34544803951 }, { "content": " StuffConfig::DualWeapons\n\n } else if self.is_shield_only() {\n\n StuffConfig::OnlyShied\n\n } else if self.is_shield_with_single_weapon() {\n\n StuffConfig::ShieldAndWeapon\n\n } else if self.is_single_weapon() {\n\n StuffConfig::OneSingleHandWeapon\n\n } else if self.is_two_hands_weapon() {\n\n StuffConfig::TwoHandsWeapon\n\n } else if self.is_one_single_as_secondary() {\n\n StuffConfig::OneWeaponAsSecondary\n\n } else {\n\n panic!(\"Config not found maybe no weapons have been equipped\")\n\n }\n\n }\n\n\n\n pub fn get_first_weapon_blocking_damage(&self) -> Option<f32> {\n\n self.first_weapon.as_ref()?.can_block_if_possible()\n\n }\n\n\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 41, "score": 29338.54462365618 }, { "content": " }\n\n\n\n fn is_shield_only(&self) -> bool {\n\n if let (None, Some(second_weapon)) = (self.first_weapon(), self.second_weapon()) {\n\n second_weapon.handheld_type() == &HandheldType::OnlyLeft\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn is_one_single_as_secondary(&self) -> bool {\n\n if let (None, Some(second_weapon)) = (self.first_weapon(), self.second_weapon()) {\n\n second_weapon.handheld_type() == &HandheldType::SingleHand\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn 
get_weapon_settings(&self) -> StuffConfig {\n\n if self.is_double_weapon() {\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 42, "score": 29338.225871121067 }, { "content": " .equip_weapon(get_steel_battle_axe())\n\n .calculate_damages();\n\n assert_eq!(&damages, get_steel_battle_axe().damages())\n\n }\n\n\n\n #[test]\n\n fn test_armor_rating_with_no_armor() {\n\n let rating = Stuff::default().get_armor_rating();\n\n\n\n assert_eq!(rating, 0.0)\n\n }\n\n\n\n #[test]\n\n fn test_armor_rating_with_armor() {\n\n let rating = Stuff::default()\n\n .equip_armor(get_daedric_mail())\n\n .get_armor_rating();\n\n\n\n assert_eq!(&rating, get_daedric_mail().armor_rating());\n\n }\n\n}\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 43, "score": 29335.768517259694 }, { "content": " };\n\n armor_damages\n\n }\n\n\n\n fn is_single_weapon(&self) -> bool {\n\n if let Some(w) = self.first_weapon() {\n\n w.handheld_type() == &HandheldType::SingleHand && self.second_weapon().is_none()\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn is_double_weapon(&self) -> bool {\n\n if let (Some(first_weapon), Some(second_weapon)) =\n\n (self.first_weapon(), self.second_weapon())\n\n {\n\n first_weapon.handheld_type() == &HandheldType::SingleHand\n\n && second_weapon.handheld_type() == &HandheldType::SingleHand\n\n } else {\n\n false\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 44, "score": 29335.49845773231 }, { "content": " self.set_first_weapon(weapon);\n\n }\n\n }\n\n self\n\n }\n\n\n\n ///Calculate how much damage the equipped weapons can do.\n\n /// Bash damages from shield are counted\n\n pub fn calculate_damages(&self) -> RawDamages {\n\n let mut damages: RawDamages = 0.0;\n\n\n\n if let Some(first_weapon) = self.first_weapon() {\n\n damages = *first_weapon.damages();\n\n }\n\n if let Some(second_weapon) = self.second_weapon() {\n\n damages += *second_weapon.damages();\n\n }\n\n damages\n\n }\n\n\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 45, "score": 29333.588505057745 }, { "content": " }\n\n }\n\n\n\n fn is_two_hands_weapon(&self) -> bool {\n\n if let (Some(first_weapon), None) = (self.first_weapon(), self.second_weapon()) {\n\n first_weapon.handheld_type() == &HandheldType::TwoHands\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn is_shield_with_single_weapon(&self) -> bool {\n\n if let (Some(first_weapon), Some(second_weapon)) =\n\n (self.first_weapon(), self.second_weapon())\n\n {\n\n first_weapon.handheld_type() == &HandheldType::SingleHand\n\n && second_weapon.handheld_type() == &HandheldType::OnlyLeft\n\n } else {\n\n false\n\n }\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 46, "score": 29333.570534418042 }, { "content": " fn get_long_iron_sword() -> RegularWeapon {\n\n RegularWeapon::new(\"Long Iron Sword\", 25.0, HandheldType::SingleHand)\n\n }\n\n fn get_long_steel_sword() -> RegularWeapon {\n\n RegularWeapon::new(\"Long Steel Sword\", 30.0, HandheldType::SingleHand)\n\n }\n\n fn get_steel_battle_axe() -> RegularWeapon {\n\n RegularWeapon::new(\"Steal battle Axe\", 65.0, HandheldType::TwoHands)\n\n }\n\n\n\n fn get_iron_shield() -> Shield {\n\n Shield::new(\"Iron Shield\", 25.0, 5.0)\n\n }\n\n\n\n fn get_steel_shield() -> Shield {\n\n Shield::new(\"Steel Shield\", 35.0, 7.0)\n\n }\n\n\n\n fn get_daedric_mail() -> BodyArmor {\n\n BodyArmor::new(\"Daedric Shield\", 45.0)\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 47, "score": 29333.205914994793 }, { "content": " .equip_weapon(get_steel_shield())\n\n 
.calculate_damages();\n\n\n\n assert_eq!(\n\n damages,\n\n get_steel_shield().damages() + get_long_steel_sword().damages()\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_damages_with_shield() {\n\n let damages = Stuff::default()\n\n .equip_weapon(get_steel_shield())\n\n .calculate_damages();\n\n assert_eq!(&damages, get_steel_shield().damages())\n\n }\n\n\n\n #[test]\n\n fn test_damages_with_two_hands() {\n\n let damages = Stuff::default()\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 48, "score": 29332.87143058789 }, { "content": " }\n\n HandheldType::OnlyLeft => {\n\n if let Some(current_first_weapon) = self.first_weapon() {\n\n if current_first_weapon.handheld_type() == &HandheldType::TwoHands {\n\n self.unset_first_weapon();\n\n }\n\n }\n\n\n\n // See comment on how we could avoid this issue at compile time.\n\n // if First weapon is set or not, we do not care, left item always goes left.\n\n if let Some(current_second_weapon) = self.second_weapon() {\n\n if current_second_weapon.handheld_type() == &HandheldType::TwoHands {\n\n panic!(\"It seems you have a two hand weapon as second weapon\");\n\n }\n\n }\n\n\n\n self.set_second_weapon(weapon)\n\n }\n\n HandheldType::TwoHands => {\n\n self.unset_second_weapon();\n", "file_path": "examples/rpg_engine/src/stuff.rs", "rank": 49, "score": 29331.557400880785 }, { "content": "}\n\n\n\nimpl Armor for Shield {\n\n fn set_armor_rating(mut self, reduction: f32) -> Self {\n\n self.armor_rating = reduction;\n\n self\n\n }\n\n\n\n fn armor_rating(&self) -> &f32 {\n\n &self.armor_rating\n\n }\n\n}\n\n\n\nimpl Item for Shield {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn set_name(mut self, name: &str) -> Self {\n\n self.name = name.to_string();\n\n self\n\n }\n\n}\n\n\n\nimpl Weapon for Shield {\n\n fn damages(&self) -> &RawDamages {\n\n &self.bash_damage\n\n }\n\n\n\n fn set_damages(mut self, amount: RawDamages) -> Self {\n\n self.bash_damage = amount;\n\n self\n\n }\n\n\n\n fn can_block_if_possible(&self) -> Option<BlockedDamages> {\n\n //We could have skills here to help us to calculate\n\n Some(self.armor_rating)\n\n }\n\n\n\n fn set_handheld_type(mut self, handheld: HandheldType) -> Self {\n\n self.handheld = handheld;\n\n self\n\n }\n\n\n\n fn handheld_type(&self) -> &HandheldType {\n\n &self.handheld\n\n }\n\n}\n\n\n\n// NB: I could have made multiple trait instead of enum as well.\n\n#[derive(PartialEq)]\n\npub enum HandheldType {\n\n SingleHand,\n\n OnlyLeft,\n\n TwoHands,\n\n}\n\n\n\nimpl Shield {\n\n pub fn new(name: &str, armor: f32, bash_damages: f32) -> Self {\n\n Self::default()\n\n .set_name(name)\n\n .set_armor_rating(armor)\n\n .set_handheld_type(HandheldType::OnlyLeft)\n\n .set_damages(bash_damages)\n\n }\n\n}\n\n```\n\n\n\n\n\nCheck the code for more examples.\n\n\n\nCharacter definition and Stuff definition that qualifies the weapons + armor configuration.\n\n\n\n\n", "file_path": "README.md", "rank": 50, "score": 26.136983025310542 }, { "content": "###### 1 - Armor & Weapons\n\n\n\n```rust\n\n// Every \"object ( weapons or armor is an item with a name )\n\npub trait Item {\n\n fn name(&self) -> &str;\n\n fn set_name(self, name: &str) -> Self\n\n where\n\n Self: Sized;\n\n}\n\n\n\n// We have some Armor\n\npub trait Armor: Item {\n\n fn set_armor_rating(self, armor_rating: ArmorRating) -> Self\n\n where\n\n Self: Sized;\n\n fn armor_rating(&self) -> &ArmorRating;\n\n}\n\n\n\npub trait Weapon: Item {\n\n /// Describe how much damage a weapon can deal.\n\n /// More damage a weapon deals, better quality it is 
.\n\n fn damages(&self) -> &RawDamages;\n\n fn set_damages(self, amount: RawDamages) -> Self\n\n where\n\n Self: Sized;\n\n // Block attack and make calculation if possible\n\n fn can_block_if_possible(&self) -> Option<BlockedDamages> {\n\n match self.handheld_type() {\n\n HandheldType::SingleHand => Some(self.damages() * 0.4),\n\n HandheldType::TwoHands => Some(self.damages() * 0.7),\n\n // A bit dummy here because we have different implementation later.\n\n HandheldType::OnlyLeft => None,\n\n }\n\n }\n\n fn set_handheld_type(self, handheld: HandheldType) -> Self\n\n where\n\n Self: Sized;\n\n fn handheld_type(&self) -> &HandheldType;\n\n}\n\n\n\n// Can make alias type for better semantic.\n\npub type BlockedDamages = f32;\n\npub type RawDamages = f32;\n\npub type ArmorRating = f32;\n\n```\n\n\n\n\n\n-> Now we can make some struct that implement this trait.\n\n\n\nHere is the shield which is a weapon, but it also has armor properties.\n\n\n\n```rust\n\n// ----- Rest of the Code\n\npub struct Shield {\n\n armor_rating: f32,\n\n name: String,\n\n hold: HandheldType,\n\n bash_damage: RawDamages,\n\n handheld: HandheldType,\n\n}\n\n\n\nimpl Default for Shield {\n\n fn default() -> Self {\n\n Shield {\n\n armor_rating: 0.0,\n\n name: \"\".to_string(),\n\n hold: HandheldType::OnlyLeft,\n\n bash_damage: 0.0,\n\n handheld: HandheldType::OnlyLeft,\n\n }\n\n }\n", "file_path": "README.md", "rank": 51, "score": 25.582850312128354 }, { "content": "###### 2 - Character\n\n```rust\n\npub struct Character {\n\n name: String,\n\n health: f32,\n\n max_health: f32,\n\n stuff: Stuff,\n\n}\n\n\n\n/// Here we store any kind of weapons and armor.\n\n/// Stuff contains specific pointers to dynamic object\n\n/// The compiler will say No to this until you tell him that the object size is know at compile time in the trait definition.\n\n/// That is why we have the word `Sized` for self in previous trait.\n\n#[derive(Default)]\n\npub struct Stuff {\n\n armor: Option<Rc<dyn Armor>>,\n\n first_weapon: Option<Rc<dyn Weapon>>,\n\n second_weapon: Option<Rc<dyn Weapon>>, // Rc is a specific reference pointer\n\n}\n\n\n\nimpl Stuff {\n\n \n\n// Many other methods \n\n/// Will panic if you have equipped a two hand weapon as a second Weapon.\n\n/// \n\npub fn equip_weapon<W: 'static + Weapon>(mut self, weapon: W) -> Self {\n\n match weapon.handheld_type() {\n\n \n\n HandheldType::SingleHand => {\n\n if let Some(current_weapon) = self.first_weapon() {\n\n if current_weapon.handheld_type() == &HandheldType::SingleHand {\n\n self.second_weapon = Some(current_weapon.clone())\n\n }\n\n }\n\n self.set_first_weapon(weapon);\n\n }\n\n \n\n HandheldType::OnlyLeft => {\n\n if let Some(current_first_weapon) = self.first_weapon() {\n\n if current_first_weapon.handheld_type() == &HandheldType::TwoHands {\n\n self.unset_first_weapon();\n\n }\n\n }\n\n\n\n // See comment on how we could avoid this issue at compile time.\n\n // if First weapon is set or not, we do not care, left item always goes left.\n\n if let Some(current_second_weapon) = self.second_weapon() {\n\n if current_second_weapon.handheld_type() == &HandheldType::TwoHands {\n\n panic!(\"It seems you have a two hand weapon as second weapon\");\n\n }\n\n }\n\n\n\n self.set_second_weapon(weapon)\n\n }\n\n HandheldType::TwoHands => {\n\n self.unset_second_weapon();\n\n self.set_first_weapon(weapon);\n\n }\n\n }\n\n self\n\n}\n\n\n\n}\n\n```\n\n\n\n\n\n\n", "file_path": "README.md", "rank": 52, "score": 23.07275436044905 }, { "content": "###### 3 - Fight\n\n\n\n```rust\n\npub 
struct Fight {\n\n winner_name: Option<String>,\n\n round: u16,\n\n opponents: (Character, Character),\n\n}\n\n\n\n#[allow(unused)]\n\nimpl Fight {\n\n pub fn winner_name(&self) -> &Option<String> {\n\n &self.winner_name\n\n }\n\n pub fn round(&self) -> u16 {\n\n self.round\n\n }\n\n pub fn opponents(&self) -> &(Character, Character) {\n\n &self.opponents\n\n }\n\n pub fn new(first_fighter: Character, second_fighter: Character) -> Self {\n\n Fight {\n\n winner_name: None,\n\n round: 0,\n\n opponents: (first_fighter, second_fighter),\n\n }\n\n }\n\n\n\n pub fn start(&mut self) {\n\n // My ugly code you can check\n\n }\n\n}\n\n\n\n```\n\n\n\n\n\n\n\nHere is how the \"game\" looks like.\n\n\n\n```rust\n\n\n\nmod character;\n\nmod dice;\n\nmod fight;\n\nmod item;\n\nmod stuff;\n\nuse crate::character::Character;\n\nuse crate::fight::Fight;\n\nuse item::*;\n\n\n\nfn main() {\n\n println!(\"Hello and Fight\");\n\n\n\n // Lets put some armors.\n\n let iron_plate = BodyArmor::new(\"Iron Plate\", 32.0);\n\n let steel_plate = BodyArmor::new(\"Steel Plate\", 54.0);\n\n let daedric_armor = BodyArmor::new(\"Daedric Armor\", 25.0);\n\n let daedric_armor_2 = BodyArmor::new(\"Daedric Armor 2\", 25.0);\n\n\n\n // Lets put some shields\n\n let steel_shield = Shield::new(\"steal Shield\", 55.0, 20.0);\n\n let iron_shield = Shield::new(\"Iron Shield\", 25.0, 15.0);\n\n\n\n // Lets put some weapons.\n\n let iron_long_sword = RegularWeapon::new(\"Iron Long Sword\", 35.0, HandheldType::SingleHand);\n\n let steel_battle_axe = RegularWeapon::new(\"Steel battle Axe\", 65.0, HandheldType::TwoHands);\n\n let daedric_battle_axe = RegularWeapon::new(\"Daedric battle Axe\", 85.0, HandheldType::TwoHands);\n\n\n\n let grand_ma_skyrim = Character::new(\"Skyrim Grandma\", 300.00)\n\n .grab_weapon(steel_battle_axe)\n\n .grab_armor(daedric_armor);\n\n\n\n let white_run_guard = Character::new(\"Olaf the dummy guard\", 300.00)\n\n .grab_weapon(steel_shield) // <- we can do it because of generic + trait objects for weapon\n\n .grab_weapon(iron_long_sword)\n", "file_path": "README.md", "rank": 53, "score": 22.834707255179488 }, { "content": "}\n\n\n\nimpl BodyArmor {\n\n pub fn new(name: &str, armor_rating: f32) -> Self {\n\n Self::default()\n\n .set_name(name)\n\n .set_armor_rating(armor_rating)\n\n }\n\n}\n\n\n\nimpl Armor for BodyArmor {\n\n fn set_armor_rating(mut self, reduction: f32) -> Self {\n\n self.armor_rating = reduction;\n\n self\n\n }\n\n fn armor_rating(&self) -> &f32 {\n\n &self.armor_rating\n\n }\n\n}\n\n\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 54, "score": 20.967478703069155 }, { "content": "use crate::character::{Character, HealthStatus};\n\nuse crate::dice::SkillDice;\n\nuse crate::item::{BlockedDamages, RawDamages};\n\nuse colored::*;\n\n\n\npub mod turn;\n\nuse crate::fight::turn::{Turn, TurnStep};\n\nuse std::collections::HashMap;\n\n\n\npub struct Fight {\n\n winner_name: Option<String>,\n\n round: u32,\n\n attack_counting: HashMap<String, u32>,\n\n opponents: (Character, Character),\n\n winner: Option<Character>,\n\n loser: Option<Character>,\n\n}\n\n\n\n#[allow(unused)]\n\nimpl Fight {\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 55, "score": 20.237343308900936 }, { "content": "use crate::character::{Character, HealthStatus};\n\nuse crate::dice::SkillDice;\n\nuse crate::item::{BlockedDamages, RawDamages};\n\nuse colored::*;\n\n\n\npub struct Turn {\n\n pub attacker: Character,\n\n pub defender: Character,\n\n pub number: u32,\n\n}\n\n\n\nimpl Turn {\n\n pub fn 
new(number: u32, attacker: Character, defender: Character) -> Self {\n\n let turn = Turn {\n\n attacker,\n\n defender,\n\n number,\n\n };\n\n\n\n turn.display_results(TurnStep::Initiative);\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 56, "score": 19.664503134141345 }, { "content": "impl Item for BodyArmor {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn set_name(mut self, name: &str) -> Self {\n\n self.name = name.to_string();\n\n self\n\n }\n\n}\n\n\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 57, "score": 18.637626289364434 }, { "content": "\n\n fn can_block_if_possible(&self) -> Option<BlockedDamages> {\n\n //We could have skills here to help us to calculate\n\n Some(self.armor_rating)\n\n }\n\n\n\n fn set_handheld_type(mut self, handheld: HandheldType) -> Self {\n\n self.handheld = handheld;\n\n self\n\n }\n\n\n\n fn handheld_type(&self) -> &HandheldType {\n\n &self.handheld\n\n }\n\n}\n\n\n\nimpl Shield {\n\n pub fn new(name: &str, armor: f32, bash_damages: f32) -> Self {\n\n Self::default()\n\n .set_name(name)\n\n .set_armor_rating(armor)\n\n .set_handheld_type(HandheldType::OnlyLeft)\n\n .set_damages(bash_damages)\n\n }\n\n}\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 58, "score": 18.56351402560583 }, { "content": "}\n\n\n\n// NB: I could have made multiple trait instead of enum as well.\n\n#[derive(PartialEq)]\n\npub enum HandheldType {\n\n SingleHand,\n\n OnlyLeft,\n\n TwoHands,\n\n}\n\n\n\n/// NB : We could define many different of weapon ( like enchanted weapons for example and or melee weapons )\n\npub struct RegularWeapon {\n\n name: String,\n\n handheld: HandheldType,\n\n damages: RawDamages,\n\n}\n\n\n\nimpl RegularWeapon {\n\n pub fn new(name: &str, damages: f32, handheld: HandheldType) -> Self {\n\n RegularWeapon::default()\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 59, "score": 17.814510171752023 }, { "content": " pub fn roll_initiative_dice(&self) -> (String, String) {\n\n if self.opponents.1.roll_dice(SkillDice::Initiative)\n\n >= self.opponents.0.roll_dice(SkillDice::Initiative)\n\n {\n\n (\n\n self.opponents.1.name().to_string(),\n\n self.opponents.0.name().to_string(),\n\n )\n\n } else {\n\n (\n\n self.opponents.0.name().to_string(),\n\n self.opponents.1.name().to_string(),\n\n )\n\n }\n\n }\n\n\n\n pub fn resolve(mut self) -> Character {\n\n while self.winner_name.is_none() {\n\n self.round += 1;\n\n\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 60, "score": 17.386338580661487 }, { "content": "impl Item for Shield {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn set_name(mut self, name: &str) -> Self {\n\n self.name = name.to_string();\n\n self\n\n }\n\n}\n\n\n\nimpl Weapon for Shield {\n\n fn damages(&self) -> &RawDamages {\n\n &self.bash_damage\n\n }\n\n\n\n fn set_damages(mut self, amount: RawDamages) -> Self {\n\n self.bash_damage = amount;\n\n self\n\n }\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 61, "score": 17.244140478931374 }, { "content": " pub fn winner_name(&self) -> &Option<String> {\n\n &self.winner_name\n\n }\n\n pub fn round(&self) -> u32 {\n\n self.round\n\n }\n\n pub fn opponents(&self) -> &(Character, Character) {\n\n &self.opponents\n\n }\n\n\n\n pub fn new(first_fighter: Character, second_fighter: Character) -> Self {\n\n let mut attack_counting = HashMap::new();\n\n attack_counting.insert(first_fighter.name().to_string(), 0);\n\n attack_counting.insert(second_fighter.name().to_string(), 0);\n\n Fight {\n\n 
winner_name: None,\n\n round: 0,\n\n attack_counting,\n\n opponents: (first_fighter, second_fighter),\n\n winner: None,\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 62, "score": 16.89652917497325 }, { "content": " .set_name(name)\n\n .set_damages(damages)\n\n .set_handheld_type(handheld)\n\n }\n\n}\n\n\n\nimpl Default for RegularWeapon {\n\n fn default() -> Self {\n\n RegularWeapon {\n\n name: \"Hands\".to_string(),\n\n handheld: HandheldType::SingleHand,\n\n damages: 1.0,\n\n }\n\n }\n\n}\n\n\n\nimpl Item for RegularWeapon {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 63, "score": 16.53880573180039 }, { "content": "\n\n fn set_name(mut self, name: &str) -> Self {\n\n self.name = name.to_string();\n\n self\n\n }\n\n}\n\n\n\nimpl Weapon for RegularWeapon {\n\n fn damages(&self) -> &RawDamages {\n\n &self.damages\n\n }\n\n\n\n fn set_damages(mut self, amount: RawDamages) -> Self {\n\n self.damages = amount;\n\n self\n\n }\n\n\n\n fn set_handheld_type(mut self, handheld: HandheldType) -> Self {\n\n self.handheld = handheld;\n\n self\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 64, "score": 16.120544769193994 }, { "content": " armor_rating: 0.0,\n\n name: \"\".to_string(),\n\n hold: HandheldType::OnlyLeft,\n\n bash_damage: 0.0,\n\n handheld: HandheldType::OnlyLeft,\n\n }\n\n }\n\n}\n\n\n\nimpl Armor for Shield {\n\n fn set_armor_rating(mut self, reduction: f32) -> Self {\n\n self.armor_rating = reduction;\n\n self\n\n }\n\n\n\n fn armor_rating(&self) -> &f32 {\n\n &self.armor_rating\n\n }\n\n}\n\n\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 65, "score": 15.917870682409905 }, { "content": " let first = self.opponents.0.roll_dice(SkillDice::Initiative);\n\n let second = self.opponents.1.roll_dice(SkillDice::Initiative);\n\n\n\n if first >= second {\n\n (self.opponents.1.clone(), self.opponents.0.clone())\n\n } else {\n\n (self.opponents.0.clone(), self.opponents.1.clone())\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n use crate::prelude::*;\n\n\n\n fn get_test_player(name: &str) -> Character {\n\n Character::new(name, 1000.0).grab_weapon(RegularWeapon::default())\n\n }\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 66, "score": 15.298252171638932 }, { "content": " }\n\n\n\n fn handheld_type(&self) -> &HandheldType {\n\n &self.handheld\n\n }\n\n}\n\n\n\n#[allow(unused)]\n\n/// NB : Shield could be a trait instead of a struct as well.\n\npub struct Shield {\n\n armor_rating: f32,\n\n name: String,\n\n hold: HandheldType,\n\n bash_damage: RawDamages,\n\n handheld: HandheldType,\n\n}\n\n\n\nimpl Default for Shield {\n\n fn default() -> Self {\n\n Shield {\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 67, "score": 15.012652669035273 }, { "content": "\n\nimpl SkillDice {\n\n pub fn dices_roll_result(&self, actor: &str) -> u8 {\n\n let res = match self {\n\n SkillDice::Initiative => roll_dice(actor, \"2d6\", \"initiative\"),\n\n SkillDice::Blocking => roll_dice(actor, \"1d6\", \"block\"),\n\n SkillDice::Attack => roll_dice(actor, \"1d10\", \"attack\"),\n\n SkillDice::Dodge => roll_dice(actor, \"1d10\", \"dodge\"),\n\n };\n\n\n\n res as u8\n\n }\n\n}\n", "file_path": "examples/rpg_engine/src/dice.rs", "rank": 68, "score": 14.318686665094566 }, { "content": " loser: None,\n\n }\n\n }\n\n\n\n fn update_attacks_counter(&mut self, attack_name: &str) {\n\n if let Some(number_attacks) = self.attack_counting.get_mut(attack_name) 
{\n\n *number_attacks += 1;\n\n }\n\n }\n\n\n\n /// SO ugly code, please use `resolve()` instead.\n\n #[deprecated]\n\n pub fn start(&mut self) {\n\n let mut attack_counting: HashMap<String, u32> = HashMap::new();\n\n\n\n attack_counting.insert(self.opponents.1.name().to_string(), 0);\n\n attack_counting.insert(self.opponents.0.name().to_string(), 0);\n\n\n\n while self.winner_name.is_none() {\n\n self.round += 1;\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 69, "score": 14.119533783997397 }, { "content": "#### Examples\n\n\n\n\n\n**a.** Mutability & Immutability\n\n\n\n```rust\n\n\n\nstruct Company {\n\n pub name: String,\n\n /// In Billion\n\n pub value: u32,\n\n}\n\n\n\nfn main() {\n\n let mut facebook = Company {\n\n name: \"FaceMash\".to_string(),\n\n value: 0,\n\n };\n\n \n\n // <- if we do not use mut here, compiler will say NO\n\n facebook.name = \"Facebook\".to_string();\n\n facebook.value = 900;\n\n\n\n println!(\"{} new name is cool \", facebook.name);\n\n}\n\n```\n\n\n\n**b.** Borrowing data to read it.\n\n\n\n```rust\n\n\n\n\n\nfn display_data(company: &Company) {\n\n println!(\"Name : {} \", company.name);\n\n println!(\"Market Cap : {} \", company.name);\n\n println!(\"Rating {}\", rate_business(&company.value))\n\n}\n\n\n\nfn main() {\n\n // ---- Rest of the code\n\n // 2 - Reference with `&` to read data\n\n // <- if we do not use & here, compiler will say NO because function asks for it.\n\n display_data(&facebook);\n\n}\n\n\n\n```\n\n**c.** Owning an object to move it ( to consume it ) to do stuff with it. \n\n\n\n```rust\n\n\n\n\n\nfn rebuild_business(_: Company) -> Company {\n\n Company {\n\n name: \"Facebook 2.0\".to_string(),\n\n value: 850,\n\n }\n\n}\n\n\n\n\n\nfn main() {\n\n \n\n // ---- Rest of the code\n\n println!(\"{} is super old, we need rebranding \", facebook.name);\n\n\n\n\n\n facebook.name = \"Meta\".to_string();\n\n println!(\"{} is an awesome name\", facebook.name);\n\n\n\n // <-- we move `facebook` inside the scope of the `rebuild_business` function so we cannot access it anymore .\n\n let mut new_facebook = rebuild_business(facebook);\n\n\n\n // println!(\"{} is still alive ?\", facebook.name); <-- get moved error value\n\n display_data(&new_facebook);\n\n \n\n}\n\n\n\n\n\n```\n\n\n\n\n\n**d.** Compiler check for everything for you\n\n\n\nLittle surprise with mutable references 😁\n\n\n\n```rust\n\n\n\nfn update_name(company: &mut Company, new_name: &str) {\n\n company.name = new_name.to_string();\n\n}\n\n\n\n\n\nfn main (){\n\n\n\n // --- Rest of the code ---\n", "file_path": "README.md", "rank": 70, "score": 14.046455328103516 }, { "content": "//! 
Using https://doc.rust-lang.org/book/ch17-02-trait-objects.html\n\n\n\npub type BlockedDamages = f32;\n\npub type RawDamages = f32;\n\npub type ArmorRating = f32;\n\n\n\n//\n", "file_path": "examples/rpg_engine/src/item.rs", "rank": 71, "score": 12.534888898357934 }, { "content": "mod character;\n\nmod dice;\n\nmod fight;\n\nmod item;\n\nmod stuff;\n\n\n\npub mod prelude {\n\n pub use crate::{character::*, dice::*, fight::*, item::*, stuff::*};\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n let result = 2 + 2;\n\n assert_eq!(result, 4);\n\n }\n\n}\n", "file_path": "examples/rpg_engine/src/lib.rs", "rank": 72, "score": 11.379415293550467 }, { "content": " second_fight_winner.name().bold(),\n\n );\n\n\n\n println!(\n\n \"{} has {} HP\",\n\n first_fight_winner.name().bold(),\n\n first_fight_winner.health().to_string().green().bold(),\n\n );\n\n println!(\n\n \"{} has {} HP\",\n\n second_fight_winner.name().bold(),\n\n second_fight_winner.health().to_string().green().bold(),\n\n );\n\n println!();\n\n\n\n // Use the stuff bellow to make it work sequential\n\n //first_fight.join().unwrap();\n\n //second_fight.join().unwrap();\n\n let final_winner = Fight::new(first_fight_winner, second_fight_winner).resolve();\n\n\n\n println!(\n\n \"The best fighter is : {}\",\n\n final_winner.name().yellow().bold()\n\n );\n\n\n\n #[cfg(feature = \"song\")]\n\n add_song();\n\n}\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 73, "score": 10.538656947484998 }, { "content": "#Todo\n\n\n\n- [ ] Clean up deprecated methods\n\n- [ ] Add proper documentation\n\n- [ ] Refactor with unit testing\n\n- [ ] Display weapons name during blocking and attacks ( need to refactor stuff and character)\n\n\n", "file_path": "examples/rpg_engine/README.md", "rank": 74, "score": 10.407641373020102 }, { "content": " self.display_results(TurnStep::ResultBlocking {\n\n succeed: false,\n\n blocked_damage: damage,\n\n });\n\n None\n\n }\n\n }\n\n\n\n pub fn resolve_damages(&mut self, hit_damage: RawDamages) {\n\n let result = self.defender.gets_hit(hit_damage);\n\n self.display_results(TurnStep::DamagesTaken { damages: result })\n\n }\n\n\n\n pub fn resolve_winner_and_loser(&self) -> Option<(Character, Character)> {\n\n if self.defender.get_health_status() == HealthStatus::Dead {\n\n self.display_results(TurnStep::EndFight);\n\n Some((self.attacker.clone(), self.defender.clone()))\n\n } else {\n\n None\n\n }\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 75, "score": 10.033820597516613 }, { "content": "use colored::*;\n\nuse rpg_engine::prelude::*;\n\nuse std::sync::mpsc;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\n#[cfg(feature = \"song\")]\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 76, "score": 9.431958741895507 }, { "content": " let white_run_guard = Character::new(\"Olaf the dummy guard\", 300.00)\n\n .grab_weapon(steel_shield) // <- we can do it because of generic + trait objects for weapon\n\n .grab_weapon(iron_long_sword)\n\n .grab_armor(daedric_armor_2);\n\n\n\n let lydia = Character::new(\"Lydia\", 300.0)\n\n .grab_weapon(daedric_battle_axe)\n\n .grab_armor(iron_plate);\n\n\n\n let braith = Character::new(\"Braith\", 100.0).grab_weapon(RegularWeapon::default());\n\n\n\n let dovahkiin = Character::new(\"Dovahkiin\", 1500.0)\n\n .grab_weapon(steel_long_sword)\n\n .grab_weapon(iron_shield);\n\n\n\n let (tx_1, rx_1) = mpsc::channel();\n\n\n\n // This is OS native Thread\n\n let _ = 
thread::spawn(move || {\n\n let winner = Fight::new(white_run_guard, grand_ma_skyrim).resolve();\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 77, "score": 9.218139295485066 }, { "content": " }\n\n\n\n /// Should use string template make unit test.\n\n /// Displays specific message for a particular step during the turn.\n\n fn display_results(&self, turn_step: TurnStep) {\n\n let attacker_name = &self.attacker.name();\n\n let defender_name = &self.defender.name();\n\n match turn_step {\n\n TurnStep::Initiative => {\n\n println!(\n\n \"{} will attack and {} will defend\",\n\n attacker_name.bold(),\n\n defender_name.bold()\n\n );\n\n }\n\n TurnStep::Attack => {\n\n println!(\n\n \"{} {} {} {} attack\",\n\n defender_name.bold(),\n\n \"failed\".red(),\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 78, "score": 9.016343295075039 }, { "content": " turn\n\n }\n\n\n\n pub fn resolve_attack_defense(&self) -> Option<(u8, RawDamages)> {\n\n let attack = self.attacker.roll_dice(SkillDice::Attack);\n\n let dodge = self.defender.roll_dice(SkillDice::Dodge);\n\n\n\n if dodge > attack {\n\n self.display_results(TurnStep::Dodge);\n\n None\n\n } else {\n\n self.display_results(TurnStep::Attack);\n\n Some((attack, self.attacker.deal_damages()))\n\n }\n\n }\n\n\n\n /// Roll dice and display message for success or fail .\n\n /// And return update damages.\n\n pub fn resolve_blocking(\n\n &mut self,\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 79, "score": 8.94926610779768 }, { "content": " if def.is_some() {\n\n println!(\"{} tries to block {}\", defender.name(), attacker.name());\n\n }\n\n defender.get_attacked_by(damage, attack, def);\n\n }\n\n\n\n if defender.get_health_status() == HealthStatus::Dead {\n\n self.winner_name = Some(attacker.name().to_string());\n\n\n\n println!(\"{} is dead :((((((((\", defender.name());\n\n println!(\n\n \"{} won the fight and has {} hp left <3 !!!!!!!!!!!!!!!\",\n\n attacker.name(),\n\n attacker.health()\n\n );\n\n\n\n println!(\" -------------- Game statistics -------------- \");\n\n\n\n println!(\"Fight finished after {} rounds\", self.round);\n\n\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 80, "score": 8.7353747527976 }, { "content": " .grab_armor(daedric_armor);\n\n\n\n let white_run_guard = Character::new(\"Olaf the dummy guard\", 300.00)\n\n .grab_weapon(steel_shield) // <- we can do it because of generic + trait objects for weapon\n\n .grab_weapon(iron_long_sword)\n\n .grab_armor(daedric_armor_2);\n\n\n\n Fight::new(white_run_guard, grand_ma_skyrim).start();\n\n}\n\n\n\n//https://en.uesp.net/wiki/Skyrim:Block#Defensive_Blocking\n", "file_path": "examples/rpg_engine/examples/simple_fight/src/main.rs", "rank": 81, "score": 8.40862572041224 }, { "content": " println!(\n\n \" {} has attacked {} times \",\n\n loser_name.bold(),\n\n loser_nb_attacks.to_string().underline()\n\n );\n\n println!(\n\n \" {} has attacked {} times \",\n\n winner_name.bold(),\n\n winner_nb_attacks.to_string().underline()\n\n );\n\n println!()\n\n }\n\n _ => {\n\n panic!(\"Cannot display statistics because winner and loser are not resolved\")\n\n }\n\n }\n\n }\n\n\n\n /// Consume the tuple to return a new one with attacker and then defender.\n\n fn resolve_initiative(&self) -> (Character, Character) {\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 82, "score": 8.399227831661705 }, { "content": " \"{} {} the fight and has {} hp left <3 !!!!!!!!!!!!!!!\",\n\n 
&self.attacker.name().bold(),\n\n \"won\".yellow().bold(),\n\n &self.attacker.health().to_string().green().bold()\n\n );\n\n }\n\n }\n\n }\n\n}\n\n\n\npub enum TurnStep {\n\n Initiative,\n\n Attack,\n\n Dodge,\n\n TryBlocking,\n\n ResultBlocking {\n\n blocked_damage: BlockedDamages,\n\n succeed: bool,\n\n },\n\n\n\n DamagesTaken {\n\n damages: BlockedDamages,\n\n },\n\n EndFight,\n\n}\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 83, "score": 8.267331515020086 }, { "content": "use caith::Roller;\n\nuse colored::*;\n\n\n", "file_path": "examples/rpg_engine/src/dice.rs", "rank": 84, "score": 7.275035964842725 }, { "content": " turn.resolve_damages(hit_damage);\n\n\n\n if let Some((winner, loser)) = turn.resolve_winner_and_loser() {\n\n self.winner_name = Some(winner.name().to_string());\n\n self.winner = Some(winner);\n\n self.loser = Some(loser);\n\n self.show_statistics()\n\n }\n\n }\n\n // We cloned them in the first place so we need to have then.\n\n self.update_opponents(turn.attacker, turn.defender);\n\n }\n\n\n\n self.winner.unwrap()\n\n }\n\n\n\n /// Update the stored value with new ones.\n\n fn update_opponents(&mut self, fighter_1: Character, fighter_2: Character) {\n\n self.opponents = (fighter_1, fighter_2);\n\n }\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 85, "score": 7.017003959104473 }, { "content": "#### Resume\n\n\n\nPros :\n\n- Rules to write and read are easy \n\n- No extra syntax for these rules ( like Malloc or delete)\n\n- Everything is actually automated ( not need to delete object or do memory stuff on basic levels)\n\n- The Compiler checks everything for you\n\n- No need to think about technical detail so you can focus on business logic.\n\n\n\nCons : \n\n- Redefine the way you write code because you need to turn upside down your brain\n\n- You will hate the compiler\n\n- Get addicted to the safety\n\n\n\n\n\nIs memory safety solved?\n\n\n\n[There are bugs](https://github.com/Artisan-Lab/Rust-memory-safety-bugs)\n\nYou can do manual [memory management](https://stackoverflow.com/questions/48485454/rust-manual-memory-management) if you want in Unsafe mode\n\n\n\n![Rust laughing at you](assets/foo.png)\n\n\n\n---\n\n\n\n### Traits\n\n \n\n[Inheritance vs trait compositions](https://en.wikipedia.org/wiki/Composition_over_inheritance#)\n\n\n\n- Useful to have for objects that share specific behaviors\n\n\n\n- Flexible and allow having many types handled together\n\n\n\n- Can be understood as `interfaces` in some other languages\n\n\n\n##### Examples\n\n\n\nGot the idea to simulate the weapons system from Skyrim and how damages are dealt based on the stuff\n\n\n\nhttps://en.uesp.net/wiki/Skyrim:Block#Defensive_Blocking\n\n\n\n####### 1 - How to define weapons and armor\n\n\n\n####### 2 - How to define a character\n\n\n\n####### 3 - How to define a *fight*\n\n\n\n\n\nAs you will see , I took some freedom from their documentation for the calculation.\n\n\n\nNB :\n\n- Mainly kept the rules for blocking for dual_wielding, two hands weapons, shield + single weapon and one single weapon only.\n\n\n\n- Coding with the flow, no much thinking because I had tons of fun\n\n\n", "file_path": "README.md", "rank": 86, "score": 6.502073286311116 }, { "content": " fn get_long_iron_sword() -> RegularWeapon {\n\n RegularWeapon::new(\"Long Iron Sword\", 25.0, HandheldType::SingleHand)\n\n }\n\n fn get_long_steel_sword() -> RegularWeapon {\n\n RegularWeapon::new(\"Long Steel Sword\", 30.0, HandheldType::SingleHand)\n\n }\n\n fn get_steel_battle_axe() 
-> RegularWeapon {\n\n RegularWeapon::new(\"Steal battle Axe\", 65.0, HandheldType::TwoHands)\n\n }\n\n\n\n fn get_iron_shield() -> Shield {\n\n Shield::new(\"Iron Shield\", 25.0, 5.0)\n\n }\n\n\n\n fn get_steel_shield() -> Shield {\n\n Shield::new(\"Steel Shield\", 35.0, 7.0)\n\n }\n\n\n\n fn get_daedric_mail() -> BodyArmor {\n\n BodyArmor::new(\"Daedric Shield\", 45.0)\n\n }\n\n\n\n #[test]\n\n fn start() {\n\n let winner = Fight::new(get_test_player(\"player 1\"), get_test_player(\"player 2\")).resolve();\n\n assert_eq!(winner.name(), \"player 1\");\n\n }\n\n}\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 87, "score": 6.428571246907883 }, { "content": " let update = &mut new_facebook; // Can only make a single mutable reference.\n\n\n\n // display_data(&new_facebook); Cannot read while writing 😁\n\n // let update2 = &mut new_facebook; throw error here because we can only have mutable reference at the same time.\n\n update.name = \"Facebook 3.0\".to_string();\n\n println!(\"{} is an awesome name.\", new_facebook.name);\n\n\n\n let update2 = &mut new_facebook;\n\n update2.name = \"Facebook 4.0\".to_string();\n\n\n\n // <- Update name without taking ownership with function\n\n update_name(&mut new_facebook, \"Facebook 3000\");\n\n println!(\"{} is an awesome name.\", new_facebook.name);\n\n}\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 88, "score": 6.195187220963151 }, { "content": " println!(\"{} will defend \", defender_name);\n\n\n\n // Attack & Dodge dice roll.\n\n let attack = attacker.roll_dice(SkillDice::Attack);\n\n\n\n let dodge = defender.roll_dice(SkillDice::Dodge);\n\n\n\n if dodge > attack {\n\n println!(\n\n \"{} dodged {}'s attack successfully\",\n\n defender.name(),\n\n attacker.name()\n\n );\n\n } else {\n\n let damage = attacker.deal_damages();\n\n // Block dice roll.\n\n let def = defender\n\n .can_block()\n\n .map(|_| defender.roll_dice(SkillDice::Blocking));\n\n\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 89, "score": 6.115103233525856 }, { "content": "\n\n TurnStep::ResultBlocking {\n\n blocked_damage,\n\n succeed,\n\n } => {\n\n if succeed {\n\n println!(\n\n \"{} {} {} {} damages from {}\",\n\n defender_name.bold(),\n\n \"succeed\".green(),\n\n \"to block\".underline(),\n\n blocked_damage.to_string().red(),\n\n attacker_name.bold()\n\n );\n\n } else {\n\n println!(\n\n \"{} {} to block {} from {}\",\n\n defender_name.bold(),\n\n \"failed\".red(),\n\n blocked_damage.to_string().red(),\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 90, "score": 6.074534202815084 }, { "content": "##### Resume\n\n\n\nPros: \n\n- We can make many object with different types and handle them as long as they have the behavior we need.\n\n- Can make default behavior\n\n- We can store objects without specific type as long as their size is safe = we know their size at compile time\n\n- Everything is always checked by our lovely compiler\n\n- Can back up stuff with unit-test\n\n\n\nCons:\n\n- Require some training to understand Generics and trait object \n\n- Need to use specific new pointer like Rc/Arc for advanced stuff\n\n- The compiler will be painful with you\n\n\n\n![Little Britain](assets/little_britain.jpg)\n\n\n\n---\n\n\n\n### Conditional compilation\n\n\n\nWell, you can decide what to compile or not 😁\n\n\n\n\n\n##### Example\n\n\n\n\n\n- Running tests\n\n\n\n```rust\n\n#[cfg(test)]\n\nmod test {\n\n \n\n // --- unit test \n\n}\n\n\n\n```\n\n\n\n`cargo test`\n\n\n\nThe command will compile unit test and run 
them.\n\n\n\n\n\n- Having specific features\n\n\n\nIn cargo.toml\n\n\n\n```toml\n\n[features]\n\nsong=[]\n\n```\n\n\n\nNow in your code\n\n\n\n```rust\n\n#[cfg(feature = \"song\")]\n\nfn add_song() {\n\n println!(\"Here is the song of the Dovakin\")\n\n}\n\n```\n\n\n\n\n\nWe add the function in main as well 😁\n\n\n\n```rust\n\nfn main() {\n\n // ------ Rest of the code\n\n #[cfg(feature = \"song\")]\n\n add_song();\n\n}\n\n```\n\n\n\n\n\nHow to run :\n\n\n\n\n\n`cargo run --features song`\n\n\n\n##### Resume\n\n\n\nPros:\n\n- You decide what you want to compile\n\n- Super useful to reduce the size of your package and use only what you need\n\n\n\nCons:\n\n- Use strings, so little helping from the IDE ( just a bit still 😁)\n\n\n\nGood example to use for this is [web_sys](https://docs.rs/web-sys/0.3.56/web_sys/) library as bridge between Rust and the Web\n\n\n\n![feeling](assets/python_rust.jpg)\n\n\n\n---\n\n\n\n### Multithreading\n\n\n\n\n\n[Multiple concurrency models](https://rust-lang.github.io/async-book/01_getting_started/02_why_async.html)\n\n\n\n\n", "file_path": "README.md", "rank": 91, "score": 5.865647245274372 }, { "content": " println!(\"{} is an awesome name.\", facebook.name);\n\n\n\n // <-- we move `facebook` inside the scope of the `rebuild_business` function so we cannot access it anymore .\n\n let mut new_facebook = rebuild_business(facebook);\n\n\n\n // println!(\"{} is still alive ?\", facebook.name); <-- get moved error value\n\n display_data(&new_facebook);\n\n\n\n let update = &mut new_facebook; // Can only make a single mutable reference.\n\n // let update2 = &mut new_facebook; throw error here\n\n // display_data(&new_facebook); Cannot read while writing :D\n\n update.name = \"Facebook 3.0\".to_string();\n\n println!(\"{} is an awesome name.\", new_facebook.name);\n\n\n\n let update2 = &mut new_facebook;\n\n update2.name = \"Facebook 4.0\".to_string();\n\n\n\n update_name(&mut new_facebook, \"Facebook 3000\");\n\n\n\n println!(\"{} is an awesome name.\", new_facebook.name);\n\n}\n", "file_path": "examples/ownership/src/main.rs", "rank": 92, "score": 5.685136821234227 }, { "content": " attacker_name.bold(),\n\n );\n\n }\n\n }\n\n TurnStep::DamagesTaken { damages } => {\n\n println!(\n\n \"{} deals {} {} to {}\",\n\n attacker_name.bold(),\n\n damages.to_string().red(),\n\n \"damages\".red(),\n\n defender_name.bold()\n\n );\n\n }\n\n TurnStep::EndFight => {\n\n println!(\n\n \"{} is {} :((((((((\",\n\n &self.defender.name().bold(),\n\n \"dead\".red().bold()\n\n );\n\n println!(\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 93, "score": 5.625404358145826 }, { "content": "\n\n /// SHow simple statistics.\n\n fn show_statistics(&self) {\n\n println!(\" -------------- Game statistics -------------- \");\n\n\n\n println!(\"Fight finished after {} rounds\", self.round);\n\n\n\n match (&self.winner, &self.loser) {\n\n (Some(winner), Some(loser)) => {\n\n let loser_name = loser.name();\n\n let winner_name = winner.name();\n\n\n\n let loser_nb_attacks = self\n\n .attack_counting\n\n .get(loser_name)\n\n .expect(\"Should have gotten winner attack number\");\n\n let winner_nb_attacks = self\n\n .attack_counting\n\n .get(winner_name)\n\n .expect(\"Should have gotten loser attack number\");\n", "file_path": "examples/rpg_engine/src/fight/mod.rs", "rank": 94, "score": 5.382219718086269 }, { "content": "char const* info_compiler = \"INFO\" \":\" \"compiler[\" COMPILER_ID \"]\";\n", "file_path": 
"examples/cpp_allocation/cmake-build-debug/CMakeFiles/3.17.5/CompilerIdC/CMakeCCompilerId.c", "rank": 95, "score": 4.936975027519667 }, { "content": "char const* info_platform = \"INFO\" \":\" \"platform[\" PLATFORM_ID \"]\";\n", "file_path": "examples/cpp_allocation/cmake-build-debug/CMakeFiles/3.17.5/CompilerIdC/CMakeCCompilerId.c", "rank": 96, "score": 4.936975027519667 }, { "content": " tx_1.send(winner)\n\n .expect(\"Should have passed the resolved winner\");\n\n });\n\n\n\n let (tx_2, rx_2) = mpsc::channel();\n\n\n\n // This is OS native Thread\n\n let _ = thread::spawn(move || {\n\n let winner = Fight::new(lydia, dovahkiin).resolve();\n\n tx_2.send(winner)\n\n .expect(\"Should have passed the resolved winner\");\n\n });\n\n\n\n let second_fight_winner = rx_2.recv().expect(\"Should have receive the winner\");\n\n let first_fight_winner = rx_1.recv().expect(\"Should have receive the winner\");\n\n\n\n println!(\"----------------- Final Fight ----------------- \");\n\n println!(\n\n \"{} and {} will fight until only one survive\",\n\n first_fight_winner.name().bold(),\n", "file_path": "examples/rpg_engine/examples/multiple_fights/src/main.rs", "rank": 97, "score": 4.177714987534773 }, { "content": "##### Example\n\n\n\nLet's make 2 fights simultaneously and take the winner for the last one.\n\n\n\n\n\n```rust\n\n\n\n // ----- Rest of the Code\n\n\n\n let (tx_1, rx_1) = mpsc::channel();\n\n // This is OS native Thread\n\n let _ = thread::spawn(move || {\n\n let winner = Fight::new(white_run_guard, grand_ma_skyrim).resolve();\n\n tx_1.send(winner)\n\n .expect(\"Should have passed the resolved winner\");\n\n });\n\n\n\n let (tx_2, rx_2) = mpsc::channel();\n\n\n\n // This is OS native Thread\n\n let _ = thread::spawn(move || {\n\n let winner = Fight::new(lydia, dovakin).resolve();\n\n tx_2.send(winner)\n\n .expect(\"Should have passed the resolved winner\");\n\n });\n\n\n\n let second_fight_winner = rx_2.recv().expect(\"Should have receive the winner\");\n\n let first_fight_winner = rx_1.recv().expect(\"Should have receive the winner\");\n\n\n\n let final_winner = Fight::new(first_fight_winner, second_fight_winner).resolve();\n\n\n\n println!(\"The best fighter is : {}\", final_winner.name());\n\n\n\n```\n\n\n\n\n\nWorks like a charm. \n\nOf Course if my business logic sucks, then it won't work as expected, but that is not the compiler responsibility.\n\nBut unit-test are there for it 😁\n\n\n\n##### Resume\n\n\n\nPros:\n\n- Took me 5 minutes to make it work\n\n- No runtime bugs, if you do something wrong, the compiler will tell you 😁\n\n- You can make it simple and easy to understand\n\n\n\nCons:\n\n- The compiler complained about the Type for Stuff because I needed to add trait bounds to ensure the code was safe ( but the compiler told me again soo 😀)\n\n- Need to think the specific pointer you use such as Rc vs Arc ( but again the compiler helps with that )\n\n\n\n![c_rust_meme](assets/c_rust_meme.jpg)\n\n\n\n---\n\n\n", "file_path": "README.md", "rank": 98, "score": 4.00094022876374 }, { "content": " \"to dodge\".underline(),\n\n attacker_name.bold(),\n\n );\n\n }\n\n\n\n TurnStep::Dodge => {\n\n println!(\n\n \"{} dodged {} attack {}\",\n\n defender_name.bold(),\n\n attacker_name.bold(),\n\n \"successfully\".green()\n\n );\n\n }\n\n TurnStep::TryBlocking => {\n\n println!(\n\n \"{} tries to block {}\",\n\n defender_name.bold(),\n\n attacker_name.bold()\n\n );\n\n }\n", "file_path": "examples/rpg_engine/src/fight/turn.rs", "rank": 99, "score": 3.664106738120457 } ]
Rust
src/bin/roughenough-kms.rs
lachesis/roughenough
a5e29a47646cc57bdd8e3603818cc9bd46f81bfc
#[macro_use] extern crate log; use clap::{App, Arg}; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use log::LevelFilter; use simple_logger::SimpleLogger; #[allow(unused_imports)] use roughenough::kms::{EnvelopeEncryption, KmsProvider}; use roughenough::roughenough_version; const HEX: Encoding = HEXLOWER_PERMISSIVE; #[cfg(not(any(feature = "awskms", feature = "gcpkms")))] fn encrypt_seed(_: &str, _: &str) { unreachable!() } #[cfg(any(feature = "awskms", feature = "gcpkms"))] fn encrypt_seed(kms_key: &str, hex_seed: &str) { let kms_client = get_kms(kms_key); let plaintext_seed = HEX.decode(hex_seed.as_ref()).expect("Error decoding hex seed value"); if plaintext_seed.len() != 32 { panic!( "Seed must be 32 bytes long; provided seed is {}", plaintext_seed.len() ); } match EnvelopeEncryption::encrypt_seed(&kms_client, &plaintext_seed) { Ok(encrypted_blob) => { println!("kms_protection: \"{}\"", kms_key); println!("seed: {}", HEX.encode(&encrypted_blob)); } Err(e) => { error!("Error: {:?}", e); } } } #[cfg(not(any(feature = "awskms", feature = "gcpkms")))] fn decrypt_blob(_: &str, _: &str) { unreachable!() } #[cfg(any(feature = "awskms", feature = "gcpkms"))] fn decrypt_blob(kms_key: &str, hex_blob: &str) { let kms_client = get_kms(kms_key); let ciphertext = HEX.decode(hex_blob.as_ref()).expect("Error decoding hex blob value"); match EnvelopeEncryption::decrypt_seed(&kms_client, ciphertext.as_ref()) { Ok(plaintext) => { println!("{}", HEX.encode(&plaintext)); } Err(e) => { error!("Error: {:?}", e); } } } #[cfg(feature = "awskms")] fn get_kms(kms_key: &str) -> impl KmsProvider { use roughenough::kms::AwsKms; AwsKms::from_arn(kms_key).unwrap() } #[cfg(feature = "gcpkms")] fn get_kms(kms_key: &str) -> impl KmsProvider { use roughenough::kms::GcpKms; GcpKms::from_resource_id(kms_key).unwrap() } #[allow(unused_variables)] pub fn main() { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); if !(cfg!(feature = "gcpkms") || cfg!(feature = "awskms")) { warn!("KMS support was not compiled into this build; nothing to do."); warn!("See the Roughenough documentation for information on KMS support."); warn!(" https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md"); return; } let matches = App::new("roughenough-kms") .version(roughenough_version().as_ref()) .long_about("Encrypt and decrypt Roughenough long-term server seeds using a KMS") .arg( Arg::with_name("KEY_ID") .short("k") .long("kms-key") .takes_value(true) .required(true) .help("Identity of the KMS key to be used"), ) .arg( Arg::with_name("DECRYPT") .short("d") .long("decrypt") .takes_value(true) .required(false) .help("Previously encrypted blob to decrypt to plaintext"), ) .arg( Arg::with_name("SEED") .short("s") .long("seed") .takes_value(true) .required(false) .help("32 byte hex seed for the server's long-term identity"), ) .get_matches(); let kms_key = matches.value_of("KEY_ID").expect("Invalid KMS key id"); if matches.is_present("SEED") { let hex_seed = matches.value_of("SEED").expect("Invalid seed value"); encrypt_seed(kms_key, hex_seed); } else if matches.is_present("DECRYPT") { let hex_blob = matches.value_of("DECRYPT").expect("Invalid blob value"); decrypt_blob(kms_key, hex_blob); } else { error!("Neither seed encryption (-s) or blob decryption (-d) was specified."); error!("One of them is required."); } }
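The binary above never talks to a KMS directly; everything goes through the `KmsProvider` trait (`encrypt_dek`/`decrypt_dek`, quoted in the context items further down). To exercise that path locally without AWS or GCP credentials, a stand-in provider is enough. This is a hypothetical sketch, assuming `PlaintextDEK` and `EncryptedDEK` are byte-vector aliases exported by `roughenough::kms` and that the crate is built with one of the KMS features (neither detail is shown in the excerpt).

```rust
// Hypothetical stand-in KmsProvider for local testing only -- it applies no
// real protection. Assumes PlaintextDEK and EncryptedDEK are Vec<u8> aliases
// and that KmsError is the kms module's error type (assumptions, not shown above).
use roughenough::kms::{EncryptedDEK, KmsError, KmsProvider, PlaintextDEK};

pub struct IdentityKms;

impl KmsProvider for IdentityKms {
    fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError> {
        // A real provider would call out to AWS KMS or GCP KMS here to wrap the DEK.
        Ok(plaintext_dek.clone())
    }

    fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> {
        // Likewise, a real provider would ask the KMS to unwrap the DEK.
        Ok(encrypted_dek.clone())
    }
}
```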
#[macro_use] extern crate log; use clap::{App, Arg}; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use log::LevelFilter; use simple_logger::SimpleLogger; #[allow(unused_imports)] use roughenough::kms::{EnvelopeEncryption, KmsProvider}; use roughenough::roughenough_version; const HEX: Encoding = HEXLOWER_PERMISSIVE; #[cfg(not(any(feature = "awskms", feature = "gcpkms")))] fn encrypt_seed(_: &str, _: &str) { unreachable!() } #[cfg(any(feature = "awskms", feature = "gcpkms"))] fn encrypt_seed(kms_key: &str, hex_seed: &str) { let kms_client = get_kms(kms_key); let plaintext_seed = HEX.decode(hex_seed.as_ref()).expect("Error decoding hex seed value"); if plaintext_seed.len() != 32 { panic!( "Seed must be 32 bytes long; provided seed is {}", plaintext_seed.len() ); } match EnvelopeEncryption::encrypt_seed(&kms_client, &plaintext_seed) { Ok(encrypted_blob) => { println!("kms_protection: \"{}\"", kms_key); println!("seed: {}", HEX.encode(&encrypted_blob)); } Err(e) => { error!("Error: {:?}", e); } } } #[cfg(not(any(feature = "awskms", feature = "gcpkms")))] fn decrypt_blob(_: &str, _: &str) { unreachable!() } #[cfg(any(feature = "awskms", feature = "gcpkms"))] fn decrypt_blob(kms_key: &str, hex_blob: &str) { let kms_client = get_kms(kms_key); let ciphertext = HEX.decode(hex_blob.as_ref()).expect("Error decoding hex blob value"); match EnvelopeEncryption::decrypt_seed(&kms_client, ciphertext.as_ref()) { Ok(plaintext) => { println!("{}", HEX.encode(&plaintext)); } Err(e) => { error!("Error: {:?}", e); } } } #[cfg(feature = "awskms")] fn get_kms(kms_key: &str) -> impl KmsProvider { use roughenough::kms::AwsKms; AwsKms::from_arn(kms_key).unwrap() } #[cfg(feature = "gcpkms")] fn get_kms(kms_key: &str) -> impl KmsProvider { use roughenough::kms::GcpKms; GcpKms::from_resource_id(kms_key).unwrap() } #[allow(unused_variables)] pub fn main() { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); if !(cfg!(feature = "gcpkms") || cfg!(feature = "awskms")) { warn!("KMS support was not compiled into this build; nothing to do."); warn!("See the Roughenough documentation for information on KMS support."); warn!(" https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md"); return; }
let kms_key = matches.value_of("KEY_ID").expect("Invalid KMS key id"); if matches.is_present("SEED") { let hex_seed = matches.value_of("SEED").expect("Invalid seed value"); encrypt_seed(kms_key, hex_seed); } else if matches.is_present("DECRYPT") { let hex_blob = matches.value_of("DECRYPT").expect("Invalid blob value"); decrypt_blob(kms_key, hex_blob); } else { error!("Neither seed encryption (-s) or blob decryption (-d) was specified."); error!("One of them is required."); } }
let matches = App::new("roughenough-kms") .version(roughenough_version().as_ref()) .long_about("Encrypt and decrypt Roughenough long-term server seeds using a KMS") .arg( Arg::with_name("KEY_ID") .short("k") .long("kms-key") .takes_value(true) .required(true) .help("Identity of the KMS key to be used"), ) .arg( Arg::with_name("DECRYPT") .short("d") .long("decrypt") .takes_value(true) .required(false) .help("Previously encrypted blob to decrypt to plaintext"), ) .arg( Arg::with_name("SEED") .short("s") .long("seed") .takes_value(true) .required(false) .help("32 byte hex seed for the server's long-term identity"), ) .get_matches();
assignment_statement
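With a stub provider such as the hypothetical `IdentityKms` above, the `encrypt_seed` / `decrypt_blob` paths shown in the source can be exercised end to end. A hedged round-trip sketch, assuming `EnvelopeEncryption::{encrypt_seed, decrypt_seed}` take the arguments used in src/bin/roughenough-kms.rs and that a KMS feature is compiled in:

```rust
// Round-trip sketch using the hypothetical IdentityKms stub from earlier.
// Call shapes follow src/bin/roughenough-kms.rs above; building with
// `--features awskms` or `--features gcpkms` is assumed.
use roughenough::kms::EnvelopeEncryption;

fn round_trip(seed: &[u8]) {
    let kms = IdentityKms;

    // Wrap the 32-byte seed into an opaque blob (what `-s/--seed` prints as hex).
    let blob = EnvelopeEncryption::encrypt_seed(&kms, seed).expect("encrypt failed");

    // Unwrap the blob back to the plaintext seed (what `-d/--decrypt` prints).
    let plaintext = EnvelopeEncryption::decrypt_seed(&kms, &blob).expect("decrypt failed");
    assert_eq!(plaintext, seed);
}
```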
[ { "content": "/// Factory function to create a `ServerConfig` _trait object_ based on the value\n\n/// of the provided `arg`.\n\n///\n\n/// * `ENV` will return an [`EnvironmentConfig`](struct.EnvironmentConfig.html)\n\n/// * any other value returns a [`FileConfig`](struct.FileConfig.html)\n\n///\n\npub fn make_config(arg: &str) -> Result<Box<dyn ServerConfig>, Error> {\n\n if arg == \"ENV\" {\n\n match EnvironmentConfig::new() {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n } else {\n\n match FileConfig::new(arg) {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Validate configuration settings. Returns `true` if the config is valid, `false` otherwise.\n\n///\n", "file_path": "src/config/mod.rs", "rank": 0, "score": 120468.40105180875 }, { "content": "/// Roughenough version string enriched with any compile-time optional features\n\npub fn roughenough_version() -> String {\n\n let kms_str = if cfg!(feature = \"awskms\") {\n\n \" (+AWS KMS)\"\n\n } else if cfg!(feature = \"gcpkms\") {\n\n \" (+GCP KMS)\"\n\n } else {\n\n \"\"\n\n };\n\n\n\n format!(\"{}{}\", VERSION, kms_str)\n\n}\n\n\n\n// Constants and magic numbers of the Roughtime protocol\n\n\n\n/// Minimum size (in bytes) of a client request\n\npub const MIN_REQUEST_LENGTH: u32 = 1024;\n\n\n\n/// Size (in bytes) of seeds used to derive private keys\n\npub const SEED_LENGTH: u32 = 32;\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 105556.69495275454 }, { "content": "#[cfg(not(any(feature = \"awskms\", feature = \"gcpkms\")))]\n\npub fn load_seed(config: &dyn ServerConfig) -> Result<Vec<u8>, error::Error> {\n\n match config.kms_protection() {\n\n KmsProtection::Plaintext => Ok(config.seed()),\n\n v => Err(error::Error::InvalidConfiguration(format!(\n\n \"kms_protection '{}' requires KMS, but server was not compiled with KMS support\",\n\n v\n\n ))),\n\n }\n\n}\n", "file_path": "src/kms/mod.rs", "rank": 2, "score": 73804.99768809955 }, { "content": "/// Guess which protocol the request is using and extract the client's nonce from the request\n\npub fn nonce_from_request(buf: &[u8], num_bytes: usize) -> Result<(Vec<u8>, Version), Error> {\n\n if num_bytes < MIN_REQUEST_LENGTH as usize {\n\n return Err(Error::RequestTooShort);\n\n }\n\n\n\n match guess_protocol_version(buf) {\n\n Version::Classic => nonce_from_classic_request(&buf[..num_bytes]),\n\n Version::Rfc => nonce_from_rfc_request(&buf[..num_bytes]),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 3, "score": 73046.71167608433 }, { "content": "///\n\n/// A key management system that wraps/unwraps a data encryption key (DEK).\n\n///\n\npub trait KmsProvider {\n\n /// Make a blocking request to encrypt (wrap) the provided plaintext data encryption key.\n\n fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError>;\n\n\n\n /// Make a blocking request to decrypt (unwrap) a previously encrypted data encryption key.\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError>;\n\n}\n\n\n\n#[cfg(feature = \"awskms\")]\n\nmod awskms;\n\n\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// Loading behavior depends on the value of `config.kms_protection()`:\n\n///\n\n/// * If `config.kms_protection() == Plaintext` then the value returned from `config.seed()`\n\n/// is used as-is and assumed to be a 32-byte hexadecimal value.\n\n///\n\n/// * Otherwise `config.seed()` is assumed to be an encrypted opaque blob generated from\n\n/// a prior 
`EnvelopeEncryption::encrypt_seed` call. The value of `config.kms_protection()`\n\n/// is parsed as a KMS key id and `EnvelopeEncryption::decrypt_seed` is called to obtain\n\n/// the plaintext seed value.\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 4, "score": 70164.18147173194 }, { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn is_valid_config(cfg: &dyn ServerConfig) -> bool {\n\n let mut is_valid = true;\n\n\n\n if cfg.port() == 0 {\n\n error!(\"server port not set: {}\", cfg.port());\n\n is_valid = false;\n\n }\n\n\n\n if cfg.interface().is_empty() {\n\n error!(\"'interface' is missing\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.seed().is_empty() {\n\n error!(\"'seed' value is missing\");\n\n is_valid = false;\n\n } else if *cfg.kms_protection() == KmsProtection::Plaintext\n\n && cfg.seed().len() != SEED_LENGTH as usize\n\n {\n\n error!(\n", "file_path": "src/config/mod.rs", "rank": 5, "score": 64421.31983843311 }, { "content": "fn has_supported_version(msg: &RtMessage) -> bool {\n\n let expected_ver_bytes = Version::Rfc.wire_bytes();\n\n\n\n if let Some(tag_bytes) = msg.get_field(Tag::VER) {\n\n // Iterate the list of supplied versions, looking for a match\n\n for found_ver_bytes in tag_bytes.chunks(4) {\n\n if found_ver_bytes == expected_ver_bytes {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n", "file_path": "src/request.rs", "rank": 6, "score": 59062.79032629927 }, { "content": "fn create_nested_message(c: &mut Criterion) {\n\n let pad = [0u8; 400];\n\n\n\n c.bench_function(\"create nested message\", move |b| {\n\n b.iter(|| {\n\n let mut msg1 = RtMessage::with_capacity(4);\n\n msg1.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n\n\n let mut msg2 = RtMessage::with_capacity(2);\n\n msg2.add_field(Tag::PUBK, \"1234567890\".as_bytes()).unwrap();\n\n msg2.add_field(Tag::PAD_CLASSIC, pad.as_ref()).unwrap();\n\n })\n\n });\n\n}\n\n\n\nstatic SIZES: &[u8] = &[1, 3, 9, 17, 200];\n\nstatic DATA: &[u8] = &[1u8; 64];\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 7, "score": 58474.447568798125 }, { "content": "fn reuse_merkle_trees(c: &mut Criterion) {\n\n let mut tree = MerkleTree::new();\n\n\n\n c.bench_function_over_inputs(\n\n \"reuse existing merkle tree\",\n\n move |b, &size| {\n\n b.iter(|| {\n\n tree.reset();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root());\n\n })\n\n },\n\n SIZES,\n\n );\n\n}\n\n\n\ncriterion_group!(\n\n message_creation,\n", "file_path": "benches/roughenough-bench.rs", "rank": 8, "score": 58474.447568798125 }, { "content": "fn create_empty_message(c: &mut Criterion) {\n\n c.bench_function(\"create empty message\", |b| {\n\n b.iter(|| RtMessage::with_capacity(0))\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 9, "score": 58474.447568798125 }, { "content": "fn create_two_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create two field message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(2);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PAD_CLASSIC, \"abcd\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 10, "score": 56508.76199786539 }, { "content": "fn create_four_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create four field 
message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(4);\n\n msg.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 11, "score": 56508.76199786539 }, { "content": "fn create_single_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create single field message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(1);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 12, "score": 56508.76199786539 }, { "content": "fn create_new_merkle_tree(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\n\n \"create new merkle trees\",\n\n move |b, &size| {\n\n b.iter(|| {\n\n let mut tree = MerkleTree::new();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root())\n\n })\n\n },\n\n SIZES,\n\n );\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 13, "score": 56508.76199786539 }, { "content": "///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. 
See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n\npub trait ServerConfig {\n\n /// [Required] IP address or interface name to listen for client requests\n\n fn interface(&self) -> &str;\n\n\n\n /// [Required] UDP port to listen for requests\n\n fn port(&self) -> u16;\n\n\n\n /// [Required] A 32-byte hexadecimal value used to generate the server's\n\n /// long-term key pair. **This is a secret value and must be un-guessable**,\n\n /// treat it with care.\n\n fn seed(&self) -> Vec<u8>;\n\n\n\n /// [Optional] The maximum number of requests to process in one batch. All\n\n /// nonces in a batch are used to build a Merkle tree, the root of which is signed.\n\n /// Defaults to [DEFAULT_BATCH_SIZE](constant.DEFAULT_BATCH_SIZE.html)\n\n fn batch_size(&self) -> u8;\n\n\n\n /// [Optional] Amount of time between each logged status update.\n\n /// Defaults to [DEFAULT_STATUS_INTERVAL](constant.DEFAULT_STATUS_INTERVAL.html)\n\n fn status_interval(&self) -> Duration;\n", "file_path": "src/config/mod.rs", "rank": 14, "score": 43101.65419070414 }, { "content": "///\n\n/// Implementations of this trait record client activity\n\n///\n\npub trait ServerStats {\n\n fn add_rfc_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_classic_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_invalid_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_health_check(&mut self, addr: &IpAddr);\n\n\n\n fn add_rfc_response(&mut self, addr: &IpAddr, bytes_sent: usize);\n\n\n\n fn add_classic_response(&mut self, addr: &IpAddr, bytes_sent: usize);\n\n\n\n fn total_valid_requests(&self) -> u64;\n\n\n\n fn num_rfc_requests(&self) -> u64;\n\n\n\n fn num_classic_requests(&self) -> u64;\n\n\n\n fn total_invalid_requests(&self) -> u64;\n", "file_path": "src/stats/mod.rs", "rank": 15, "score": 43076.40420006202 }, { "content": "## Description \n\n\n\nThe server's long-term identity can be protected by encrypting it, storing the encrypted value\n\nin the configuration, and invoking a cloud key management system to temporarily decrypt \n\n(in memory) the long-term identity at server start-up. \n\n\n\nThis way the server's long-term identity is never stored in plaintext. Instead the encrypted \n\nlong-term identity \"blob\" is safe to store on disk, on Github, in a container, etc. Ability \n\nto access the unencrypted identity is controlled \"out of band\" by the KMS system.\n\n\n\n## How to enable KMS support\n\n\n\nKMS support must be compiled-in. To enable:\n\n\n\n```bash\n\n# Build with Google Cloud KMS support\n\n$ cargo build --release --features \"gcpkms\"\n\n\n\n# Build with AWS KMS support\n\n$ cargo build --release --features \"awskms\"\n\n```\n\n\n\n## Google or Amazon: choose one and one only\n\n\n\nSadly, due to incompatibilities with dependencies of the KMS libraries, only **one** \n\nKMS system can be enabled at a time. Attempting `--features \"awskms,gcpkms\"` will result\n\nin a build failure.\n\n\n\n## Using `roughtime-kms` to encrypt the long-term seed\n\n\n\nUse the command line tool `roughtime-kms` to encrypt the seed value for the \n\nserver's long-term identity. To do this you will need: \n\n\n\n 1. The long-term key seed value \n\n 2. Access credentials for your cloud of choice\n\n 3. An identifier for the KMS key to be used\n\n 4. 
Necessary permissions to perform symmetric encrypt/decrypt operations\n\n using the selected key\n\n\n\nFor Amazon the key identifier is an ARN in the form:\n\n```\n\narn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\n\n```\n\n\n\nFor Google the key identifier is a resource ID in the form:\n\n```\n\nprojects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\n\n```\n\n\n\n### AWS Example\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 16, "score": 39097.99916666816 }, { "content": "#### Credentials \n\n\n\n[Rusoto](https://rusoto.org/) is used by Roughenough to access AWS. If your system\n\nhas AWS credentials in the typical `~/.aws/credentials` then everything should \"just work\".\n\n\n\nOtherwise Rusoto supports alternative ways to provide AWS credentials. See \n\n[Rusoto's documentation](https://github.com/rusoto/rusoto/blob/master/AWS-CREDENTIALS.md) \n\nfor details.\n\n\n\n#### `roughenough-kms` Command line\n\n\n\n```bash\n\n# Provide AWS credentials as described in the Rusoto docs\n\n\n\n# Build roughenough with AWS KMS support\n\n$ cargo build --release --features \"awskms\"\n\n\n\n# Encrypt the seed value\n\n$ target/release/roughenough-kms \\\n\n -k arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab \\\n\n -s a0a31d76900080c3cdc42fe69de8dd0086d6b54de7814004befd0b9c4447757e\n\n \n\n# Output of above will be something like this\n\nkms_protection: \"arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n\nseed: b8000c000102020078d39e85c7386e9e2bed1f30fac6dd322db96b8aaac8974fc6c0e0f566f8f6c971012fca1e69fffffd947fe82a9e505baf580000007e307c06092a864886f70d010706a06f306d020100306806092a864886f70d010701301e060960864801650304012e3011040c55d16d891b3b2a1ae2587a9c020110803bcc74dd96336009087772b28ec908c40e4113b1ab9b98934bd3b4f3dd3c1e8cdc6da82a4321fd8378ad0e2e0507bf0c5ea0e28d447e5f8482533baa423b7af8459ae87736f381d87fe38c21a805fae1c25c43d59200f42cae0d07f741e787a04c0ad72774942dddf818be0767e4963fe5a810f734a0125c\n\n```\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 17, "score": 39097.80912678349 }, { "content": "# Optional Features\n\n\n\nThese features are **disabled by default** and must be explicitly enabled as \n\ndescribed below.\n\n\n\n* [HTTP Health Check responder](#http-health-check)\n\n* [Key Management System (KMS) support](#key-management-system-kms-support)\n\n\n\n# HTTP Health Check\n\n\n\n## Description \n\n\n\nIntended for use by load balancers or other control plane facilities to monitor \n\nthe state of Roughenough servers and remove unhealthy instances automatically. \n\n\n\nThe server unconditionally emits a response to *any TCP connection* to the health\n\ncheck port, then closes the connection:\n\n\n\n```http\n\nHTTP/1.1 200 OK\n\nContent-Length: 0\n\nConnection: Close\n\n\n\n```\n\n\n\nNo attempt is made to parse the request, the server *always* emits this response. \n\n\n\n## How to enable\n\n\n\nProvide a value for the `health_check_port` setting. This enables the HTTP \n\nhealth check responder on the configured port. \n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nseed: f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\nhealth_check_port: 8000\n\n```\n\n\n\n## DoS Warning\n\n\n\n**An unprotected health-check port can be used to DoS the server. 
Do NOT expose \n\nthe health check port to the internet!** \n\n\n\nTo accurately reflect the ability of a Roughenough server to respond to requests, \n\nthe health check socket is serviced in the same event loop executing the primary Roughtime \n\nprotocol. Abuse of the health-check port can denial-of-service the whole server.\n\n\n\nIf enabled, ensure the health check port is accessible only to the *intended load-balancer(s)\n\nand/or control plane components*.\n\n\n\n\n\n# Key Management System (KMS) Support\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 18, "score": 39088.73699598725 }, { "content": "#### Credentials\n\n\n\nOnly **Service Account credentials** (in `.json` format) are currently supported. OAuth, bearer tokens, \n\nGAE default credentials, and GCE default credentials are **not** supported (contributions to\n\nadd support are particularly welcome!).\n\n\n\nTo obtain Service Account credentials if you don't already have them:\n\n\n\n* Creating a new service account?\n\n 1. Create the account \n\n 2. Download the credentials when prompted\n\n \n\n* Existing service account?\n\n 1. Open the Cloud Console (https://console.cloud.google.com)\n\n 2. Navigate to `IAM -> Service accounts`\n\n 3. Locate the service account row, click on its \"Actions\" menu (the three dots on the right) \n\n 4. Choose `Create key` and `JSON` format\n\n 5. Download the credentials when prompted\n\n\n\nMake note of the full path where the credentials are saved, it's needed in the next step.\n\n\n\n#### `roughenough-kms` Command line\n\n\n\n```bash\n\n# Set environment variable pointing to downloaded Service Account credentials\n\n$ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/creds.json \n\n\n\n# Build roughenough with Google KMS support\n\n$ cargo build --release --features \"gcpkms\"\n\n\n\n# Encrypt the seed value\n\n$ target/release/roughenough-kms \\\n\n -k projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME \\\n\n -s a0a31d76900080c3cdc42fe69de8dd0086d6b54de7814004befd0b9c4447757e\n\n \n\n# Output of above will be something like this\n\nkms_protection: \"projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\"\n\nseed: 71000c000a2400c7f2553954873ef29aeb37384c25d7a937d389221207c3368657870129d601d084c8da1249008d6fd4640f815596788e97bb3ce02fd007bc25a1019ca51945c3b99283d3945baacd77b1b991f5f6f8848c549a5767f57c9c999e97fe6d28fdb17db1d63c2ea966d8236d20c71e8e9c757c5bab62472c65b48376bc8951700aceb22545fce58d77e7cc147f7134da7a2cca790b54f29e4798442cee6e0d34e57f80ce983f7e5928cceff2\n\n```\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 19, "score": 39085.2872324983 }, { "content": "#### Configuration\n\n\n\nCopy and paste the output `kms_protection` and `seed` values into a config or\n\nset the corresponding environment variables. The `roughenough-server` will detect that\n\nAWS KMS is being used and decrypt the seed automatically. 
For example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nkms_protection: \"arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n\nseed: b8000c000102020078d39e85c7386e9e2bed1f30fac6dd322db96b8aaac8974fc6c0e0f566f8f6c971012fca1e69fffffd947fe82a9e505baf580000007e307c06092a864886f70d010706a06f306d020100306806092a864886f70d010701301e060960864801650304012e3011040c55d16d891b3b2a1ae2587a9c020110803bcc74dd96336009087772b28ec908c40e4113b1ab9b98934bd3b4f3dd3c1e8cdc6da82a4321fd8378ad0e2e0507bf0c5ea0e28d447e5f8482533baa423b7af8459ae87736f381d87fe38c21a805fae1c25c43d59200f42cae0d07f741e787a04c0ad72774942dddf818be0767e4963fe5a810f734a0125c\n\n```\n\n\n\nor using environment based configuration:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_KMS_PROTECTION=\"arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n\n$ export ROUGHENOUGH_SEED=b8000c000102020078d39e85c7386e9e2bed1f30fac6dd322db96b8aaac8974fc6c0e0f566f8f6c971012fca1e69fffffd947fe82a9e505baf580000007e307c06092a864886f70d010706a06f306d020100306806092a864886f70d010701301e060960864801650304012e3011040c55d16d891b3b2a1ae2587a9c020110803bcc74dd96336009087772b28ec908c40e4113b1ab9b98934bd3b4f3dd3c1e8cdc6da82a4321fd8378ad0e2e0507bf0c5ea0e28d447e5f8482533baa423b7af8459ae87736f381d87fe38c21a805fae1c25c43d59200f42cae0d07f741e787a04c0ad72774942dddf818be0767e4963fe5a810f734a0125c\n\n```\n\n\n\n### GCP Example\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 20, "score": 39085.030252563076 }, { "content": "#### Configuration\n\n\n\nCopy and paste the output `kms_protection` and `seed` values into a config or\n\nset the corresponding environment variables. `roughenough-server` will detect that\n\nGoogle KMS is being used and decrypt the seed automatically. 
For example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nkms_protection: \"projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\"\n\nseed: 71000c000a2400c7f2553954873ef29aeb37384c25d7a937d389221207c3368657870129d601d084c8da1249008d6fd4640f815596788e97bb3ce02fd007bc25a1019ca51945c3b99283d3945baacd77b1b991f5f6f8848c549a5767f57c9c999e97fe6d28fdb17db1d63c2ea966d8236d20c71e8e9c757c5bab62472c65b48376bc8951700aceb22545fce58d77e7cc147f7134da7a2cca790b54f29e4798442cee6e0d34e57f80ce983f7e5928cceff2\n\n```\n\n\n\nor using environment based configuration:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_KMS_PROTECTION=\"projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\"\n\n$ export ROUGHENOUGH_SEED=71000c000a2400c7f2553954873ef29aeb37384c25d7a937d389221207c3368657870129d601d084c8da1249008d6fd4640f815596788e97bb3ce02fd007bc25a1019ca51945c3b99283d3945baacd77b1b991f5f6f8848c549a5767f57c9c999e97fe6d28fdb17db1d63c2ea966d8236d20c71e8e9c757c5bab62472c65b48376bc8951700aceb22545fce58d77e7cc147f7134da7a2cca790b54f29e4798442cee6e0d34e57f80ce983f7e5928cceff2\n\n```\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 21, "score": 39084.838538343916 }, { "content": "/// Inspect the message in `buf` and guess which Roughtime protocol it corresponds to.\n\nfn guess_protocol_version(buf: &[u8]) -> Version {\n\n if &buf[0..8] == RFC_REQUEST_FRAME_BYTES {\n\n Version::Rfc\n\n } else {\n\n Version::Classic\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 22, "score": 34257.038781619616 }, { "content": "// Convenience function to create zero-filled Vec of given size\n\nfn vec_zero_filled(len: usize) -> Vec<u8> {\n\n (0..len).map(|_| 0).collect()\n\n}\n\n\n\n/// Envelope encryption of the long-term key seed value.\n\n///\n\n/// The seed is encrypted using AES-GCM-256 with:\n\n///\n\n/// * 32 byte (256 bit) random key\n\n/// * 12 byte (96 bit) random nonce\n\n/// * 16 byte (128 bit) authentication tag\n\n///\n\n/// Randomness obtained from\n\n/// [`ring::rand::SecureRandom`](https://briansmith.org/rustdoc/ring/rand/trait.SecureRandom.html).\n\n///\n\n/// The key used to encrypt the seed is wrapped (encrypted) using a\n\n/// [`KmsProvider`](trait.KmsProvider.html) implementation.\n\n///\n\npub struct EnvelopeEncryption;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 23, "score": 31720.957229524443 }, { "content": " use std::fmt;\n\n use std::fmt::Formatter;\n\n use std::str::FromStr;\n\n\n\n use bytes::Bytes;\n\n use futures::executor::block_on;\n\n use rusoto_core::Region;\n\n use rusoto_kms::{DecryptRequest, EncryptRequest, Kms, KmsClient};\n\n\n\n use crate::kms::{AD, DEK_LEN_BYTES, EncryptedDEK, KmsError, KmsProvider, PlaintextDEK};\n\n\n\n /// Amazon Web Services Key Management Service\n\n /// https://aws.amazon.com/kms/\n\n pub struct AwsKms {\n\n kms_client: KmsClient,\n\n key_id: String,\n\n }\n\n\n\n impl AwsKms {\n\n /// Create a new instance from the full ARN of a AWS KMS key. 
The ARN is expected\n", "file_path": "src/kms/awskms.rs", "rank": 24, "score": 31293.560368147304 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#[cfg(feature = \"awskms\")]\n\npub mod inner {\n\n extern crate bytes;\n\n\n\n use std::collections::HashMap;\n\n use std::default::Default;\n", "file_path": "src/kms/awskms.rs", "rank": 25, "score": 31291.695737295366 }, { "content": "\n\n match block_on(self.kms_client.encrypt(encrypt_req)) {\n\n Ok(result) => {\n\n if let Some(ciphertext) = result.ciphertext_blob {\n\n Ok(ciphertext.to_vec())\n\n } else {\n\n Err(KmsError::OperationFailed(\n\n \"no ciphertext despite successful response\".to_string(),\n\n ))\n\n }\n\n }\n\n Err(e) => Err(KmsError::OperationFailed(e.to_string())),\n\n }\n\n }\n\n\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> {\n\n let mut decrypt_req: DecryptRequest = Default::default();\n\n decrypt_req.ciphertext_blob = Bytes::from(encrypted_dek.to_vec());\n\n\n\n let mut dec_context = HashMap::new();\n", "file_path": "src/kms/awskms.rs", "rank": 26, "score": 31290.602695005746 }, { "content": " Err(e) => Err(KmsError::OperationFailed(e.to_string())),\n\n }\n\n }\n\n }\n\n\n\n #[cfg(feature = \"awskms\")]\n\n impl fmt::Display for AwsKms {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.key_id)\n\n }\n\n }\n\n}\n", "file_path": "src/kms/awskms.rs", "rank": 27, "score": 31288.637531104483 }, { "content": " /// to be of the form `arn:aws:kms:some-aws-region:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`\n\n pub fn from_arn(arn: &str) -> Result<Self, KmsError> {\n\n let parts: Vec<&str> = arn.split(':').collect();\n\n\n\n if parts.len() != 6 {\n\n return Err(KmsError::InvalidConfiguration(format!(\n\n \"invalid KMS arn: too few parts {}\",\n\n parts.len()\n\n )));\n\n }\n\n\n\n let region_part = parts.get(3).expect(\"region is missing\");\n\n let region = match Region::from_str(region_part) {\n\n Ok(r) => r,\n\n Err(e) => return Err(KmsError::InvalidConfiguration(e.to_string())),\n\n };\n\n\n\n Ok(AwsKms {\n\n kms_client: KmsClient::new(region),\n\n key_id: arn.to_string(),\n", "file_path": "src/kms/awskms.rs", "rank": 28, "score": 31286.366257586822 }, { "content": " })\n\n }\n\n }\n\n\n\n impl KmsProvider for AwsKms {\n\n fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError> {\n\n if plaintext_dek.len() != DEK_LEN_BYTES {\n\n return Err(KmsError::InvalidKey(format!(\n\n \"provided DEK wrong length: {}\",\n\n plaintext_dek.len()\n\n )));\n\n }\n\n\n\n let mut encrypt_req: EncryptRequest = Default::default();\n\n encrypt_req.key_id = self.key_id.clone();\n\n encrypt_req.plaintext = Bytes::from(plaintext_dek.to_vec());\n\n\n\n let mut enc_context = HashMap::new();\n\n enc_context.insert(\"AD\".to_string(), AD.to_string());\n\n encrypt_req.encryption_context = Some(enc_context);\n", "file_path": "src/kms/awskms.rs", "rank": 
29, "score": 31285.087277019764 }, { "content": " dec_context.insert(\"AD\".to_string(), AD.to_string());\n\n decrypt_req.encryption_context = Some(dec_context);\n\n\n\n match block_on(self.kms_client.decrypt(decrypt_req)) {\n\n Ok(result) => {\n\n if let Some(plaintext_dek) = result.plaintext {\n\n if plaintext_dek.len() == DEK_LEN_BYTES {\n\n Ok(plaintext_dek.to_vec())\n\n } else {\n\n Err(KmsError::InvalidKey(format!(\n\n \"decrypted DEK wrong length: {}\",\n\n plaintext_dek.len()\n\n )))\n\n }\n\n } else {\n\n Err(KmsError::OperationFailed(\n\n \"decrypted payload is empty\".to_string(),\n\n ))\n\n }\n\n }\n", "file_path": "src/kms/awskms.rs", "rank": 30, "score": 31282.875625302047 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\nextern crate roughenough;\n\n\n\nuse criterion::{black_box, Criterion};\n\n\n\nuse roughenough::merkle::{MerkleTree, root_from_paths};\n\nuse roughenough::RtMessage;\n\nuse roughenough::Tag;\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 31, "score": 30683.74755529729 }, { "content": " create_empty_message,\n\n create_single_field_message,\n\n create_two_field_message,\n\n create_four_field_message,\n\n create_nested_message\n\n);\n\n\n\ncriterion_group!(merkle_tree, create_new_merkle_tree, reuse_merkle_trees);\n\n\n\ncriterion_main!(message_creation, merkle_tree);\n", "file_path": "benches/roughenough-bench.rs", "rank": 32, "score": 30669.799644616527 }, { "content": "fn nonce_from_rfc_request(buf: &[u8]) -> Result<(Vec<u8>, Version), Error> {\n\n // first 8 bytes were RFC_REQUEST_FRAME_BYTES, [0..8]\n\n let mut cur = Cursor::new(&buf[8..12]);\n\n let reported_len = cur.read_u32::<LittleEndian>()?;\n\n let actual_len = (buf.len() - 12) as u32;\n\n\n\n if reported_len != actual_len {\n\n return Err(Error::LengthMismatch(reported_len, actual_len));\n\n }\n\n\n\n let msg = RtMessage::from_bytes(&buf[12..])?;\n\n\n\n if !has_supported_version(&msg) {\n\n return Err(Error::NoCompatibleVersion);\n\n }\n\n\n\n match msg.get_field(Tag::NONC) {\n\n Some(nonce) => Ok((nonce.to_vec(), Version::Rfc)),\n\n None => Err(Error::InvalidRequest),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 33, "score": 28601.618426542107 }, { "content": "fn nonce_from_classic_request(buf: &[u8]) -> Result<(Vec<u8>, Version), Error> {\n\n let msg = RtMessage::from_bytes(buf)?;\n\n match msg.get_field(Tag::NONC) {\n\n Some(nonce) => Ok((nonce.to_vec(), Version::Classic)),\n\n None => Err(Error::InvalidRequest),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 34, "score": 28601.618426542107 }, { "content": "# The public key of 'roughtime.int08h.com' is stored in a DNS TXT record \n\n$ host -t TXT roughtime.int08h.com\n\nroughtime.int08h.com descriptive text \"016e6e0284d24c37c6e4d7d8d5b4e1d3c1949ceaa545bf875616c9dce0c9bec1\"\n\n\n\n# Validate the server response using its public key\n\n$ target/release/roughenough-client -v roughtime.int08h.com 2002 -p 016e6e0284d24c37c6e4d7d8d5b4e1d3c1949ceaa545bf875616c9dce0c9bec1\n\nRequesting time from: \"roughtime.int08h.com\":2002\n\nReceived time from server: midpoint=\"Oct 26 2018 23:22:20\", radius=1000000, verified=Yes (merkle_index=0)\n\nOct 26 2018 23:22:20\n\n```\n\n\n\nThe **`verified=Yes`** in the output confirms that the server's response had a valid signature.\n\n\n\n### Server Configuration\n\n\n\nThere are two (mutually exclusive) ways to configure the Roughenough server: \n\n\n\n1. A YAML file, or\n\n2. 
Environment variables\n\n\n\nThe server accepts the following configuration parameters:\n\n\n\nYAML Key | Environment Variable | Necessity | Description\n\n--- | --- | --- | ---\n\n`interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n`port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n`seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary; see [Optional Features](#optional-features))\n\n`batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n`status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n`health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**, see [Optional Features](#optional-features).\n\n`kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity. See [Optional Features](#optional-features).\n\n`fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n\n", "file_path": "README.md", "rank": 35, "score": 20754.27537364356 }, { "content": "# Or using environment variables\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_SEED=f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n$ target/release/roughenough-server ENV\n\n2018-07-25 00:05:09 INFO [server] Roughenough server v1.0.5 starting\n\n2018-07-25 00:05:09 INFO [server] Long-term public key: d0756ee69ff5fe96cbcf9273208fec53124b1dd3a24d3910e07c7c54e2473012\n\n2018-07-25 00:05:09 INFO [server] Ephemeral public key: 25fd5dc31ceee241aed3e643534e95ed0609e9a20982a45ac0312a5f55e2cc66\n\n2018-07-25 00:05:09 INFO [server] Server listening on 127.0.0.1:8686\n\n```\n\n\n\nThe resulting binary is `target/release/roughenough-server`. 
After building you can copy the \n\nbinary and run on its own (no `cargo` needed):\n\n\n\n```bash\n\n$ cp target/release/roughenough-server /usr/local/bin \n\n```\n\n\n\n### Stopping the Server\n\n\n\nUse Ctrl-C or `kill` the process.\n\n\n\n\n\n## Optional Features\n\n\n\nRoughenough has two opt-in (disabled by default) features that are enabled either \n\nA) via a config setting, or B) at compile-time.\n\n\n\n* [HTTP Health Check responder](doc/OPTIONAL-FEATURES.md#http-health-check) \n\n to facilitate detection and replacement of \"sick\" Roughenough servers.\n\n* [Key Management System (KMS) support](doc/OPTIONAL-FEATURES.md#key-management-system-kms-support)\n\n to protect the long-term server identity using envelope encryption and \n\n AWS or Google KMS.\n\n\n\nSee [OPTIONAL-FEATURES.md](doc/OPTIONAL-FEATURES.md) for details and instructions\n\nhow to enable and use.\n\n\n\n\n\n## Limitations\n\n\n\nRoughtime features not implemented by the server:\n\n\n\n* On-line (while server is running) key rotation. The server must be restarted to generate a new delegated key. \n\n* The Roughenough server depends on the host's time source to comply with the smeared leap-second \n\n requirement of the Roughtime protocol. A Roughenough server sourcing time from \n\n [Google's public NTP servers](https://developers.google.com/time/) would produce compliant\n\n smeared leap-seconds but time sourced from members of `pool.ntp.org` likely will not.\n\n\n", "file_path": "README.md", "rank": 36, "score": 20748.935338094987 }, { "content": "## Version 1.1.0\n\n\n\n* Optional HTTP health check (requested in #8), see the\n\n [feature's documentation](https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md#http-health-check)\n\n* Support AWS and Google Key Management Systems (KMS) to protect the server's long-term key.\n\n See the [KMS documentation](https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md#key-management-system-kms-support).\n\n* Numerous refactorings and clean ups to support fuzzing of \n\n server components (b801eda, thanks to @Aaron1011)\n\n\n\n## Version 1.0.6\n\n\n\n* As pointed out in #10, the client and server binary names were too generic. Rename \n\n them to be packaging friendly. Thank you @grempe. (b43bcb27ad)\n\n \n\n## Version 1.0.5\n\n\n\n* The server now supports configuration from \n\n [environment variables](https://github.com/int08h/roughenough#server-configuration)\n\n \n\n## Version 1.0.4\n\n\n\n* Update `untrusted` dependency to incorporate security fix (see https://github.com/RustSec/advisory-db/pull/24). \n\n Fixes #6 reported by @tirkarthi (383b0347).\n\n \n\n## Release 1.0.3\n\n\n\n* Limit the number of tags in a message to 1024 (0b8c965)\n\n\n\n## Release 1.0.2\n\n\n\n* Merge input validation and error handling improvements from #5. Fuzzing FTW.\n\n* Misc docstring and README updates\n\n* Fix incorrect range-check introduced in 9656fda and released as 1.0.1.\n\n\n\n## Release 1.0.1 (yanked)\n\n\n\n* Release 1.0.1 was removed from Github and yanked from crates.io due to a range-check bug. \n\n 1.0.2 is its replacement. 
\n\n \n\n## Release 1.0.0\n\n\n\nThanks to @Aaron1011's work, Roughenough has 1.0 level of functionality.\n\n\n\n* Server batches responses and signs Merkle tree root (3471e04, ee38933f, and 31bf8b3)\n\n* `mio` error handling improvement (613fb01f)\n\n* Build on Rust Nightly (350b23a)\n", "file_path": "CHANGELOG.md", "rank": 37, "score": 20746.520369998954 }, { "content": "# Roughenough \n\n\n\n[![crates.io](https://img.shields.io/crates/v/roughenough.svg?style=flat-square)](https://crates.io/crates/roughenough)\n\n[![Build Status](https://img.shields.io/travis/int08h/roughenough/master.svg?style=flat-square)](https://travis-ci.org/int08h/roughenough)\n\n[![Apache License 2](https://img.shields.io/badge/license-ASF2-blue.svg?style=flat-square)](https://www.apache.org/licenses/LICENSE-2.0.txt)\n\n\n\n**Roughenough** is an RFC-draft compliant [Roughtime](https://roughtime.googlesource.com/roughtime) secure time \n\nsynchronization client and server implementation in Rust. \n\n\n\nRoughenough's server and client are functionally complete and \n\nat feature parity with the reference C++ and Golang implementations. \n\n\n\nRequires latest stable Rust to compile. Contributions welcome, see\n\n[CONTRIBUTING](../master/CONTRIBUTING.md) for instructions and [limitations](#limitations) for areas that could use attention.\n\n\n\n## RFC Work-In-Progress\n\n\n\nRoughenough implements the Roughtime protocol as specified in [the draft-5 RFC](https://www.ietf.org/archive/id/draft-ietf-ntp-roughtime-05.html).\n\n \n\n**Important differences from the draft RFC**\n\n1. Roughenough uses SHA-512/256 to compute the Merkle tree. Draft-5 of the RFC uses a\n\n bespoke 32-byte SHA-512 prefix without rationale or justification. Given that \n\n standardized 32-byte SHA-512/256 exists and is already implemented widely, I'm \n\n sticking with it while I advocate for the RFC to move away from the custom prefix\n\n and adopt SHA-512/256.\n\n2. The server and client send/expect RFC protocol version `1` (VER tag is `0x00000001`) \n\n instead of the draft's suggested `0x80000000 + version`.\n\n\n\nThe Roughenough server operates both the \"classic\" protocol **and** the RFC compliant \n\nprotocol at the same time on a single serving port (the 8-byte magic frame value added \n", "file_path": "README.md", "rank": 38, "score": 20746.20565054545 }, { "content": "#### YAML Configuration \n\n\n\nThe table above lists the YAML keys available in the config file. An example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nseed: f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n```\n\n\n\nProvide the config file as the single command-line argument to the Roughenough server binary:\n\n\n\n```bash\n\n$ /path/to/roughenough-server /path/to/config.yaml\n\n```\n\n\n\n#### Environment Configuration\n\n\n\nRoughenough can be configured via the `ROUGHENOUGH_*` [environment variables](https://12factor.net/config) \n\nlisted in the table above. Start the server with a single `ENV` argument to have Roughenough configure itself\n\nfrom the environment. 
Example:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_SEED=f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n$ /path/to/roughenough-server ENV\n\n```\n\n\n\n### Starting the Server\n\n\n\n```bash\n\n# Build roughenough\n\n$ cargo build --release\n\n\n\n# Via a config file\n\n$ target/release/roughenough-server example.cfg\n\n2018-07-25 00:05:09 INFO [server] Roughenough server v1.0.5 starting\n\n2018-07-25 00:05:09 INFO [server] Long-term public key: d0756ee69ff5fe96cbcf9273208fec53124b1dd3a24d3910e07c7c54e2473012\n\n2018-07-25 00:05:09 INFO [server] Ephemeral public key: 25fd5dc31ceee241aed3e643534e95ed0609e9a20982a45ac0312a5f55e2cc66\n\n2018-07-25 00:05:09 INFO [server] Server listening on 127.0.0.1:8686\n\n\n", "file_path": "README.md", "rank": 39, "score": 20743.0006065057 }, { "content": "# send RFC protocol Roughtime requests\n\n$ roughenough-client -p 1 roughtime.int08h.com 2002\n\n```\n\n\n\n## Links\n\n* [Roughenough Github repo](https://github.com/int08h/roughenough)\n\n* Original [Roughtime project](https://roughtime.googlesource.com/roughtime)\n\n* My blog posts giving a [technical deep-dive into Roughtime](https://int08h.com/post/to-catch-a-lying-timeserver/) and\n\n exploring details of [on-the-wire Roughtime messages](https://int08h.com/post/roughtime-message-anatomy/).\n\n* Cloudflare's fantastic [blog post](https://blog.cloudflare.com/roughtime/) and accompanying \n\n [open-source project](https://developers.cloudflare.com/roughtime/).\n\n\n\n## Building and Running\n\n\n\n### Minimum Supported Rust Version (MSRV)\n\n\n\nRoughenough uses [2018 edition](https://rust-lang-nursery.github.io/edition-guide/rust-2018/index.html) \n\nfeatures and requires Rust 1.31 or newer to build.\n\n\n\n### Building\n\n\n\n```bash\n\n# Build roughenough\n\n$ cargo build --release\n\n```\n\n\n\nThe client binary is `target/release/roughenough-client`. After building you can copy the \n\nbinary and run on its own (no `cargo` needed) if you wish.\n\n\n\n```bash\n\n$ cp target/release/roughenough-client /usr/local/bin \n\n```\n\n\n\n### Using the Client to Query a Roughtime Server \n\n\n\n```bash\n\n$ target/release/roughenough-client -v roughtime.int08h.com 2002\n\nRequesting time from: \"roughtime.int08h.com\":2002\n\nReceived time from server: midpoint=\"Oct 26 2018 23:20:44\", radius=1000000, verified=No (merkle_index=0)\n\nOct 26 2018 23:20:44\n\n```\n\n\n\n### Setting The System Time on Linux\n\n\n\nYou can use the `date` utility on Linux machines to set the system time to the time determined by the Roughenough client:\n\n\n\n```bash\n\nsudo date --utc --set \"$(roughenough-client -z roughtime.int08h.com 2002)\"\n\n```\n\n\n\n### Validating Server Responses \n\n\n\nUse the `-p` flag with the client to validate the server's response with its public key.\n\n\n\n```bash\n", "file_path": "README.md", "rank": 40, "score": 20742.356588898107 }, { "content": "## Version 1.2.0-draft-5\n\n* Roughenough (mostly) implements the Roughtime protocol as specified in [the draft-5 RFC](https://www.ietf.org/archive/id/draft-ietf-ntp-roughtime-05.html).\n\n \n\n **Important differences from the draft RFC**\n\n 1. Roughenough uses SHA-512/256 to compute the Merkle tree. Draft-5 of the RFC uses a\n\n bespoke 32-byte SHA-512 prefix without rationale or justification. 
Given that\n\n standardized 32-byte SHA-512/256 exists and is already implemented widely, I'm \n\n sticking with it while I advocate for the RFC to move away from the custom prefix\n\n and adopt SHA-512/256.\n\n 2. The server and client send/expect RFC protocol version `1` (VER tag is `0x00000001`) \n\n instead of the draft's suggested `0x80000000 + version`.\n\n\n\n* The Roughenough server operates both the \"classic\" protocol **and** the RFC compliant \n\n protocol at the same time on a single serving port (the 8-byte magic frame value added \n\n by the RFC is used to distinguish classic vs. rfc requests).\n\n\n\n The new `-p/--protocol` flag of `roughenough-client` controls the protocol version to\n\n use in requests (`0` = classic protocol, `1` = RFC protocol). The default is `0` the\n\n \"classic\" protocol, until the RFC is finalized:\n\n\n\n ```\n\n # send RFC protocol Roughtime requests\n\n $ roughenough-client -p 1 roughtime.int08h.com 2002\n\n ```\n\n* Added `-d/--dump` to `roughenough-client` that will pretty-print text representations \n\n of the messages it sends and receives.\n\n\n\n## Version 1.1.9\n\n\n\nHousekeeping:\n\n* 8f088f1 Overdue Ring update 0.13 -> 0.16\n\n* 43b1de3 GCK KMS updated to the latest dependencies\n\n* 7ff2e53 AWS KMS also updated to latest dependencies \n\n\n\n## Version 1.1.8\n\n\n\nNew feature:\n\n* 407f12d client: output local time by default, add -z/--zulu for UTC \n\n\n\nHousekeeping:\n\n* 02212e2 Switch to std::time and drop use of 'time' crate \n\n* d42db50 Upgrade several dependencies to latest versions \n\n* e13d6fd Remove deprecated `std::error::Error::description` calls \n\n* 32f11aa Update Dockerfile to Rust 1.42 \n\n\n", "file_path": "CHANGELOG.md", "rank": 41, "score": 20742.030387574512 }, { "content": "## Version 1.1.7\n\n\n\n* Improved options for client output thanks to @zicklag (f1f834e8c).\n\n\n\n By default the client now outputs just the time reported by the queried server. \n\n The `-v` or `--verbose` flag will print additional information such as the response's \n\n midpoint and radius. `-j` or `--json` outputs responses in JSON format instead.\n\n\n\n Non-response text output is written to standard error to enable verbose output \n\n while redirecting the response(s) to a file or pipe like so:\n\n \n\n ```\n\n $ roughenough-client -v roughtime.int08h.com 2002 > time.txt\n\n Requesting time from: \"roughtime.int08h.com\":2002\n\n Received time from server: midpoint=\"Oct 08 2019 18:40:38\", radius=1000000, verified=No (merkle_index=0)\n\n \n\n $ cat time.txt\n\n Oct 08 2019 18:40:38\n\n ```\n\n\n\n## Version 1.1.6\n\n\n\n* Fix several Clippy items (266f1adc9) \n\n* Update to latest Rusoto (6ff01af52)\n\n* Update to latest google-cloudkms (a0165c019)\n\n* Update Dockerfile to Rust 1.38 (a14c2e8)\n\n\n\n## Version 1.1.5\n\n\n\n* Improved error messages (3841942)\n\n* Update fuzzer server target to sync with roughenough-fuzz\n\n* Add Dockerfile to create a server container\n\n\n\n## Version 1.1.4\n\n\n\n* Implement Roughtime ecosystem response mangling (177372f, f851deb)\n\n* Doc fix from @Muncan90 (20ba144)\n\n\n\n## Version 1.1.3\n\n\n\n* Add decrypt option to `roughenough-kms` \n\n\n\n## Version 1.1.2 \n\n\n\n* Add client request statistics tracking.\n\n* Clean-up and simplification of server inner loop.\n\n* Rust 2018 edition required to compile.\n\n\n\n## Version 1.1.1\n\n\n\n* Provide auxiliary data to the AWS KMS decryption call. 
The auxiliary data _was_ provided in encrypt, but not decrypt, resulting in unconditional failure when unwrapping the long-term identity. See https://github.com/int08h/roughenough/commit/846128d08bd3fcd72f23b3123b332d0692782e41#diff-7f7c3059af30a5ded26269301caf8531R102\n\n\n", "file_path": "CHANGELOG.md", "rank": 42, "score": 20740.198873588204 }, { "content": "# Contributing to Roughenough\n\n\n\nDo you enjoy working on obscure cryptographically secure time synchronization protocols? \n\n\n\n:+1::tada: nice, me too!\n\n\n\n# Contribute via Pull Requests\n\n\n\nPlease open a pull request (PR) for your changes and include:\n\n\n\n* An overall description/rationale of the PR\n\n* Tests for any new or modified functionality\n\n* Code formatted with `rustfmt` default style settings \n\n* License (Apache 2.0) and copyright statements for your code\n\n* A Developer Certificate of Origin (DCO) sign-off as described below\n\n* A willingness to iterate and make changes ;)\n\n\n\n`Roughenough` targets **stable Rust** only. Contributions that don't compile\n\non `stable` will be declined. Sorry.\n\n\n\n# Developer Certificate of Origin\n\n\n\nTo provide assurance of the provenance and integrity of contributions \n\nRoughenough uses the [Developer Certificate of Origin](https://developercertificate.org/)\n\ncreated by the Linux Foundation instead of lengthy Contributor License \n\nAgreements (CLAs). \n\n\n\nPlease include *verbatim* and *unchanged* the full DCO statement \n\nbelow with your PR:\n\n\n\n```\n\nDeveloper Certificate of Origin\n\nVersion 1.1\n\n\n\nCopyright (C) 2004, 2006 The Linux Foundation and its contributors.\n\n1 Letterman Drive\n\nSuite D4700\n\nSan Francisco, CA, 94129\n\n\n\nEveryone is permitted to copy and distribute verbatim copies of this\n\nlicense document, but changing it is not allowed.\n\n\n\nDeveloper's Certificate of Origin 1.1\n\n\n\nBy making a contribution to this project, I certify that:\n\n\n\n(a) The contribution was created in whole or in part by me and I\n\n have the right to submit it under the open source license\n\n indicated in the file; or\n\n\n\n(b) The contribution is based upon previous work that, to the best\n\n of my knowledge, is covered under an appropriate open source\n\n license and I have the right under that license to submit that\n\n work with modifications, whether created in whole or in part\n\n by me, under the same open source license (unless I am\n\n permitted to submit under a different license), as indicated\n", "file_path": "CONTRIBUTING.md", "rank": 43, "score": 20737.62226235527 }, { "content": "by the RFC is used to distinguish classic vs. rfc requests).\n\n\n\nThe new `-p/--protocol` flag of `roughenough-client` controls the protocol version to\n\nuse in requests (`0` = classic protocol, `1` = RFC protocol). 
The default is `0` the\n\n\"classic\" protocol, until the RFC is finalized:\n\n\n\n```\n", "file_path": "README.md", "rank": 44, "score": 20736.22188013426 }, { "content": " in the file; or\n\n\n\n(c) The contribution was provided directly to me by some other\n\n person who certified (a), (b) or (c) and I have not modified\n\n it.\n\n\n\n(d) I understand and agree that this project and the contribution\n\n are public and that a record of the contribution (including all\n\n personal information I submit with it, including my sign-off) is\n\n maintained indefinitely and may be redistributed consistent with\n\n this project or the open source license(s) involved.\n\n```\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 45, "score": 20735.86324062343 }, { "content": "## About the Roughtime Protocol\n\n[Roughtime](https://roughtime.googlesource.com/roughtime) is a protocol that aims to achieve rough \n\ntime synchronisation in a secure way that doesn't depend on any particular time server, and in such\n\na way that, if a time server does misbehave, clients end up with cryptographic proof of it. It was \n\ncreated by Adam Langley and Robert Obryk.\n\n \n\n## Contributors\n\n* Stuart Stock (stuart {at} int08h.com)\n\n* Aaron Hill (aa1ronham {at} gmail.com)\n\n* Peter Todd (pete {at} petertodd.org)\n\n* Muncan90 (github.com/muncan90)\n\n* Zicklag (github.com/zicklag)\n\n\n\n## Copyright and License\n\nRoughenough is copyright (c) 2017-2021 int08h LLC. All rights reserved. \n\n\n\nint08h LLC licenses Roughenough (the \"Software\") to you under the Apache License, version 2.0 \n\n(the \"License\"); you may not use this Software except in compliance with the License. You may obtain \n\na copy of the License from the [LICENSE](../master/LICENSE) file included with the Software or at:\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software distributed under the License \n\nis distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or \n\nimplied. See the License for the specific language governing permissions and limitations under \n\nthe License.\n", "file_path": "README.md", "rank": 46, "score": 20733.760952665063 }, { "content": "# Thanks In Advance for Helping Roughenough!\n\n\n\nStuart @int08h\n", "file_path": "CONTRIBUTING.md", "rank": 47, "score": 20733.560382657583 }, { "content": "## Restore the 1024 byte minimum request size requirement\n\n\n\nDraft 02 states that: \n\n\n\n Responding to requests shorter than 1024 bytes is OPTIONAL \n\n and servers MUST NOT send responses larger than the requests \n\n they are replying to.\n\n\n\nThe minimum request size requirement exists to prevent a roughtime server from becoming\n\na DDoS amplifier [CF-DDoS]. A minimum request size of of 1024 bytes ensures that even \n\na busy roughtime server signing a Merkle tree of 64 requests generates a response *smaller* \n\nthan the 1024 byte request minimum (response would be 744 bytes, see [int08h]).\n\n\n\nIf requests smaller than 1024 bytes are permitted, how small could a request be? A valid \n\nRoughtime request *without* a PAD field would be 72 bytes long: \n\n\n\n 4 bytes number of fields \n\n 4 bytes NONC tag \n\n 64 bytes NONC value\n\n \n\nGiven that the *minimum* Server response size is 360 bytes [int08h], a minimal size request \n\npresents a DDoS attacker with a potential 5x gain in size. 
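To make the size arithmetic above concrete, the sketch below shows how a classic-protocol client can pad its request up to the 1024-byte minimum using the `RtMessage`/`Tag` API from the snippets above. The helper name and the 944-byte pad length are assumptions of this sketch (the pad follows from the wire layout described above: 4-byte field count, one 4-byte offset, two 4-byte tags, 64-byte nonce), not constants taken from the library.

```rust
use roughenough::{RtMessage, Tag};

// Hypothetical helper: build a classic-protocol request padded to 1024 bytes,
// so that a typical server response (744 bytes for a 64-request batch, per the
// figures above) stays smaller than the request.
fn make_padded_classic_request(nonce: &[u8; 64]) -> Vec<u8> {
    // 16-byte header (count + offset + two tags) + 64-byte NONC + 944-byte PAD = 1024
    let pad = [0u8; 944];

    let mut msg = RtMessage::with_capacity(2);
    msg.add_field(Tag::NONC, nonce.as_ref()).unwrap();
    msg.add_field(Tag::PAD_CLASSIC, pad.as_ref()).unwrap();

    let bytes = msg.encode().unwrap();
    // If the header math above holds, the encoded request is exactly 1024 bytes.
    debug_assert_eq!(bytes.len(), 1024);
    bytes
}
```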
\n\n\n\nMaking the minimum response size OPTIONAL requires Roughtime server operators to decide \n\n\"how small is too small\" and a wrong choice will create more DDoS amplifiers in the world.\n\n\n\nSuggestion: mandate requests >= 1024 bytes\n\n\n", "file_path": "doc/rfc-commentary.md", "rank": 48, "score": 19346.449925651235 }, { "content": "## The LEAP field is not necessary \n\n\n\nRoughtime can handle leap seconds and time uncertainty/discontinuity complications without the \n\naddition of new tags/fields. The RADI field already provides a method for a Roughtime \n\nimplementation to express time uncertainty in responses [draf02]: \n\n\n\n The RADI tag value is a uint32 representing the server's estimate of\n\n the accuracy of MIDP in microseconds. Servers MUST ensure that the\n\n true time is within (MIDP-RADI, MIDP+RADI) at the time they compose\n\n the response message.\n\n\n\nIn the case of a leap second, a Roughtime server can increase the value returned in \n\nthe RADI field to account for the uncertainty introduced by a leap second. The increase \n\nin RADI value can persist as long as necessary (the duration of a leap second \"smear\" \n\nfor example [AWS-smear] and [Google-smear]).\n\n \n\nSuggestion: remove DUT, DTAI, and LEAP fields along with int32 type\n\n\n\n## Introduction of a signed int32 type creates a new security risk \n\n\n\nThe addition of the signed int32 field type (required to support the DUT, DTAI, and \n\nLEAP fields) exposes Roughtime implementations to a new *class* of software errors\n\naround sign/unsigned conversions. \n\n\n\nThe Common Weaknesses Enumeration Top 25 Most Dangerous Software Errors [CWE-TOP25] \n\nlists \"Integer Overflow or Wraparound\" as the #8 most frequent software weakness. The \n\ndetail page on integer overflow [CWE-190] identifies signed/unsigned confusion, \n\nunintentional wrap-around, and lack of range checks as sources of software errors.\n\n\n\nIntroducing a signed integer type into Roughtime does not guarantee these issues\n\noccur. However it does introduces an entirely new area of concern that would not\n\nexist if these signed fields were omitted. \n\n\n\nSuggestion: remove DUT, DTAI, and LEAP fields along with int32 type\n\n\n", "file_path": "doc/rfc-commentary.md", "rank": 49, "score": 19346.070920375896 }, { "content": "## Checking response vs. request size complicates server implementation\n\n\n\nIn Draft 02: \n\n\n\n Responding to requests shorter than 1024 bytes is OPTIONAL \n\n and servers MUST NOT send responses larger than the requests \n\n they are replying to.\n\n\n\nRoughtime servers can batch multiple requests into a single response making response \n\nsize a function of server load/batching parameters plus concurrent requests. Roughtime \n\nServer implementations may gather or buffer client requests prior to constructing the \n\nresponse. \n\n\n\n\"...servers MUST NOT send responses larger than the requests...\" will require implementations\n\nto perform additional tracking of per-request sizes and then compute the resulting response\n\nsize once the response *after* batch size has been determined. \n\n\n\nThis is more complex and incurs additional processing compared to simply rejecting all \n\nrequests <1024 bytes.\n\n\n\nSuggestion: mandate requests >= 1024 bytes\n\n\n\n## The \"ROUGHTIM\" packet format is redundant\n\n\n\nThe addition of the constant \"ROUGHTIM\" plus additional length field is redundant to \n\nthe message format (which also has a length field). 
The value this additional \n\npacket format is not clear.\n\n\n\nSuggestion: use \"bare\" Roughtime messages as the packet format \n\n\n\n## Stick with SHA-512; eliminate use of truncated SHA-512/256 \n\n\n\nTruncated SHA-512/256 is performed by a) compute SHA-512, then b) truncate the result. \n\nThe resulting computational effort of SHA-512 and SHA-512/256 is equivalent. \n\n\n\nThe draft utilizes SHA-512/256 for its 32 byte output, as opposed to 64 bytes for\n\nSHA-512. The motivation for this change is unclear and it complicates implementations\n\nwhich now need two hashing primitives (SHA-512/256 initialization is different than SHA-512).\n\n\n\nSuggestion: use SHA-512 throughout and drop any use of SHA-512/256\n\n\n\n## References \n\n\n\n* [AWS-smear] https://aws.amazon.com/blogs/aws/look-before-you-leap-the-coming-leap-second-and-aws/\n\n* [CF-DDoS] https://www.cloudflare.com/learning/ddos/ntp-amplification-ddos-attack/\n\n* [CWE-190] https://cwe.mitre.org/data/definitions/190.html\n\n* [CWE-TOP25] https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html\n\n* [draft02] https://tools.ietf.org/html/draft-ietf-ntp-roughtime-02\n\n* [Google-smear] https://developers.google.com/time/smear\n\n* [int08h] https://int08h.com/post/to-catch-a-lying-timeserver/#keeping-response-sizes-compact\n\n* [roughtime] https://roughtime.googlesource.com/roughtime\n\n\n", "file_path": "doc/rfc-commentary.md", "rank": 50, "score": 19345.17244966042 }, { "content": "# Comments on draft-ietf-ntp-roughtime-02\n\n\n\nAuthor: Stuart Stock\n\nDate : 2020-06-14\n\n\n\n## Introduction\n\n\n\nI am the original creator and current maintainer of two Roughtime implementations:\n\n\n\n * Roughenough (Rust) https://github.com/int08h/roughenough\n\n * Nearenough (Java) https://github.com/int08h/nearenough\n\n\n\nHave written deep-dive articles about the Roughtime protocol: \n\n\n\n * https://int08h.com/post/to-catch-a-lying-timeserver/\n\n * https://int08h.com/post/roughtime-message-anatomy/\n\n \n\nAnd operate the longest running publicly accessible (non-Googler) Roughtime server \n\non the internet\n\n\n\n * roughtime.int08h.com:2002\n\n \n\nI offer my comments on draft-ietf-ntp-roughtime-02 as someone well versed in the Roughtime \n\nprotocol and intimately familiar with its implementation and trade-offs.\n\n\n\n## Keep the \"rough\" in \"Roughtime\"\n\n\n\nThe authors of the original Roughtime protocol definition [roughtime] state that a \n\ndeliberate goal of the Roughtime protocol was \"...time synchronisation to within 10 \n\nseconds of the correct time.\" Those authors go on to state that \"[i]f you have \n\n_serious_ time synchronisation needs you‘ll want the machinery in NTP or even PTP...\"\n\n\n\nAddition of the DUT, DTAI, and LEAP fields run contrary to this design intent. These fields\n\nprovide time synchronization features that duplicate those in NTP and PTP. Precise time \n\nsync should be delegated to NTP and PTP and their ecosystems.\n\n\n\nSimplicity of the Roughtime protocol encourages simple implementations. Simplicity\n\nin implementation is a key to assurance that implementations are correct and secure. \n\nIncreasing the number of fields and field types that must be implemented runs contrary \n\nto ease of implementation. 
Simplicity, and by extension security, should be a deliberate \n\nand top-of-mind design goal of the Roughtime standardization process.\n\n\n\nMultiple existing IETF protocols address needs of highly accurate time synchronization \n\nand should be used where timing precision requires. Keep Roughtime \"rough\".\n\n\n\nSuggestion: remove DUT, DTAI, and LEAP fields along with int32 type\n\n\n", "file_path": "doc/rfc-commentary.md", "rank": 51, "score": 19343.95215021442 }, { "content": " client_stats: false,\n\n fault_percentage: 0,\n\n };\n\n\n\n for (key, value) in cfg[0].as_hash().unwrap() {\n\n match key.as_str().unwrap() {\n\n \"port\" => config.port = value.as_i64().unwrap() as u16,\n\n \"interface\" => config.interface = value.as_str().unwrap().to_string(),\n\n \"batch_size\" => config.batch_size = value.as_i64().unwrap() as u8,\n\n \"seed\" => {\n\n let val = value.as_str().unwrap().to_string();\n\n config.seed = HEX\n\n .decode(val.as_bytes())\n\n .expect(\"seed value invalid; 'seed' must be a valid hex value\");\n\n }\n\n \"status_interval\" => {\n\n let val = value.as_i64().expect(\"status_interval value invalid\");\n\n config.status_interval = Duration::from_secs(val as u64)\n\n }\n\n \"kms_protection\" => {\n", "file_path": "src/config/file.rs", "rank": 52, "score": 27.686003752542824 }, { "content": " .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid port: {}\", port));\n\n };\n\n\n\n if let Ok(interface) = env::var(ROUGHENOUGH_INTERFACE) {\n\n cfg.interface = interface.to_string();\n\n };\n\n\n\n if let Ok(seed) = env::var(ROUGHENOUGH_SEED) {\n\n cfg.seed = HEX\n\n .decode(seed.as_bytes())\n\n .expect(\"invalid seed value; 'seed' should be a hex value\");\n\n };\n\n\n\n if let Ok(batch_size) = env::var(ROUGHENOUGH_BATCH_SIZE) {\n\n cfg.batch_size = batch_size\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid batch_size: {}\", batch_size));\n\n };\n\n\n", "file_path": "src/config/environment.rs", "rank": 53, "score": 26.850809330150312 }, { "content": " assert!(ciphertext_blob.len() < MIN_PAYLOAD_SIZE);\n\n\n\n let kms = MockKmsProvider {};\n\n let result = EnvelopeEncryption::decrypt_seed(&kms, ciphertext_blob.as_bytes());\n\n\n\n match result.expect_err(\"expected KmsError\") {\n\n KmsError::InvalidData(msg) => assert!(msg.contains(\"ciphertext too short\")),\n\n e => panic!(\"Unexpected error {:?}\", e),\n\n }\n\n }\n\n\n\n #[test]\n\n fn encrypt_decrypt_round_trip() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 1\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n", "file_path": "src/kms/envelope.rs", "rank": 54, "score": 26.113755650455992 }, { "content": "pub const DEFAULT_BATCH_SIZE: u8 = 64;\n\n\n\n/// Amount of time between each logged status update.\n\npub const DEFAULT_STATUS_INTERVAL: Duration = Duration::from_secs(600);\n\n\n\n///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for 
requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n", "file_path": "src/config/mod.rs", "rank": 55, "score": 25.87719205307963 }, { "content": "\n\nuse byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\n\n\nuse crate::error::Error;\n\nuse crate::RFC_REQUEST_FRAME_BYTES;\n\nuse crate::tag::Tag;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n///\n\n/// A Roughtime protocol message; a map of u32 tags to arbitrary byte-strings.\n\n///\n\n#[derive(Debug, Clone)]\n\npub struct RtMessage {\n\n tags: Vec<Tag>,\n\n values: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl RtMessage {\n", "file_path": "src/message.rs", "rank": 56, "score": 24.876111238983725 }, { "content": "\n\n /// Create a CERT message with a DELE containing the provided online key\n\n /// and a SIG of the DELE value signed by the long-term key\n\n pub fn make_cert(&mut self, online_key: &OnlineKey) -> RtMessage {\n\n let dele_bytes = online_key.make_dele().encode().unwrap();\n\n\n\n self.signer.update(CERTIFICATE_CONTEXT.as_bytes());\n\n self.signer.update(&dele_bytes);\n\n\n\n let dele_signature = self.signer.sign();\n\n\n\n let mut cert_msg = RtMessage::with_capacity(2);\n\n cert_msg.add_field(Tag::SIG, &dele_signature).unwrap();\n\n cert_msg.add_field(Tag::DELE, &dele_bytes).unwrap();\n\n\n\n cert_msg\n\n }\n\n\n\n /// Return the public key for the provided seed\n\n pub fn public_key(&self) -> &[u8] {\n", "file_path": "src/key/longterm.rs", "rank": 57, "score": 23.553744771745027 }, { "content": "\n\nuse crate::CERTIFICATE_CONTEXT;\n\nuse crate::key::OnlineKey;\n\nuse crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse 
crate::tag::Tag;\n\n\n\n///\n\n/// Represents the server's long-term identity.\n\n///\n\npub struct LongTermKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl LongTermKey {\n\n pub fn new(seed: &[u8]) -> Self {\n\n LongTermKey {\n\n signer: Signer::from_seed(seed),\n\n }\n\n }\n", "file_path": "src/key/longterm.rs", "rank": 58, "score": 22.6152522866697 }, { "content": "\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\nuse mio::net::UdpSocket;\n\n\n\nuse crate::{RtMessage, Tag};\n\nuse crate::config::ServerConfig;\n\nuse crate::grease::Grease;\n\nuse crate::key::{LongTermKey, OnlineKey};\n\nuse crate::merkle::MerkleTree;\n\nuse crate::stats::ServerStats;\n\nuse crate::version::Version;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\npub struct Responder {\n\n version: Version,\n\n online_key: OnlineKey,\n\n long_term_public_key: String,\n\n cert_bytes: Vec<u8>,\n", "file_path": "src/responder.rs", "rank": 59, "score": 22.568473263776212 }, { "content": "pub use crate::error::Error;\n\npub use crate::message::RtMessage;\n\npub use crate::tag::Tag;\n\n\n\nmod error;\n\nmod message;\n\nmod tag;\n\n\n\npub mod config;\n\npub mod grease;\n\npub mod key;\n\npub mod kms;\n\npub mod merkle;\n\npub mod request;\n\npub mod responder;\n\npub mod server;\n\npub mod sign;\n\npub mod stats;\n\npub mod version;\n\n\n\n/// Version of Roughenough\n\npub const VERSION: &str = \"1.2.0-draft-5\";\n\n\n\n/// Roughenough version string enriched with any compile-time optional features\n", "file_path": "src/lib.rs", "rank": 60, "score": 21.716071381167772 }, { "content": " // Invoke KMS to decrypt the DEK\n\n let dek = kms.decrypt_dek(&encrypted_dek)?;\n\n\n\n // Decrypt the seed value using the DEK\n\n let unbound_dek = UnboundKey::new(&AES_256_GCM, &dek)?;\n\n let dek_opening_key = LessSafeKey::new(unbound_dek);\n\n match dek_opening_key.open_in_place(nonce, Aad::from(AD), &mut encrypted_seed) {\n\n Ok(plaintext) => Ok(plaintext.to_vec()),\n\n Err(_) => Err(KmsError::OperationFailed(\n\n \"failed to decrypt plaintext seed\".to_string(),\n\n )),\n\n }\n\n }\n\n\n\n ///\n\n /// Encrypt the seed value and protect the seed's encryption key using a\n\n /// [`KmsProvider`](trait.KmsProvider.html).\n\n ///\n\n /// The returned encrypted byte blob is safe to store on unsecured media.\n\n ///\n", "file_path": "src/kms/envelope.rs", "rank": 61, "score": 21.636798553439654 }, { "content": " let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n match dec_result.expect_err(\"expected an error\") {\n\n KmsError::InvalidData(msg) => assert!(msg.contains(\"nonce (1)\")),\n\n e => panic!(\"unexpected error {:?}\", e),\n\n }\n\n }\n\n\n\n #[test]\n\n fn modified_ciphertext_is_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 4\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n assert_ne!(plaintext, ciphertext);\n\n\n\n // start corruption 4 bytes in, after the DEK and NONCE length fields\n\n for i in (DEK_LEN_FIELD + NONCE_LEN_FIELD)..ciphertext.len() {\n", "file_path": "src/kms/envelope.rs", "rank": 62, "score": 21.552732184807194 }, { "content": "use crate::key::KmsProtection;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n/// A purely in-memory Roughenough config for testing purposes.\n\n///\n\n/// This is useful for testing or fuzzing a server 
without the need to create additional files.\n\npub struct MemoryConfig {\n\n pub port: u16,\n\n pub interface: String,\n\n pub seed: Vec<u8>,\n\n pub batch_size: u8,\n\n pub status_interval: Duration,\n\n pub kms_protection: KmsProtection,\n\n pub health_check_port: Option<u16>,\n\n pub client_stats: bool,\n\n pub fault_percentage: u8,\n\n}\n\n\n\nimpl MemoryConfig {\n", "file_path": "src/config/memory.rs", "rank": 63, "score": 21.157763257227657 }, { "content": " ciphertext_copy[1] = 99;\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n match dec_result.expect_err(\"expected an error\") {\n\n KmsError::InvalidData(msg) => assert!(msg.contains(\"invalid DEK\")),\n\n e => panic!(\"unexpected error {:?}\", e),\n\n }\n\n }\n\n\n\n #[test]\n\n fn invalid_nonce_length_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 3\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n let mut ciphertext_copy = ciphertext.clone();\n\n\n\n ciphertext_copy[2] = 1;\n", "file_path": "src/kms/envelope.rs", "rank": 64, "score": 20.6000397847875 }, { "content": "\n\n match result {\n\n Ok((http_resp, enc_resp)) => {\n\n if http_resp.status() == StatusCode::OK {\n\n let ciphertext = enc_resp.ciphertext.unwrap();\n\n let ct = BASE64.decode(ciphertext.as_bytes())?;\n\n Ok(ct)\n\n } else {\n\n Err(self.pretty_http_error(&http_resp))\n\n }\n\n }\n\n Err(e) => Err(KmsError::OperationFailed(format!(\"encrypt_dek() {:?}\", e))),\n\n }\n\n }\n\n\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> {\n\n let mut request = DecryptRequest::default();\n\n request.ciphertext = Some(BASE64.encode(encrypted_dek));\n\n request.additional_authenticated_data = Some(BASE64.encode(AD.as_bytes()));\n\n\n", "file_path": "src/kms/gcpkms.rs", "rank": 65, "score": 20.341562285010866 }, { "content": "impl EnvelopeEncryption {\n\n /// Decrypt a seed previously encrypted with `encrypt_seed()`\n\n pub fn decrypt_seed(\n\n kms: &dyn KmsProvider,\n\n ciphertext_blob: &[u8],\n\n ) -> Result<Vec<u8>, KmsError> {\n\n if ciphertext_blob.len() < MIN_PAYLOAD_SIZE {\n\n return Err(KmsError::InvalidData(format!(\n\n \"ciphertext too short: min {}, found {}\",\n\n MIN_PAYLOAD_SIZE,\n\n ciphertext_blob.len()\n\n )));\n\n }\n\n\n\n let mut tmp = Cursor::new(ciphertext_blob);\n\n\n\n // Read the lengths of the wrapped DEK and of the nonce\n\n let dek_len = tmp.read_u16::<LittleEndian>()? as usize;\n\n let nonce_len = tmp.read_u16::<LittleEndian>()? as usize;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 66, "score": 20.31883959760549 }, { "content": "const NONCE_LEN_BYTES: usize = 12;\n\n\n\n// Size of the AEAD authentication tag in bytes.\n\nconst TAG_LEN_BYTES: usize = 16;\n\n\n\n// Size of the 256-bit Data Encryption Key (DEK) in bytes.\n\nconst DEK_LEN_BYTES: usize = 32;\n\n\n\n// Trivial domain separation to guard against KMS key reuse\n\nconst AD: &str = \"roughenough\";\n\n\n\n/// An unencrypted (plaintext) 256-bit Data Encryption Key (DEK).\n\npub type PlaintextDEK = Vec<u8>;\n\n\n\n/// A Data Encryption Key (DEK) that has been encrypted (wrapped) by a Key Management System (KMS).\n\n///\n\n/// This is an opaque, implementation-specific value. 
AEAD tag size, nonce size,\n\n/// provider metadata, and so on will vary between [`KmsProvider`](trait.KmsProvider.html)\n\n/// implementations.\n\npub type EncryptedDEK = Vec<u8>;\n\n\n\n///\n\n/// A key management system that wraps/unwraps a data encryption key (DEK).\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 67, "score": 19.66499171502273 }, { "content": " pub fn new(port: u16) -> Self {\n\n let seed = b\"a32049da0ffde0ded92ce10a0230d35fe615ec8461c14986baa63fe3b3bac3db\";\n\n MemoryConfig {\n\n port,\n\n interface: \"127.0.0.1\".to_string(),\n\n seed: HEX.decode(seed).unwrap(),\n\n batch_size: DEFAULT_BATCH_SIZE,\n\n status_interval: DEFAULT_STATUS_INTERVAL,\n\n kms_protection: KmsProtection::Plaintext,\n\n health_check_port: None,\n\n client_stats: false,\n\n fault_percentage: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl ServerConfig for MemoryConfig {\n\n fn interface(&self) -> &str {\n\n self.interface.as_ref()\n\n }\n", "file_path": "src/config/memory.rs", "rank": 68, "score": 19.496726169340434 }, { "content": " response.add_field(Tag::CERT, cert_bytes).unwrap();\n\n response.add_field(Tag::INDX, &index).unwrap();\n\n\n\n response\n\n }\n\n\n\n /// Returns a reference to the long-term public key\n\n pub fn get_public_key(&self) -> &str {\n\n &self.long_term_public_key\n\n }\n\n\n\n /// Returns a reference to the on-line (delegated) key\n\n pub fn get_online_key(&self) -> &OnlineKey {\n\n &self.online_key\n\n }\n\n}\n", "file_path": "src/responder.rs", "rank": 69, "score": 19.46895231377104 }, { "content": "use crate::config::ServerConfig;\n\nuse crate::Error;\n\nuse crate::key::KmsProtection;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n///\n\n/// Obtain a Roughenough server configuration ([ServerConfig](trait.ServerConfig.html))\n\n/// from environment variables.\n\n///\n\n/// Config parameter | Environment Variable\n\n/// ---------------- | --------------------\n\n/// port | `ROUGHENOUGH_PORT`\n\n/// interface | `ROUGHENOUGH_INTERFACE`\n\n/// seed | `ROUGHENOUGH_SEED`\n\n/// batch_size | `ROUGHENOUGH_BATCH_SIZE`\n\n/// status_interval | `ROUGHENOUGH_STATUS_INTERVAL`\n\n/// kms_protection | `ROUGHENOUGH_KMS_PROTECTION`\n\n/// health_check_port | `ROUGHENOUGH_HEALTH_CHECK_PORT`\n\n/// client_stats | `ROUGHENOUGH_CLIENT_STATS`\n", "file_path": "src/config/environment.rs", "rank": 70, "score": 19.36556511506295 }, { "content": "\n\nuse crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL};\n\nuse crate::config::ServerConfig;\n\nuse crate::Error;\n\nuse crate::key::KmsProtection;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n///\n\n/// Read a Roughenough server configuration ([ServerConfig](trait.ServerConfig.html))\n\n/// from a YAML file.\n\n///\n\n/// Example minimal config:\n\n///\n\n/// ```yaml\n\n/// interface: 127.0.0.1\n\n/// port: 8686\n\n/// seed: f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n/// ```\n\n///\n", "file_path": "src/config/file.rs", "rank": 71, "score": 18.88428506251361 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License 
for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Protect the server's long-term key with envelope encryption and a key management system.\n\n//!\n\n//! Note: KMS support must be enabled at compile time, see the Roughenough's [documentation\n\n//! on optional features](https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md#key-management-system-kms-support)\n\n//! for instructions.\n", "file_path": "src/kms/mod.rs", "rank": 72, "score": 18.556314814850282 }, { "content": "use crate::config::ServerConfig;\n\nuse crate::error;\n\nuse crate::key::KmsProtection;\n\n#[cfg(feature = \"awskms\")]\n\npub use crate::kms::awskms::inner::AwsKms;\n\n#[cfg(feature = \"gcpkms\")]\n\npub use crate::kms::gcpkms::inner::GcpKms;\n\n\n\npub use self::envelope::EnvelopeEncryption;\n\n\n\nmod envelope;\n\n\n\n/// Errors generated by KMS operations\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsError {\n\n OperationFailed(String),\n\n InvalidConfiguration(String),\n\n InvalidData(String),\n\n InvalidKey(String),\n\n}\n", "file_path": "src/kms/mod.rs", "rank": 73, "score": 18.4565171862957 }, { "content": "\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\nuse ring::rand;\n\nuse ring::rand::SecureRandom;\n\nuse ring::signature::{self, Ed25519KeyPair, KeyPair};\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\nconst INITIAL_BUF_SIZE: usize = 1024;\n\n\n\n/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature\n\n#[derive(Debug)]\n\npub struct Verifier {\n\n pubkey: Vec<u8>,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl Verifier {\n\n pub fn new(pubkey: &[u8]) -> Self {\n\n Verifier {\n", "file_path": "src/sign.rs", "rank": 74, "score": 18.34684140210263 }, { "content": "\n\n let expected_sig = hex::decode(\n\n \"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c\"\n\n ).unwrap();\n\n\n\n let mut s = Signer::from_seed(&seed);\n\n s.update(&message);\n\n let sig = s.sign();\n\n assert_eq!(sig, expected_sig);\n\n }\n\n\n\n #[test]\n\n fn sign_verify_round_trip() {\n\n let seed = hex::decode(\"334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9\")\n\n .unwrap();\n\n\n\n let message = \"Hello world\".as_bytes();\n\n\n\n let mut signer = Signer::from_seed(&seed);\n\n signer.update(&message);\n", "file_path": "src/sign.rs", "rank": 75, "score": 17.947153566770965 }, { "content": " extern crate tokio;\n\n extern crate yup_oauth2 as oauth2;\n\n\n\n use std::default::Default;\n\n use std::env;\n\n use std::path::Path;\n\n use std::result::Result;\n\n\n\n use data_encoding::BASE64;\n\n use tokio::runtime::Runtime;\n\n\n\n use crate::kms::{AD, EncryptedDEK, KmsError, KmsProvider, PlaintextDEK};\n\n\n\n use self::cloudkms1::api::{DecryptRequest, EncryptRequest};\n\n use self::cloudkms1::CloudKMS;\n\n use self::hyper::{Body, StatusCode};\n\n use self::oauth2::ServiceAccountKey;\n\n\n\n const GOOGLE_APP_CREDS: &str = &\"GOOGLE_APPLICATION_CREDENTIALS\";\n\n\n", "file_path": "src/kms/gcpkms.rs", "rank": 76, "score": 17.870414253500776 }, { "content": " pub fn public_key_bytes(&self) -> &[u8] {\n\n self.key_pair.public_key().as_ref()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", HEX.encode(self.public_key_bytes()))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(\n\n 
f,\n\n \"Signer({}, {:?})\",\n\n HEX.encode(self.public_key_bytes()),\n\n self.buf\n\n )\n\n }\n", "file_path": "src/sign.rs", "rank": 77, "score": 17.680551417190358 }, { "content": "use std::str::FromStr;\n\n\n\npub use self::longterm::LongTermKey;\n\npub use self::online::OnlineKey;\n\n\n\nmod longterm;\n\nmod online;\n\n\n\n/// Methods for protecting the server's long-term identity\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsProtection {\n\n /// No protection, seed is in plaintext\n\n Plaintext,\n\n\n\n /// Envelope encryption of the seed using AWS Key Management Service\n\n AwsKmsEnvelope(String),\n\n\n\n /// Envelope encryption of the seed using Google Cloud Key Management Service\n\n GoogleKmsEnvelope(String),\n\n}\n", "file_path": "src/key/mod.rs", "rank": 78, "score": 17.619224713266348 }, { "content": "\n\n /// Returns a slice of the tags in the message\n\n pub fn tags(&self) -> &[Tag] {\n\n &self.tags\n\n }\n\n\n\n /// Returns a slice of the values in the message\n\n pub fn values(&self) -> &[Vec<u8>] {\n\n &self.values\n\n }\n\n\n\n /// Converts the message into a `HashMap` mapping each tag to its value\n\n pub fn into_hash_map(self) -> HashMap<Tag, Vec<u8>> {\n\n self.tags.into_iter().zip(self.values.into_iter()).collect()\n\n }\n\n\n\n /// Encode this message into an on-the-wire representation prefixed with RFC framing.\n\n pub fn encode_framed(&self) -> Result<Vec<u8>, Error> {\n\n let encoded = self.encode()?;\n\n let mut frame = Vec::with_capacity(RFC_REQUEST_FRAME_BYTES.len() + 4 + encoded.len());\n", "file_path": "src/message.rs", "rank": 79, "score": 17.614830271856157 }, { "content": " use std::io::{Cursor, Read};\n\n\n\n use byteorder::{LittleEndian, ReadBytesExt};\n\n\n\n use crate::message::*;\n\n use crate::tag::Tag;\n\n\n\n #[test]\n\n fn empty_message_size() {\n\n let msg = RtMessage::with_capacity(0);\n\n\n\n assert_eq!(msg.num_fields(), 0);\n\n // Empty message is 4 bytes, a single num_tags value\n\n assert_eq!(msg.encoded_size(), 4);\n\n }\n\n\n\n #[test]\n\n fn single_field_message_size() {\n\n let mut msg = RtMessage::with_capacity(1);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n", "file_path": "src/message.rs", "rank": 80, "score": 17.566203998567065 }, { "content": " use crate::kms::{KmsError, KmsProvider};\n\n use crate::kms::envelope::{DEK_LEN_FIELD, MIN_PAYLOAD_SIZE, NONCE_LEN_FIELD};\n\n use crate::kms::EnvelopeEncryption;\n\n\n\n struct MockKmsProvider {}\n\n\n\n // Mock provider that returns a copy of the input\n\n impl KmsProvider for MockKmsProvider {\n\n fn encrypt_dek(&self, plaintext_dek: &Vec<u8>) -> Result<Vec<u8>, KmsError> {\n\n Ok(plaintext_dek.to_vec())\n\n }\n\n\n\n fn decrypt_dek(&self, encrypted_dek: &Vec<u8>) -> Result<Vec<u8>, KmsError> {\n\n Ok(encrypted_dek.to_vec())\n\n }\n\n }\n\n\n\n #[test]\n\n fn decryption_reject_input_too_short() {\n\n let ciphertext_blob = \"1234567890\";\n", "file_path": "src/kms/envelope.rs", "rank": 81, "score": 17.523290792840143 }, { "content": " Classic,\n\n\n\n /// IETF standardized version\n\n Rfc,\n\n}\n\n\n\n// RFC version 1\n\nconst VERSION_1: &'static [u8] = &[0x01, 0x00, 0x00, 0x00];\n\n\n\nimpl Version {\n\n /// On-the-wire representation of the version value\n\n pub fn wire_bytes(self) -> &'static [u8] {\n\n match self {\n\n Version::Classic => unreachable!(\"invalid, no version bytes for Classic\"),\n\n Version::Rfc => VERSION_1,\n\n }\n\n }\n\n\n\n /// A short (non-canonical) string representation of the `Version`\n\n pub fn to_string(&self) -> 
String {\n", "file_path": "src/version.rs", "rank": 82, "score": 17.345976126192177 }, { "content": " #[test]\n\n fn sign_ed25519_empty_message() {\n\n let seed = hex::decode(\"9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60\")\n\n .unwrap();\n\n\n\n let expected_sig = hex::decode(\n\n \"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b\"\n\n ).unwrap();\n\n\n\n let mut s = Signer::from_seed(&seed);\n\n let sig = s.sign();\n\n assert_eq!(sig, expected_sig);\n\n }\n\n\n\n #[test]\n\n fn sign_ed25519_message() {\n\n let seed = hex::decode(\"0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9\")\n\n .unwrap();\n\n\n\n let message = hex::decode(\"cbc77b\").unwrap();\n", "file_path": "src/sign.rs", "rank": 83, "score": 17.315637332203245 }, { "content": " \"plaintext seed value must be 32 characters long, found {}\",\n\n cfg.seed().len()\n\n );\n\n is_valid = false;\n\n } else if *cfg.kms_protection() != KmsProtection::Plaintext\n\n && cfg.seed().len() <= SEED_LENGTH as usize\n\n {\n\n error!(\"KMS use enabled but seed value is too short to be an encrypted blob\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.batch_size() < 1 || cfg.batch_size() > 64 {\n\n error!(\n\n \"batch_size {} is invalid; valid range 1-64\",\n\n cfg.batch_size()\n\n );\n\n is_valid = false;\n\n }\n\n\n\n if cfg.fault_percentage() > 50 {\n", "file_path": "src/config/mod.rs", "rank": 84, "score": 17.30678753873255 }, { "content": "#[derive(Debug)]\n\npub enum Error {\n\n /// The associated tag was added to an `RtMessage` in non-increasing order.\n\n TagNotStrictlyIncreasing(Tag),\n\n\n\n /// The associated byte sequence does not correspond to a valid Roughtime tag.\n\n InvalidTag(Box<[u8]>),\n\n\n\n /// Invalid number of tags specified\n\n InvalidNumTags(u32),\n\n\n\n /// Tag value length exceeds length of source bytes\n\n InvalidValueLength(Tag, u32),\n\n\n\n /// Encoding failed. 
The associated `std::io::Error` should provide more information.\n\n EncodingFailure(std::io::Error),\n\n\n\n /// Request was less than 1024 bytes\n\n RequestTooShort,\n\n\n", "file_path": "src/error.rs", "rank": 85, "score": 16.525515841522164 }, { "content": " let val =\n\n value.as_str().unwrap().parse().unwrap_or_else(|_| {\n\n panic!(\"invalid kms_protection value: {:?}\", value)\n\n });\n\n config.kms_protection = val\n\n }\n\n \"health_check_port\" => {\n\n let val = value.as_i64().unwrap() as u16;\n\n config.health_check_port = Some(val);\n\n }\n\n \"client_stats\" => {\n\n let val = value.as_str().unwrap().to_ascii_lowercase();\n\n config.client_stats = val == \"yes\" || val == \"on\";\n\n }\n\n \"fault_percentage\" => {\n\n let val = value.as_i64().unwrap() as u8;\n\n config.fault_percentage = val;\n\n }\n\n unknown => {\n\n return Err(Error::InvalidConfiguration(format!(\n", "file_path": "src/config/file.rs", "rank": 86, "score": 16.48155911299641 }, { "content": "}\n\n\n\n#[rustfmt::skip] // rustfmt errors on the long signature strings\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn verify_ed25519_sig_on_empty_message() {\n\n let pubkey = hex::decode(\n\n \"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a\",\n\n ).unwrap();\n\n\n\n let signature = hex::decode(\n\n \"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b\"\n\n ).unwrap();\n\n\n\n let v = Verifier::new(&pubkey);\n\n let result = v.verify(&signature);\n\n assert_eq!(result, true);\n", "file_path": "src/sign.rs", "rank": 87, "score": 16.456218234569725 }, { "content": "/// A multi-step (init-update-finish) interface for creating an Ed25519 signature\n\npub struct Signer {\n\n key_pair: Ed25519KeyPair,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl Default for Signer {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Signer {\n\n pub fn new() -> Self {\n\n let rng = rand::SystemRandom::new();\n\n let mut seed = [0u8; 32];\n\n rng.fill(&mut seed).unwrap();\n\n\n\n Signer::from_seed(&seed)\n\n }\n", "file_path": "src/sign.rs", "rank": 88, "score": 16.390687017295658 }, { "content": " result.push_str(&HEX.encode(value));\n\n result.push_str(\"\\n\");\n\n }\n\n }\n\n\n\n result.push_str(&indent1);\n\n result.push_str(\"}\\n\");\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl Display for RtMessage {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.to_string(1))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/message.rs", "rank": 89, "score": 16.23589900337493 }, { "content": "use crate::kms::{AD, DEK_LEN_BYTES, KmsError, KmsProvider, NONCE_LEN_BYTES, TAG_LEN_BYTES};\n\nuse crate::SEED_LENGTH;\n\n\n\nconst DEK_LEN_FIELD: usize = 2;\n\nconst NONCE_LEN_FIELD: usize = 2;\n\n\n\n// 2 bytes - encrypted DEK length\n\n// 2 bytes - nonce length\n\n// n bytes - encrypted DEK\n\n// n bytes - nonce\n\n// n bytes - opaque (AEAD encrypted seed + tag)\n\nconst MIN_PAYLOAD_SIZE: usize = DEK_LEN_FIELD\n\n + NONCE_LEN_FIELD\n\n + DEK_LEN_BYTES\n\n + NONCE_LEN_BYTES\n\n + SEED_LENGTH as usize\n\n + TAG_LEN_BYTES;\n\n\n\n// Convenience function to create zero-filled Vec of given size\n", "file_path": "src/kms/envelope.rs", "rank": 90, "score": 15.942970797289878 }, { "content": "use crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse crate::SIGNED_RESPONSE_CONTEXT;\n\nuse crate::tag::Tag;\n\n\n\n///\n\n/// Represents the delegated Roughtime ephemeral online 
key.\n\n///\n\npub struct OnlineKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl Default for OnlineKey {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl OnlineKey {\n\n pub fn new() -> Self {\n", "file_path": "src/key/online.rs", "rank": 91, "score": 15.405958318771885 }, { "content": " let mut ciphertext_copy = ciphertext.clone();\n\n // flip some bits\n\n ciphertext_copy[i] = ciphertext[i].wrapping_add(1);\n\n\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n\n\n match dec_result.expect_err(\"Expected a KmsError error here\") {\n\n KmsError::OperationFailed(msg) => assert!(msg.contains(\"failed to decrypt\")),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/kms/envelope.rs", "rank": 92, "score": 15.331490169242826 }, { "content": "\n\n /// Returns the length in bytes of this message's on-the-wire representation.\n\n pub fn encoded_size(&self) -> usize {\n\n let num_tags = self.tags.len();\n\n let tags_size = 4 * num_tags;\n\n let offsets_size = if num_tags < 2 { 0 } else { 4 * (num_tags - 1) };\n\n let values_size: usize = self.values.iter().map(|v| v.len()).sum();\n\n\n\n 4 + tags_size + offsets_size + values_size\n\n }\n\n\n\n /// Calculate the length of PAD value such that the final encoded size of this message\n\n /// will be at least 1KB.\n\n pub fn calculate_padding_length(&mut self) -> usize {\n\n let size = self.encoded_size();\n\n if size >= 1024 {\n\n return 0;\n\n }\n\n\n\n let mut padding_needed = 1024 - size;\n", "file_path": "src/message.rs", "rank": 93, "score": 15.302632022470409 }, { "content": "//! system (KMS). The resulting opaque encrypted \"blob\" (encrypted seed + encrypted DEK) is\n\n//! stored in the Roughenough configuration.\n\n//!\n\n//! At server start-up the KMS is used to decrypt the DEK, which is then used to (in memory)\n\n//! decrypt the seed. The seed is used to generate the\n\n//! [delegated on-line key](../key/struct.OnlineKey.html) after which the seed and DEK are erased\n\n//! from memory.\n\n//!\n\n//! See\n\n//! * [`EnvelopeEncryption`](struct.EnvelopeEncryption.html) for Roughenough's implementation.\n\n//! * [Google](https://cloud.google.com/kms/docs/envelope-encryption) or\n\n//! [Amazon](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#enveloping)\n\n//! for more in-depth explanations of envelope encryption.\n\n//!\n\n\n\nuse std;\n\n\n\nuse data_encoding;\n\nuse ring;\n\n\n", "file_path": "src/kms/mod.rs", "rank": 94, "score": 15.132062391034816 }, { "content": "//!\n\n\n\nuse rand::{FromEntropy, Rng};\n\nuse rand::distributions::Bernoulli;\n\nuse rand::rngs::SmallRng;\n\nuse rand::seq::index::sample as index_sample;\n\nuse rand::seq::SliceRandom;\n\n\n\nuse crate::grease::Pathologies::*;\n\nuse crate::RtMessage;\n\nuse crate::SIGNATURE_LENGTH;\n\nuse crate::tag::Tag;\n\n\n\n///\n\n/// Ways that a message can be made invalid.\n\n///\n\npub enum Pathologies {\n\n /// Randomly re-order the (tag, value) pairs in the message. 
This violates the protocol's\n\n /// requirement that tags must be in strictly increasing order.\n\n RandomlyOrderTags,\n", "file_path": "src/grease.rs", "rank": 95, "score": 15.095921039725532 }, { "content": " assert_ne!(plaintext, ciphertext);\n\n\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext);\n\n assert_eq!(dec_result.is_ok(), true);\n\n\n\n let new_plaintext = dec_result.unwrap();\n\n assert_eq!(plaintext, new_plaintext);\n\n }\n\n\n\n #[test]\n\n fn invalid_dek_length_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 2\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n let mut ciphertext_copy = ciphertext.clone();\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 96, "score": 15.03936637009204 }, { "content": " pub fn encrypt_seed(kms: &dyn KmsProvider, plaintext_seed: &[u8]) -> Result<Vec<u8>, KmsError> {\n\n // Generate random DEK and nonce\n\n let rng = SystemRandom::new();\n\n let mut raw_dek = [0u8; DEK_LEN_BYTES];\n\n let mut raw_nonce = [0u8; NONCE_LEN_BYTES];\n\n rng.fill(&mut raw_dek)?;\n\n rng.fill(&mut raw_nonce)?;\n\n\n\n // Ring will overwrite plaintext with ciphertext+tag in this buffer\n\n let mut buf = plaintext_seed.to_vec();\n\n\n\n // Encrypt the plaintext seed (in buf) using the DEK\n\n let nonce = Nonce::assume_unique_for_key(raw_nonce);\n\n let unbound_dek = UnboundKey::new(&AES_256_GCM, &raw_dek)?;\n\n let dek_seal_key = LessSafeKey::new(unbound_dek);\n\n\n\n // Output overwrites context of 'buf' and appends auth tag to 'buf'\n\n if let Err(_) = dek_seal_key.seal_in_place_append_tag(nonce, Aad::from(AD), &mut buf) {\n\n return Err(KmsError::OperationFailed(\n\n \"failed to encrypt plaintext seed\".to_string(),\n", "file_path": "src/kms/envelope.rs", "rank": 97, "score": 15.017686984316757 }, { "content": " fn handle_health_check(&mut self) {\n\n let listener = self.health_listener.as_ref().unwrap();\n\n match listener.accept() {\n\n Ok((ref mut stream, src_addr)) => {\n\n info!(\"health check from {}\", src_addr);\n\n self.stats.add_health_check(&src_addr.ip());\n\n\n\n match stream.write(HTTP_RESPONSE.as_bytes()) {\n\n Ok(_) => (),\n\n Err(e) => warn!(\"error writing health check {}\", e),\n\n };\n\n\n\n match stream.shutdown(Shutdown::Both) {\n\n Ok(_) => (),\n\n Err(e) => warn!(\"error in health check socket shutdown {}\", e),\n\n }\n\n }\n\n Err(ref e) if e.kind() == ErrorKind::WouldBlock => {\n\n debug!(\"blocking in TCP health check\");\n\n }\n", "file_path": "src/server.rs", "rank": 98, "score": 15.000561066992105 }, { "content": "\n\n /// Request did not provide versions compatible with this implementation\n\n NoCompatibleVersion,\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Error::EncodingFailure(err)\n\n }\n\n}\n\n\n\nimpl From<KmsError> for Error {\n\n fn from(err: KmsError) -> Self {\n\n match err {\n\n KmsError::OperationFailed(m) => {\n\n Error::InvalidConfiguration(format!(\"KMS operation failed: {}\", m))\n\n }\n\n KmsError::InvalidConfiguration(m) => {\n\n Error::InvalidConfiguration(format!(\"invalid KMS config: {}\", m))\n\n }\n", "file_path": "src/error.rs", "rank": 99, "score": 14.998406067825659 } ]
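The src/kms/envelope.rs and src/kms/mod.rs snippets above describe the envelope blob layout: a little-endian u16 length for the KMS-wrapped DEK, a little-endian u16 length for the nonce, then the wrapped DEK, the nonce, and the AEAD ciphertext plus tag. The following is only an illustrative sketch of splitting a blob with that layout; it is not Roughenough code, it uses the standard library instead of the crate's byteorder and ring helpers, and it performs no decryption.

// Illustrative sketch: split an envelope blob laid out as
// [u16 dek_len][u16 nonce_len][dek][nonce][ciphertext+tag], with the
// lengths stored little-endian as in EnvelopeEncryption::decrypt_seed.
fn split_envelope(blob: &[u8]) -> Option<(&[u8], &[u8], &[u8])> {
    if blob.len() < 4 {
        return None;
    }
    let dek_len = u16::from_le_bytes([blob[0], blob[1]]) as usize;
    let nonce_len = u16::from_le_bytes([blob[2], blob[3]]) as usize;
    let rest = &blob[4..];
    if rest.len() < dek_len + nonce_len {
        return None;
    }
    let (dek, rest) = rest.split_at(dek_len);
    let (nonce, ciphertext) = rest.split_at(nonce_len);
    Some((dek, nonce, ciphertext))
}

fn main() {
    // Hypothetical toy blob: a 2-byte "wrapped DEK", a 3-byte "nonce",
    // and 4 bytes standing in for the AEAD ciphertext and tag.
    let blob = [2u8, 0, 3, 0, 0xAA, 0xBB, 1, 2, 3, 9, 9, 9, 9];
    let (dek, nonce, ciphertext) = split_envelope(&blob).unwrap();
    assert_eq!((dek.len(), nonce.len(), ciphertext.len()), (2, 3, 4));
    println!("dek={:?} nonce={:?} ciphertext={:?}", dek, nonce, ciphertext);
}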
Rust
src/read/elf/hash.rs
sunfishcode/object
aaf312e51fc6e4511e19a32c05d4b2ddf248b5b6
use core::mem;

use crate::elf;
use crate::read::{ReadError, ReadRef, Result};
use crate::{U32, U64};

use super::{FileHeader, Sym, SymbolTable, Version, VersionTable};

#[derive(Debug)]
pub struct HashTable<'data, Elf: FileHeader> {
    buckets: &'data [U32<Elf::Endian>],
    chains: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> HashTable<'data, Elf> {
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::HashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid hash header")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid hash buckets")?;
        let chains = data
            .read_slice(&mut offset, header.chain_count.get(endian) as usize)
            .read_error("Invalid hash chains")?;
        Ok(HashTable { buckets, chains })
    }

    pub fn symbol_table_length(&self) -> u32 {
        self.chains.len() as u32
    }

    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        let mut i = 0;
        let strings = symbols.strings();
        while index != 0 && i < self.chains.len() {
            if let Ok(symbol) = symbols.symbol(index) {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            index = self.chains.get(index)?.get(endian) as usize;
            i += 1;
        }
        None
    }
}

#[derive(Debug)]
pub struct GnuHashTable<'data, Elf: FileHeader> {
    symbol_base: u32,
    bloom_shift: u32,
    bloom_filters: &'data [u8],
    buckets: &'data [U32<Elf::Endian>],
    values: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> GnuHashTable<'data, Elf> {
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::GnuHashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid GNU hash header")?;
        let bloom_len =
            u64::from(header.bloom_count.get(endian)) * mem::size_of::<Elf::Word>() as u64;
        let bloom_filters = data
            .read_bytes(&mut offset, bloom_len)
            .read_error("Invalid GNU hash bloom filters")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid GNU hash buckets")?;
        let chain_count = (data.len() - offset as usize) / 4;
        let values = data
            .read_slice(&mut offset, chain_count)
            .read_error("Invalid GNU hash values")?;
        Ok(GnuHashTable {
            symbol_base: header.symbol_base.get(endian),
            bloom_shift: header.bloom_shift.get(endian),
            bloom_filters,
            buckets,
            values,
        })
    }

    pub fn symbol_base(&self) -> u32 {
        self.symbol_base
    }

    pub fn symbol_table_length(&self, endian: Elf::Endian) -> Option<u32> {
        if self.symbol_base == 0 {
            return None;
        }
        let mut max_symbol = 0;
        for bucket in self.buckets {
            let bucket = bucket.get(endian);
            if max_symbol < bucket {
                max_symbol = bucket;
            }
        }
        for value in self
            .values
            .get(max_symbol.checked_sub(self.symbol_base)? as usize..)?
        {
            max_symbol += 1;
            if value.get(endian) & 1 != 0 {
                return Some(max_symbol);
            }
        }
        None
    }

    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        let word_bits = mem::size_of::<Elf::Word>() as u32 * 8;
        let bloom_count = self.bloom_filters.len() / mem::size_of::<Elf::Word>();
        let offset =
            ((hash / word_bits) & (bloom_count as u32 - 1)) * mem::size_of::<Elf::Word>() as u32;
        let filter = if word_bits == 64 {
            self.bloom_filters
                .read_at::<U64<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
        } else {
            self.bloom_filters
                .read_at::<U32<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
                .into()
        };
        if filter & (1 << (hash % word_bits)) == 0 {
            return None;
        }
        if filter & (1 << ((hash >> self.bloom_shift) % word_bits)) == 0 {
            return None;
        }
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        if index == 0 {
            return None;
        }
        let strings = symbols.strings();
        let symbols = symbols.symbols().get(index..)?;
        let values = self
            .values
            .get(index.checked_sub(self.symbol_base as usize)?..)?;
        for (symbol, value) in symbols.iter().zip(values.iter()) {
            let value = value.get(endian);
            if value | 1 == hash | 1 {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            if value & 1 != 0 {
                break;
            }
            index += 1;
        }
        None
    }
}
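HashTable::find above resolves a name by walking the bucket and chain arrays. A minimal standalone sketch of that walk over plain u32 slices and a toy name table (hypothetical data, not an actual ELF section, and without the endian-aware types or version matching):

// Sketch of the SysV hash-chain walk from HashTable::find, using plain slices.
fn toy_find(
    buckets: &[u32],
    chains: &[u32],
    names: &[&str],
    want: &str,
    hash: u32,
) -> Option<usize> {
    let mut index = buckets[(hash as usize) % buckets.len()] as usize;
    let mut i = 0;
    while index != 0 && i < chains.len() {
        if names.get(index) == Some(&want) {
            return Some(index);
        }
        index = *chains.get(index)? as usize;
        i += 1;
    }
    None
}

fn main() {
    // Index 0 is the reserved null symbol, so chains terminate at 0.
    let names = ["", "foo", "bar"];
    // A single bucket pointing at symbol 1, which chains to symbol 2.
    let buckets = [1u32];
    let chains = [0u32, 2, 0];
    assert_eq!(toy_find(&buckets, &chains, &names, "bar", 0x0123_4567), Some(2));
    println!("{:?}", toy_find(&buckets, &chains, &names, "bar", 0x0123_4567));
}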
use core::mem;

use crate::elf;
use crate::read::{ReadError, ReadRef, Result};
use crate::{U32, U64};

use super::{FileHeader, Sym, SymbolTable, Version, VersionTable};

#[derive(Debug)]
pub struct HashTable<'data, Elf: FileHeader> {
    buckets: &'data [U32<Elf::Endian>],
    chains: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> HashTable<'data, Elf> {
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::HashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid hash header")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid hash buckets")?;
        let chains = data
            .read_slice(&mut offset, header.chain_count.get(endian) as usize)
            .read_error("Invalid hash chains")?;
        Ok(HashTable { buckets, chains })
    }

    pub fn symbol_table_length(&self) -> u32 {
        self.chains.len() as u32
    }

    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        let mut i = 0;
        let strings = symbols.strings();
        while index != 0 && i < self.chains.len() {
            if let Ok(symbol) = symbols.symbol(index) {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            index = self.chains.get(index)?.get(endian) as usize;
            i += 1;
        }
        None
    }
}

#[derive(Debug)]
pub struct GnuHashTable<'data, Elf: FileHeader> {
    symbol_base: u32,
    bloom_shift: u32,
    bloom_filters: &'data [u8],
    buckets: &'data [U32<Elf::Endian>],
    values: &'data [U32<Elf::Endian>],
}

impl<'data, Elf: FileHeader> GnuHashTable<'data, Elf> {
    pub fn parse(endian: Elf::Endian, data: &'data [u8]) -> Result<Self> {
        let mut offset = 0;
        let header = data
            .read::<elf::GnuHashHeader<Elf::Endian>>(&mut offset)
            .read_error("Invalid GNU hash header")?;
        let bloom_len =
            u64::from(header.bloom_count.get(endian)) * mem::size_of::<Elf::Word>() as u64;
        let bloom_filters = data
            .read_bytes(&mut offset, bloom_len)
            .read_error("Invalid GNU hash bloom filters")?;
        let buckets = data
            .read_slice(&mut offset, header.bucket_count.get(endian) as usize)
            .read_error("Invalid GNU hash buckets")?;
        let chain_count = (data.len() - offset as usize) / 4;
        let values = data
            .read_slice(&mut offset, chain_count)
            .read_error("Invalid GNU hash values")?;
        Ok(GnuHashTable {
            symbol_base: header.symbol_base.get(endian),
            bloom_shift: header.bloom_shift.get(endian),
            bloom_filters,
            buckets,
            values,
        })
    }

    pub fn symbol_base(&self) -> u32 {
        self.symbol_base
    }

    pub fn symbol_table_length(&self, endian: Elf::Endian) -> Option<u32> {
        if self.symbol_base == 0 {
            return None;
        }
        let mut max_symbol = 0;
        for bucket in self.buckets {
            let bucket = bucket.get(endian);
            if max_symbol <
l);
            }
        }
        None
    }

    pub fn find<R: ReadRef<'data>>(
        &self,
        endian: Elf::Endian,
        name: &[u8],
        hash: u32,
        version: Option<&Version>,
        symbols: &SymbolTable<'data, Elf, R>,
        versions: &VersionTable<'data, Elf>,
    ) -> Option<(usize, &'data Elf::Sym)> {
        let word_bits = mem::size_of::<Elf::Word>() as u32 * 8;
        let bloom_count = self.bloom_filters.len() / mem::size_of::<Elf::Word>();
        let offset =
            ((hash / word_bits) & (bloom_count as u32 - 1)) * mem::size_of::<Elf::Word>() as u32;
        let filter = if word_bits == 64 {
            self.bloom_filters
                .read_at::<U64<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
        } else {
            self.bloom_filters
                .read_at::<U32<Elf::Endian>>(offset.into())
                .ok()?
                .get(endian)
                .into()
        };
        if filter & (1 << (hash % word_bits)) == 0 {
            return None;
        }
        if filter & (1 << ((hash >> self.bloom_shift) % word_bits)) == 0 {
            return None;
        }
        let mut index = self.buckets[(hash as usize) % self.buckets.len()].get(endian) as usize;
        if index == 0 {
            return None;
        }
        let strings = symbols.strings();
        let symbols = symbols.symbols().get(index..)?;
        let values = self
            .values
            .get(index.checked_sub(self.symbol_base as usize)?..)?;
        for (symbol, value) in symbols.iter().zip(values.iter()) {
            let value = value.get(endian);
            if value | 1 == hash | 1 {
                if symbol.name(endian, strings) == Ok(name)
                    && versions.matches(endian, index, version)
                {
                    return Some((index, symbol));
                }
            }
            if value & 1 != 0 {
                break;
            }
            index += 1;
        }
        None
    }
}
bucket {
                max_symbol = bucket;
            }
        }
        for value in self
            .values
            .get(max_symbol.checked_sub(self.symbol_base)? as usize..)?
        {
            max_symbol += 1;
            if value.get(endian) & 1 != 0 {
                return Some(max_symbo
function_block-random_span
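Both find methods take a precomputed hash of the symbol name; the matching hash functions live in the crate's elf module, and their exact bodies appear in the src/elf.rs context items below. A standalone sketch that reproduces those two functions so the values can be computed without the rest of the crate:

// SysV hash, used for SHT_HASH sections (same body as elf::hash below).
fn sysv_hash(name: &[u8]) -> u32 {
    let mut hash = 0u32;
    for byte in name {
        hash = hash.wrapping_mul(16).wrapping_add(u32::from(*byte));
        hash ^= (hash >> 24) & 0xf0;
    }
    hash & 0xfff_ffff
}

// GNU hash, used for SHT_GNU_HASH sections (same body as elf::gnu_hash below).
fn gnu_hash(name: &[u8]) -> u32 {
    let mut hash = 5381u32;
    for byte in name {
        hash = hash.wrapping_mul(33).wrapping_add(u32::from(*byte));
    }
    hash
}

fn main() {
    let name = b"printf";
    // These values are what a caller would pass as the `hash` argument of
    // HashTable::find and GnuHashTable::find respectively.
    println!("SysV hash: {:#x}", sysv_hash(name));
    println!("GNU hash: {:#x}", gnu_hash(name));
}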
[ { "content": "/// Calculate the GNU hash for a symbol name.\n\n///\n\n/// Used for `SHT_GNU_HASH`.\n\npub fn gnu_hash(name: &[u8]) -> u32 {\n\n let mut hash = 5381u32;\n\n for byte in name {\n\n hash = hash.wrapping_mul(33).wrapping_add(u32::from(*byte));\n\n }\n\n hash\n\n}\n\n\n\n// Motorola 68k specific definitions.\n\n\n\n// m68k values for `Rel*::r_type`.\n\n\n\n/// No reloc\n\npub const R_68K_NONE: u32 = 0;\n\n/// Direct 32 bit\n\npub const R_68K_32: u32 = 1;\n\n/// Direct 16 bit\n\npub const R_68K_16: u32 = 2;\n\n/// Direct 8 bit\n\npub const R_68K_8: u32 = 3;\n", "file_path": "src/elf.rs", "rank": 0, "score": 457849.2697326131 }, { "content": "/// Calculate the SysV hash for a symbol name.\n\n///\n\n/// Used for `SHT_HASH`.\n\npub fn hash(name: &[u8]) -> u32 {\n\n let mut hash = 0u32;\n\n for byte in name {\n\n hash = hash.wrapping_mul(16).wrapping_add(u32::from(*byte));\n\n hash ^= (hash >> 24) & 0xf0;\n\n }\n\n hash & 0xfff_ffff\n\n}\n\n\n\n/// Header of `SHT_GNU_HASH` section.\n\n#[derive(Debug, Clone, Copy)]\n\n#[repr(C)]\n\npub struct GnuHashHeader<E: Endian> {\n\n /// The number of hash buckets.\n\n pub bucket_count: U32<E>,\n\n /// The symbol table index of the first symbol in the hash.\n\n pub symbol_base: U32<E>,\n\n /// The number of words in the bloom filter.\n\n ///\n\n /// Must be a non-zero power of 2.\n", "file_path": "src/elf.rs", "rank": 1, "score": 447105.9927822598 }, { "content": "fn print_elf<Elf: FileHeader<Endian = Endianness>>(p: &mut Printer<'_>, elf: &Elf, data: &[u8]) {\n\n if let Some(endian) = elf.endian().print_err(p) {\n\n print_file_header(p, endian, elf);\n\n if let Some(segments) = elf.program_headers(endian, data).print_err(p) {\n\n print_program_headers(p, endian, data, elf, segments);\n\n }\n\n if let Some(sections) = elf.sections(endian, data).print_err(p) {\n\n print_section_headers(p, endian, data, elf, &sections);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 2, "score": 377068.45356425154 }, { "content": "#[inline]\n\npub fn from_bytes_mut<T: Pod>(data: &mut [u8]) -> Result<(&mut T, &mut [u8])> {\n\n let size = mem::size_of::<T>();\n\n if size > data.len() {\n\n return Err(());\n\n }\n\n let (data, tail) = data.split_at_mut(size);\n\n let ptr = data.as_mut_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let val = unsafe { &mut *ptr.cast() };\n\n Ok((val, tail))\n\n}\n\n\n\n/// Cast a byte slice to a slice of a `Pod` type.\n\n///\n\n/// Returns the type slice and the tail of the byte slice.\n", "file_path": "src/pod.rs", "rank": 3, "score": 369778.0266258335 }, { "content": "/// Find the optional header and read the `optional_header.magic`.\n\n///\n\n/// It can be useful to know this magic value before trying to\n\n/// fully parse the NT headers.\n\npub fn optional_header_magic<'data, R: ReadRef<'data>>(data: R) -> Result<u16> {\n\n let dos_header = pe::ImageDosHeader::parse(data)?;\n\n // NT headers are at an offset specified in the DOS header.\n\n let offset = dos_header.nt_headers_offset().into();\n\n // It doesn't matter which NT header type is used for the purpose\n\n // of reading the optional header magic.\n\n let nt_headers = data\n\n .read_at::<pe::ImageNtHeaders32>(offset)\n\n .read_error(\"Invalid NT headers offset, size, or alignment\")?;\n\n if nt_headers.signature() != pe::IMAGE_NT_SIGNATURE {\n\n return 
Err(Error(\"Invalid PE magic\"));\n\n }\n\n Ok(nt_headers.optional_header().magic())\n\n}\n\n\n\n/// A trait for generic access to `ImageNtHeaders32` and `ImageNtHeaders64`.\n", "file_path": "src/read/pe/file.rs", "rank": 4, "score": 366984.3382301446 }, { "content": "fn print_object_at(p: &mut Printer<'_>, data: &[u8], offset: u64) {\n\n let kind = match object::FileKind::parse_at(data, offset) {\n\n Ok(file) => file,\n\n Err(err) => {\n\n println!(\"Failed to parse file: {}\", err);\n\n return;\n\n }\n\n };\n\n match kind {\n\n object::FileKind::MachO32 => macho::print_macho32(p, data, offset),\n\n object::FileKind::MachO64 => macho::print_macho64(p, data, offset),\n\n // TODO\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/mod.rs", "rank": 5, "score": 337345.104595638 }, { "content": "fn parse_sysv_extended_name<'data>(digits: &[u8], names: &'data [u8]) -> Result<&'data [u8], ()> {\n\n let offset = parse_u64_digits(digits, 10).ok_or(())?;\n\n let offset = offset.try_into().map_err(|_| ())?;\n\n let name_data = names.get(offset..).ok_or(())?;\n\n let name = match memchr::memchr2(b'/', b'\\0', name_data) {\n\n Some(len) => &name_data[..len],\n\n None => name_data,\n\n };\n\n Ok(name)\n\n}\n\n\n", "file_path": "src/read/archive.rs", "rank": 6, "score": 325886.3598654285 }, { "content": "// Multi-key quicksort.\n\n//\n\n// Ordering is such that if a string is a suffix of at least one other string,\n\n// then it is placed immediately after one of those strings. That is:\n\n// - comparison starts at the end of the string\n\n// - shorter strings come later\n\n//\n\n// Based on the implementation in LLVM.\n\nfn sort(mut ids: &mut [usize], mut pos: usize, strings: &IndexSet<&[u8]>) {\n\n loop {\n\n if ids.len() <= 1 {\n\n return;\n\n }\n\n\n\n let pivot = byte(ids[0], pos, strings);\n\n let mut lower = 0;\n\n let mut upper = ids.len();\n\n let mut i = 1;\n\n while i < upper {\n\n let b = byte(ids[i], pos, strings);\n\n if b > pivot {\n\n ids.swap(lower, i);\n\n lower += 1;\n\n i += 1;\n\n } else if b < pivot {\n\n upper -= 1;\n\n ids.swap(upper, i);\n\n } else {\n", "file_path": "src/write/string.rs", "rank": 7, "score": 325411.50905295176 }, { "content": "#[inline]\n\npub fn slice_from_bytes<T: Pod>(data: &[u8], count: usize) -> Result<(&[T], &[u8])> {\n\n let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;\n\n let tail = data.get(size..).ok_or(())?;\n\n let ptr = data.as_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let slice = unsafe { slice::from_raw_parts(ptr.cast(), count) };\n\n Ok((slice, tail))\n\n}\n\n\n\n/// Cast a mutable byte slice to a slice of a `Pod` type.\n\n///\n\n/// Returns the type slice and the tail of the byte slice.\n", "file_path": "src/pod.rs", "rank": 8, "score": 325259.0808606861 }, { "content": "#[inline]\n\npub fn from_bytes<T: Pod>(data: &[u8]) -> Result<(&T, &[u8])> {\n\n let size = mem::size_of::<T>();\n\n let tail = data.get(size..).ok_or(())?;\n\n let ptr = data.as_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let val = unsafe { &*ptr.cast() };\n\n Ok((val, tail))\n\n}\n\n\n\n/// Cast a mutable byte slice to a `Pod` type.\n\n///\n\n/// Returns the type and the 
tail of the slice.\n", "file_path": "src/pod.rs", "rank": 9, "score": 310754.0350728393 }, { "content": "fn print_file_header<Elf: FileHeader>(p: &mut Printer<'_>, endian: Elf::Endian, elf: &Elf) {\n\n p.group(\"FileHeader\", |p| {\n\n p.group(\"Ident\", |p| print_ident(p, elf.e_ident()));\n\n p.field_enum(\"Type\", elf.e_type(endian), &FLAGS_ET);\n\n p.field_enum(\"Machine\", elf.e_machine(endian), &FLAGS_EM);\n\n let version = elf.e_version(endian);\n\n if version < 256 {\n\n p.field_enum(\"Version\", version as u8, &FLAGS_EV);\n\n } else {\n\n p.field_hex(\"Version\", version);\n\n }\n\n p.field_enum(\"Type\", elf.e_type(endian), &FLAGS_ET);\n\n p.field_hex(\"Entry\", elf.e_entry(endian).into());\n\n p.field_hex(\"ProgramHeaderOffset\", elf.e_phoff(endian).into());\n\n p.field_hex(\"SectionHeaderOffset\", elf.e_shoff(endian).into());\n\n let flags = elf.e_flags(endian);\n\n p.field_hex(\"Flags\", flags);\n\n match elf.e_machine(endian) {\n\n EM_SPARC => p.flags(flags, 0, &FLAGS_EF_SPARC),\n\n EM_SPARCV9 => p.flags(flags, 0, &FLAGS_EF_SPARCV9),\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 10, "score": 307416.4899311814 }, { "content": "fn find_member(member_names: &mut [(String, bool)], name: &[u8]) -> bool {\n\n if member_names.is_empty() {\n\n return true;\n\n }\n\n match member_names.iter().position(|x| x.0.as_bytes() == name) {\n\n Some(i) => {\n\n member_names[i].1 = true;\n\n true\n\n }\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/objdump.rs", "rank": 11, "score": 294925.17773745663 }, { "content": "fn rel_flag_type<Elf: FileHeader>(endian: Elf::Endian, elf: &Elf) -> &'static [Flag<u32>] {\n\n match elf.e_machine(endian) {\n\n EM_68K => FLAGS_R_68K,\n\n EM_386 => FLAGS_R_386,\n\n EM_SPARC => FLAGS_R_SPARC,\n\n EM_MIPS => FLAGS_R_MIPS,\n\n EM_PARISC => FLAGS_R_PARISC,\n\n EM_ALPHA => FLAGS_R_ALPHA,\n\n EM_PPC => FLAGS_R_PPC,\n\n EM_PPC64 => FLAGS_R_PPC64,\n\n EM_AARCH64 => FLAGS_R_AARCH64,\n\n EM_ARM => FLAGS_R_ARM,\n\n EM_CSKY => FLAGS_R_CKCORE,\n\n EM_IA_64 => FLAGS_R_IA64,\n\n EM_SH => FLAGS_R_SH,\n\n EM_S390 => FLAGS_R_390,\n\n EM_CRIS => FLAGS_R_CRIS,\n\n EM_X86_64 => FLAGS_R_X86_64,\n\n EM_MN10300 => FLAGS_R_MN10300,\n\n EM_M32R => FLAGS_R_M32R,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 12, "score": 294589.64894909906 }, { "content": "fn print_rel_symbol<'data, Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n symbols: Option<SymbolTable<'data, Elf>>,\n\n sym: u32,\n\n) {\n\n let name = symbols.and_then(|symbols| {\n\n symbols\n\n .symbol(sym as usize)\n\n .and_then(|symbol| symbol.name(endian, symbols.strings()))\n\n .print_err(p)\n\n });\n\n p.field_string_option(\"Symbol\", sym, name);\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 13, "score": 276827.9655994783 }, { "content": "// JamCRC\n\nfn checksum(data: &[u8]) -> u32 {\n\n let mut hasher = crc32fast::Hasher::new_with_initial(0xffff_ffff);\n\n hasher.update(data);\n\n !hasher.finalize()\n\n}\n", "file_path": "src/write/coff.rs", "rank": 14, "score": 276009.2438576237 }, { "content": "fn byte(id: usize, pos: usize, strings: &IndexSet<&[u8]>) -> u8 {\n\n let string = strings.get_index(id).unwrap();\n\n let len = string.len();\n\n if len >= pos {\n\n string[len - pos]\n\n } else {\n\n // We know the strings don't contain null bytes.\n\n 0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn string_table() {\n\n let mut table = StringTable::default();\n\n let id0 = 
table.add(b\"\");\n\n let id1 = table.add(b\"foo\");\n", "file_path": "src/write/string.rs", "rank": 15, "score": 272267.34486359457 }, { "content": "/// Modifies `data` to start after the extended name.\n\nfn parse_bsd_extended_name<'data, R: ReadRef<'data>>(\n\n digits: &[u8],\n\n data: R,\n\n offset: &mut u64,\n\n size: &mut u64,\n\n) -> Result<&'data [u8], ()> {\n\n let len = parse_u64_digits(digits, 10).ok_or(())?;\n\n *size = size.checked_sub(len).ok_or(())?;\n\n let name_data = data.read_bytes(offset, len)?;\n\n let name = match memchr::memchr(b'\\0', name_data) {\n\n Some(len) => &name_data[..len],\n\n None => name_data,\n\n };\n\n Ok(name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/read/archive.rs", "rank": 16, "score": 269351.76576054026 }, { "content": "fn dump_object<W: Write, E: Write>(w: &mut W, e: &mut E, data: &[u8]) -> Result<()> {\n\n match object::File::parse(data) {\n\n Ok(file) => {\n\n dump_parsed_object(w, e, &file)?;\n\n }\n\n Err(err) => {\n\n writeln!(e, \"Failed to parse file: {}\", err)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/examples/src/objdump.rs", "rank": 17, "score": 265849.2867492222 }, { "content": "fn print_gnu_hash<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n _sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(hash)) = section.gnu_hash_header(endian, data).print_err(p) {\n\n p.group(\"GnuHash\", |p| {\n\n p.field(\"BucketCount\", hash.bucket_count.get(endian));\n\n p.field(\"SymbolBase\", hash.symbol_base.get(endian));\n\n p.field(\"BloomCount\", hash.bloom_count.get(endian));\n\n p.field(\"BloomShift\", hash.bloom_shift.get(endian));\n\n });\n\n }\n\n /* TODO: add this in a test somewhere\n\n if let Ok(Some((hash_table, link))) = section.gnu_hash(endian, data) {\n\n if let Ok(symbols) = _sections.symbol_table_by_index(endian, data, link) {\n\n if let Ok(versions) = _sections.versions(endian, data) {\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 18, "score": 260633.967568488 }, { "content": "#[inline]\n\npub fn bytes_of_mut<T: Pod>(val: &mut T) -> &mut [u8] {\n\n let size = mem::size_of::<T>();\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts_mut(slice::from_mut(val).as_mut_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a slice of a `Pod` type to a byte slice.\n", "file_path": "src/pod.rs", "rank": 19, "score": 255103.8613870678 }, { "content": "fn print_pe<Pe: ImageNtHeaders>(p: &mut Printer<'_>, data: &[u8]) {\n\n if let Some(dos_header) = ImageDosHeader::parse(data).print_err(p) {\n\n p.group(\"ImageDosHeader\", |p| {\n\n p.field_hex(\"Magic\", dos_header.e_magic.get(LE));\n\n p.field_hex(\"CountBytesLastPage\", dos_header.e_cblp.get(LE));\n\n p.field_hex(\"CountPages\", dos_header.e_cp.get(LE));\n\n p.field_hex(\"CountRelocations\", dos_header.e_crlc.get(LE));\n\n p.field_hex(\"CountHeaderParagraphs\", dos_header.e_cparhdr.get(LE));\n\n p.field_hex(\"MinAllocParagraphs\", dos_header.e_minalloc.get(LE));\n\n p.field_hex(\"MaxAllocParagraphs\", dos_header.e_maxalloc.get(LE));\n\n p.field_hex(\"StackSegment\", dos_header.e_ss.get(LE));\n\n p.field_hex(\"StackPointer\", dos_header.e_sp.get(LE));\n\n p.field_hex(\"Checksum\", dos_header.e_csum.get(LE));\n\n p.field_hex(\"InstructionPointer\", dos_header.e_ip.get(LE));\n\n 
p.field_hex(\"CodeSegment\", dos_header.e_cs.get(LE));\n\n p.field_hex(\"AddressOfRelocations\", dos_header.e_lfarlc.get(LE));\n\n p.field_hex(\"OverlayNumber\", dos_header.e_ovno.get(LE));\n\n p.field_hex(\"OemId\", dos_header.e_oemid.get(LE));\n\n p.field_hex(\"OemInfo\", dos_header.e_oeminfo.get(LE));\n\n p.field_hex(\"AddressOfNewHeader\", dos_header.e_lfanew.get(LE));\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 20, "score": 252949.6590471919 }, { "content": "#[inline]\n\npub fn bytes_of_slice_mut<T: Pod>(val: &mut [T]) -> &mut [u8] {\n\n let size = val.len().wrapping_mul(mem::size_of::<T>());\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts_mut(val.as_mut_ptr().cast(), size) }\n\n}\n\n\n\nmacro_rules! unsafe_impl_pod {\n\n ($($struct_name:ident),+ $(,)?) => {\n\n $(\n\n unsafe impl Pod for $struct_name { }\n\n )+\n\n }\n\n}\n\n\n\nunsafe_impl_pod!(u8, u16, u32, u64);\n\n\n\n#[cfg(test)]\n", "file_path": "src/pod.rs", "rank": 21, "score": 251646.38411224203 }, { "content": "// Ignores bytes starting from the first space.\n\nfn parse_u64_digits(digits: &[u8], radix: u32) -> Option<u64> {\n\n if let [b' ', ..] = digits {\n\n return None;\n\n }\n\n let mut result: u64 = 0;\n\n for &c in digits {\n\n if c == b' ' {\n\n return Some(result);\n\n } else {\n\n let x = (c as char).to_digit(radix)?;\n\n result = result\n\n .checked_mul(u64::from(radix))?\n\n .checked_add(u64::from(x))?;\n\n }\n\n }\n\n Some(result)\n\n}\n\n\n", "file_path": "src/read/archive.rs", "rank": 22, "score": 249910.74978341634 }, { "content": "fn print_mach_header<Mach: MachHeader>(p: &mut Printer<'_>, endian: Mach::Endian, header: &Mach) {\n\n p.group(\"MachHeader\", |p| {\n\n p.field_hex(\"Magic\", header.magic().to_be());\n\n print_cputype(p, header.cputype(endian), header.cpusubtype(endian));\n\n p.field_enum(\"FileType\", header.filetype(endian), FLAGS_MH_FILETYPE);\n\n p.field(\"NumberOfCmds\", header.ncmds(endian));\n\n p.field_hex(\"SizeOfCmds\", header.sizeofcmds(endian));\n\n p.field_enum(\"Flags\", header.flags(endian), FLAGS_MH);\n\n });\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 23, "score": 240041.98551592723 }, { "content": "fn print_archive(p: &mut Printer<'_>, data: &[u8]) {\n\n if let Some(archive) = ArchiveFile::parse(data).print_err(p) {\n\n p.field(\"Format\", format!(\"Archive ({:?})\", archive.kind()));\n\n for member in archive.members() {\n\n if let Some(member) = member.print_err(p) {\n\n p.blank();\n\n p.field(\"Member\", String::from_utf8_lossy(member.name()));\n\n if let Some(data) = member.data(data).print_err(p) {\n\n print_object(p, data);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/mod.rs", "rank": 24, "score": 239966.57031160692 }, { "content": "fn print_object(p: &mut Printer<'_>, data: &[u8]) {\n\n let kind = match object::FileKind::parse(data) {\n\n Ok(file) => file,\n\n Err(err) => {\n\n println!(\"Failed to parse file: {}\", err);\n\n return;\n\n }\n\n };\n\n match kind {\n\n object::FileKind::Archive => print_archive(p, data),\n\n object::FileKind::Coff => pe::print_coff(p, data),\n\n object::FileKind::DyldCache => macho::print_dyld_cache(p, data),\n\n object::FileKind::Elf32 => elf::print_elf32(p, data),\n\n object::FileKind::Elf64 => elf::print_elf64(p, data),\n\n object::FileKind::MachO32 => macho::print_macho32(p, data, 0),\n\n 
object::FileKind::MachO64 => macho::print_macho64(p, data, 0),\n\n object::FileKind::MachOFat32 => macho::print_macho_fat32(p, data),\n\n object::FileKind::MachOFat64 => macho::print_macho_fat64(p, data),\n\n object::FileKind::Pe32 => pe::print_pe32(p, data),\n\n object::FileKind::Pe64 => pe::print_pe64(p, data),\n\n // TODO\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/mod.rs", "rank": 25, "score": 239966.57031160692 }, { "content": "fn print_hash<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n _sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(hash)) = section.hash_header(endian, data).print_err(p) {\n\n p.group(\"Hash\", |p| {\n\n p.field(\"BucketCount\", hash.bucket_count.get(endian));\n\n p.field(\"ChainCount\", hash.chain_count.get(endian));\n\n });\n\n }\n\n /* TODO: add this in a test somewhere\n\n if let Ok(Some((hash_table, link))) = section.hash(endian, data) {\n\n if let Ok(symbols) = _sections.symbol_table_by_index(endian, data, link) {\n\n if let Ok(versions) = _sections.versions(endian, data) {\n\n for (index, symbol) in symbols.symbols().iter().enumerate() {\n\n let name = symbols.symbol_name(endian, symbol).unwrap();\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 26, "score": 224627.02315400602 }, { "content": "fn print_version<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n versions: Option<&VersionTable<Elf>>,\n\n version_index: VersionIndex,\n\n) {\n\n match versions.and_then(|versions| versions.version(version_index).print_err(p)) {\n\n Some(Some(version)) => {\n\n p.field_string_option(\"Version\", version_index.0, Some(version.name()))\n\n }\n\n _ => p.field_enum(\"Version\", version_index.0, FLAGS_VER_NDX),\n\n }\n\n p.flags(version_index.0, 0, FLAGS_VERSYM);\n\n}\n\n\n\nstatic FLAGS_EI_CLASS: &[Flag<u8>] = &flags!(ELFCLASSNONE, ELFCLASS32, ELFCLASS64);\n\nstatic FLAGS_EI_DATA: &[Flag<u8>] = &flags!(ELFDATANONE, ELFDATA2LSB, ELFDATA2MSB);\n\nstatic FLAGS_EV: &[Flag<u8>] = &flags!(EV_NONE, EV_CURRENT);\n\nstatic FLAGS_EI_OSABI: &[Flag<u8>] = &flags!(\n\n ELFOSABI_SYSV,\n\n ELFOSABI_HPUX,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 27, "score": 224609.54311881695 }, { "content": "pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) {\n\n let mut printer = Printer::new(w, e);\n\n print_object(&mut printer, &*file);\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/mod.rs", "rank": 28, "score": 222528.19473344937 }, { "content": "fn print_gnu_verneed<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((mut verneeds, link))) = section.gnu_verneed(endian, data).print_err(p) {\n\n let strings = sections.strings(endian, data, link).unwrap_or_default();\n\n while let Some(Some((verneed, mut vernauxs))) = verneeds.next().print_err(p) {\n\n p.group(\"VersionNeed\", |p| {\n\n p.field(\"Version\", verneed.vn_version.get(endian));\n\n p.field(\"AuxCount\", verneed.vn_cnt.get(endian));\n\n p.field_string(\n\n \"Filename\",\n\n verneed.vn_file.get(endian),\n\n verneed.file(endian, strings),\n\n );\n\n p.field(\"AuxOffset\", verneed.vn_aux.get(endian));\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 29, "score": 220777.35480574917 }, { "content": "fn print_gnu_versym<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: 
&[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((syms, _link))) = section.gnu_versym(endian, data).print_err(p) {\n\n let versions = sections.versions(endian, data).print_err(p).flatten();\n\n for (index, sym) in syms.iter().enumerate() {\n\n let version_index = VersionIndex(sym.0.get(endian));\n\n p.group(\"VersionSymbol\", |p| {\n\n p.field(\"Index\", index);\n\n print_version(p, versions.as_ref(), version_index);\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 30, "score": 220777.35480574917 }, { "content": "fn print_gnu_verdef<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((mut verdefs, link))) = section.gnu_verdef(endian, data).print_err(p) {\n\n let strings = sections.strings(endian, data, link).unwrap_or_default();\n\n while let Some(Some((verdef, mut verdauxs))) = verdefs.next().print_err(p) {\n\n p.group(\"VersionDefinition\", |p| {\n\n p.field(\"Version\", verdef.vd_version.get(endian));\n\n p.field_hex(\"Flags\", verdef.vd_flags.get(endian));\n\n p.flags(verdef.vd_flags.get(endian), 0, FLAGS_VER_FLG);\n\n p.field(\"Index\", verdef.vd_ndx.get(endian));\n\n p.field(\"AuxCount\", verdef.vd_cnt.get(endian));\n\n p.field_hex(\"Hash\", verdef.vd_hash.get(endian));\n\n p.field(\"AuxOffset\", verdef.vd_aux.get(endian));\n\n p.field(\"NextOffset\", verdef.vd_next.get(endian));\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 31, "score": 220777.35480574917 }, { "content": "fn print_section_symbols<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section_index: SectionIndex,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(symbols)) = section\n\n .symbols(endian, data, sections, section_index)\n\n .print_err(p)\n\n {\n\n let versions = if section.sh_type(endian) == SHT_DYNSYM {\n\n sections.versions(endian, data).print_err(p).flatten()\n\n } else {\n\n None\n\n };\n\n let os_stt = match elf.e_ident().os_abi {\n\n ELFOSABI_GNU => FLAGS_STT_GNU,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 32, "score": 220562.50869487342 }, { "content": "#[allow(missing_docs)]\n\npub trait Sym: Debug + Pod {\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n\n\n fn st_name(&self, endian: Self::Endian) -> u32;\n\n fn st_info(&self) -> u8;\n\n fn st_bind(&self) -> u8;\n\n fn st_type(&self) -> u8;\n\n fn st_other(&self) -> u8;\n\n fn st_visibility(&self) -> u8;\n\n fn st_shndx(&self, endian: Self::Endian) -> u16;\n\n fn st_value(&self, endian: Self::Endian) -> Self::Word;\n\n fn st_size(&self, endian: Self::Endian) -> Self::Word;\n\n\n\n /// Parse the symbol name from the string table.\n\n fn name<'data, R: ReadRef<'data>>(\n\n &self,\n\n endian: Self::Endian,\n\n strings: StringTable<'data, R>,\n\n ) -> read::Result<&'data [u8]> {\n", "file_path": "src/read/elf/symbol.rs", "rank": 33, "score": 220003.02433040418 }, { "content": "// Only for Debug impl of `Bytes`.\n\nfn debug_list_bytes(bytes: &[u8], fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut list = fmt.debug_list();\n\n list.entries(bytes.iter().take(8).copied().map(DebugByte));\n\n if bytes.len() > 8 {\n\n list.entry(&DebugLen(bytes.len()));\n\n }\n\n list.finish()\n\n}\n\n\n", "file_path": "src/read/util.rs", "rank": 34, "score": 
216935.76255675702 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n index: SymbolIndex,\n\n str_id: Option<StringId>,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn elf_section_info(\n\n &self,\n\n section: StandardSection,\n\n ) -> (&'static [u8], &'static [u8], SectionKind) {\n\n match section {\n\n StandardSection::Text => (&[], &b\".text\"[..], SectionKind::Text),\n\n StandardSection::Data => (&[], &b\".data\"[..], SectionKind::Data),\n\n StandardSection::ReadOnlyData | StandardSection::ReadOnlyString => {\n\n (&[], &b\".rodata\"[..], SectionKind::ReadOnlyData)\n\n }\n\n StandardSection::ReadOnlyDataWithRel => (&[], b\".data.rel.ro\", SectionKind::Data),\n\n StandardSection::UninitializedData => {\n\n (&[], &b\".bss\"[..], SectionKind::UninitializedData)\n\n }\n", "file_path": "src/write/elf/object.rs", "rank": 35, "score": 211693.50703931542 }, { "content": "fn print_program_headers<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n segments: &[Elf::ProgramHeader],\n\n) {\n\n for segment in segments {\n\n p.group(\"ProgramHeader\", |p| {\n\n let proc = match elf.e_machine(endian) {\n\n EM_MIPS => FLAGS_PT_MIPS,\n\n EM_PARISC => FLAGS_PT_PARISC,\n\n EM_ARM => FLAGS_PT_ARM,\n\n EM_IA_64 => FLAGS_PT_IA_64,\n\n _ => &[],\n\n };\n\n let os = match elf.e_ident().os_abi {\n\n ELFOSABI_HPUX => FLAGS_PT_HP,\n\n _ => &[],\n\n };\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 36, "score": 193475.13363837203 }, { "content": "fn print_section_headers<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n) {\n\n for (index, section) in sections.iter().enumerate() {\n\n let index = SectionIndex(index);\n\n p.group(\"SectionHeader\", |p| {\n\n p.field(\"Index\", index.0);\n\n p.field_string(\n\n \"Name\",\n\n section.sh_name(endian),\n\n sections.section_name(endian, section),\n\n );\n\n\n\n let proc = match elf.e_machine(endian) {\n\n EM_MIPS => FLAGS_SHT_MIPS,\n\n EM_PARISC => FLAGS_SHT_PARISC,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 37, "score": 193475.13363837203 }, { "content": "#[derive(Clone, Debug)]\n\nstruct WasmSymbolInternal<'data> {\n\n name: &'data str,\n\n address: u64,\n\n size: u64,\n\n kind: SymbolKind,\n\n section: SymbolSection,\n\n scope: SymbolScope,\n\n}\n\n\n\nimpl<'data, 'file> read::private::Sealed for WasmSymbol<'data, 'file> {}\n\n\n\nimpl<'data, 'file> ObjectSymbol<'data> for WasmSymbol<'data, 'file> {\n\n #[inline]\n\n fn index(&self) -> SymbolIndex {\n\n self.index\n\n }\n\n\n\n #[inline]\n\n fn name_bytes(&self) -> read::Result<&'data [u8]> {\n\n Ok(self.symbol.name.as_bytes())\n", "file_path": "src/read/wasm.rs", "rank": 38, "score": 191813.211116139 }, { "content": "fn parse_ordinal(digits: &[u8]) -> Option<u32> {\n\n if digits.is_empty() {\n\n return None;\n\n }\n\n let mut result: u32 = 0;\n\n for &c in digits {\n\n let x = (c as char).to_digit(10)?;\n\n result = result.checked_mul(10)?.checked_add(x)?;\n\n }\n\n Some(result)\n\n}\n", "file_path": "src/read/pe/export.rs", "rank": 39, "score": 191467.0357393294 }, { "content": "fn print_cputype(p: &mut Printer<'_>, cputype: u32, cpusubtype: u32) {\n\n let proc = match cputype {\n\n CPU_TYPE_ANY => FLAGS_CPU_SUBTYPE_ANY,\n\n CPU_TYPE_VAX => FLAGS_CPU_SUBTYPE_VAX,\n\n CPU_TYPE_MC680X0 => FLAGS_CPU_SUBTYPE_MC680X0,\n\n CPU_TYPE_X86 => FLAGS_CPU_SUBTYPE_X86,\n\n CPU_TYPE_X86_64 => FLAGS_CPU_SUBTYPE_X86_64,\n\n 
CPU_TYPE_MIPS => FLAGS_CPU_SUBTYPE_MIPS,\n\n CPU_TYPE_MC98000 => FLAGS_CPU_SUBTYPE_MC98000,\n\n CPU_TYPE_HPPA => FLAGS_CPU_SUBTYPE_HPPA,\n\n CPU_TYPE_ARM => FLAGS_CPU_SUBTYPE_ARM,\n\n CPU_TYPE_ARM64 => FLAGS_CPU_SUBTYPE_ARM64,\n\n CPU_TYPE_ARM64_32 => FLAGS_CPU_SUBTYPE_ARM64_32,\n\n CPU_TYPE_MC88000 => FLAGS_CPU_SUBTYPE_MC88000,\n\n CPU_TYPE_SPARC => FLAGS_CPU_SUBTYPE_SPARC,\n\n CPU_TYPE_I860 => FLAGS_CPU_SUBTYPE_I860,\n\n CPU_TYPE_POWERPC | CPU_TYPE_POWERPC64 => FLAGS_CPU_SUBTYPE_POWERPC,\n\n _ => &[],\n\n };\n\n p.field_enum(\"CpuType\", cputype, FLAGS_CPU_TYPE);\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 40, "score": 189716.71859798458 }, { "content": "fn print_file(p: &mut Printer<'_>, header: &ImageFileHeader) {\n\n p.group(\"ImageFileHeader\", |p| {\n\n p.field_enum(\"Machine\", header.machine.get(LE), FLAGS_IMAGE_FILE_MACHINE);\n\n p.field(\"NumberOfSections\", header.number_of_sections.get(LE));\n\n p.field(\"TimeDateStamp\", header.time_date_stamp.get(LE));\n\n p.field_hex(\n\n \"PointerToSymbolTable\",\n\n header.pointer_to_symbol_table.get(LE),\n\n );\n\n p.field(\"NumberOfSymbols\", header.number_of_symbols.get(LE));\n\n p.field_hex(\n\n \"SizeOfOptionalHeader\",\n\n header.size_of_optional_header.get(LE),\n\n );\n\n p.field_hex(\"Characteristics\", header.characteristics.get(LE));\n\n p.flags(header.characteristics.get(LE), 0, FLAGS_IMAGE_FILE);\n\n });\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 41, "score": 187289.36584175064 }, { "content": "fn parse_relocation<Elf: FileHeader>(\n\n header: &Elf,\n\n endian: Elf::Endian,\n\n reloc: Elf::Rela,\n\n implicit_addend: bool,\n\n) -> Relocation {\n\n let mut encoding = RelocationEncoding::Generic;\n\n let is_mips64el = header.is_mips64el(endian);\n\n let (kind, size) = match header.e_machine(endian) {\n\n elf::EM_AARCH64 => match reloc.r_type(endian, false) {\n\n elf::R_AARCH64_ABS64 => (RelocationKind::Absolute, 64),\n\n elf::R_AARCH64_ABS32 => (RelocationKind::Absolute, 32),\n\n elf::R_AARCH64_ABS16 => (RelocationKind::Absolute, 16),\n\n elf::R_AARCH64_PREL64 => (RelocationKind::Relative, 64),\n\n elf::R_AARCH64_PREL32 => (RelocationKind::Relative, 32),\n\n elf::R_AARCH64_PREL16 => (RelocationKind::Relative, 16),\n\n elf::R_AARCH64_CALL26 => {\n\n encoding = RelocationEncoding::AArch64Call;\n\n (RelocationKind::PltRelative, 26)\n\n }\n", "file_path": "src/read/elf/relocation.rs", "rank": 42, "score": 186736.64671381464 }, { "content": "#[inline]\n\npub fn bytes_of<T: Pod>(val: &T) -> &[u8] {\n\n let size = mem::size_of::<T>();\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts(slice::from_ref(val).as_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a `Pod` type to a mutable byte slice.\n", "file_path": "src/pod.rs", "rank": 43, "score": 185488.74966241745 }, { "content": "fn print_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n mut notes: NoteIterator<Elf>,\n\n) {\n\n while let Some(Some(note)) = notes.next().print_err(p) {\n\n p.group(\"Note\", |p| {\n\n let name = note.name();\n\n p.field_string_option(\"Name\", note.n_namesz(endian), Some(name));\n\n let flags = if name == ELF_NOTE_CORE || name == ELF_NOTE_LINUX {\n\n FLAGS_NT_CORE\n\n } else if name == ELF_NOTE_SOLARIS {\n\n FLAGS_NT_SOLARIS\n\n } else if name == ELF_NOTE_GNU {\n\n FLAGS_NT_GNU\n\n } else {\n\n // TODO: NT_VERSION\n\n &[]\n\n };\n\n 
p.field_enum(\"Type\", note.n_type(endian), flags);\n\n // TODO: interpret desc\n\n p.field_bytes(\"Desc\", note.desc());\n\n });\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 44, "score": 183750.44569393288 }, { "content": "fn print_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n elf: &Elf,\n\n dynamic: &[Elf::Dyn],\n\n dynstr: StringTable,\n\n) {\n\n let proc = match elf.e_machine(endian) {\n\n EM_SPARC => FLAGS_DT_SPARC,\n\n EM_MIPS => FLAGS_DT_MIPS,\n\n EM_ALPHA => FLAGS_DT_ALPHA,\n\n EM_PPC => FLAGS_DT_PPC,\n\n EM_PPC64 => FLAGS_DT_PPC64,\n\n EM_IA_64 => FLAGS_DT_IA_64,\n\n EM_ALTERA_NIOS2 => FLAGS_DT_NIOS2,\n\n _ => &[],\n\n };\n\n for d in dynamic {\n\n let tag = d.d_tag(endian).into();\n\n let val = d.d_val(endian).into();\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 45, "score": 183750.44569393288 }, { "content": "enum SymbolInternal<'data, 'file, R>\n\nwhere\n\n 'data: 'file,\n\n R: ReadRef<'data>,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff((coff::CoffSymbol<'data, 'file, R>, PhantomData<R>)),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(\n\n (\n\n elf::ElfSymbol32<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n\n ),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(\n\n (\n\n elf::ElfSymbol64<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n", "file_path": "src/read/any.rs", "rank": 46, "score": 183088.27647468843 }, { "content": "#[inline]\n\npub fn bytes_of_slice<T: Pod>(val: &[T]) -> &[u8] {\n\n let size = val.len().wrapping_mul(mem::size_of::<T>());\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts(val.as_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a slice of a `Pod` type to a mutable byte slice.\n", "file_path": "src/pod.rs", "rank": 47, "score": 182275.69241439167 }, { "content": "fn print_optional(p: &mut Printer<'_>, header: &impl ImageOptionalHeader) {\n\n p.group(\"ImageOptionalHeader\", |p| {\n\n p.field_hex(\"Magic\", header.magic());\n\n p.field(\"MajorLinkerVersion\", header.major_linker_version());\n\n p.field(\"MinorLinkerVersion\", header.minor_linker_version());\n\n p.field_hex(\"SizeOfCode\", header.size_of_code());\n\n p.field_hex(\"SizeOfInitializedData\", header.size_of_initialized_data());\n\n p.field_hex(\n\n \"SizeOfUninitializedData\",\n\n header.size_of_uninitialized_data(),\n\n );\n\n p.field_hex(\"AddressOfEntryPoint\", header.address_of_entry_point());\n\n p.field_hex(\"BaseOfCode\", header.base_of_code());\n\n p.field_hex(\"ImageBase\", header.image_base());\n\n p.field_hex(\"SectionAlignment\", header.section_alignment());\n\n p.field(\n\n \"MajorOperatingSystemVersion\",\n\n header.major_operating_system_version(),\n\n );\n\n p.field(\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 48, "score": 181252.71894957032 }, { "content": "fn print_section_rel<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((relocations, link))) = section.rel(endian, data).print_err(p) {\n\n let symbols = sections\n\n .symbol_table_by_index(endian, data, link)\n\n .print_err(p);\n\n let proc = rel_flag_type(endian, elf);\n\n for relocation in relocations {\n\n p.group(\"Relocation\", |p| {\n\n p.field_hex(\"Offset\", relocation.r_offset(endian).into());\n\n p.field_enum(\"Type\", 
relocation.r_type(endian), proc);\n\n let sym = relocation.r_sym(endian);\n\n print_rel_symbol(p, endian, symbols, sym);\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 49, "score": 180898.83353741473 }, { "content": "fn print_section_rela<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((relocations, link))) = section.rela(endian, data).print_err(p) {\n\n let symbols = sections\n\n .symbol_table_by_index(endian, data, link)\n\n .print_err(p);\n\n let proc = rel_flag_type(endian, elf);\n\n for relocation in relocations {\n\n p.group(\"Relocation\", |p| {\n\n p.field_hex(\"Offset\", relocation.r_offset(endian).into());\n\n p.field_enum(\n\n \"Type\",\n\n relocation.r_type(endian, elf.is_mips64el(endian)),\n\n proc,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 50, "score": 180898.83353741473 }, { "content": "fn print_section_group<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((flag, members))) = section.group(endian, data).print_err(p) {\n\n p.field_enum(\"GroupFlag\", flag, FLAGS_GRP);\n\n p.group(\"GroupSections\", |p| {\n\n for member in members {\n\n let index = member.get(endian);\n\n p.print_indent();\n\n if let Some(section) = sections.section(SectionIndex(index as usize)).print_err(p) {\n\n if let Some(name) = sections.section_name(endian, section).print_err(p) {\n\n p.print_string(name);\n\n writeln!(p.w, \" ({})\", index).unwrap();\n\n } else {\n\n writeln!(p.w, \"{}\", index).unwrap();\n\n }\n\n } else {\n\n writeln!(p.w, \"{}\", index).unwrap();\n\n }\n\n }\n\n });\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 51, "score": 180898.83353741473 }, { "content": "fn print_segment_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n segments: &[Elf::ProgramHeader],\n\n segment: &Elf::ProgramHeader,\n\n) {\n\n if let Some(Some(dynamic)) = segment.dynamic(endian, data).print_err(p) {\n\n // TODO: add a helper API for this and the other mandatory tags?\n\n let mut strtab = 0;\n\n let mut strsz = 0;\n\n for d in dynamic {\n\n let tag = d.d_tag(endian).into();\n\n if tag == DT_STRTAB.into() {\n\n strtab = d.d_val(endian).into();\n\n } else if tag == DT_STRSZ.into() {\n\n strsz = d.d_val(endian).into();\n\n }\n\n }\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 52, "score": 180898.83353741473 }, { "content": "fn print_section_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((dynamic, index))) = section.dynamic(endian, data).print_err(p) {\n\n let strings = sections.strings(endian, data, index).unwrap_or_default();\n\n print_dynamic(p, endian, elf, dynamic, strings);\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 53, "score": 180898.83353741473 }, { "content": "fn print_segment_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n segment: &Elf::ProgramHeader,\n\n) {\n\n if let Some(Some(notes)) = segment.notes(endian, data).print_err(p) {\n\n print_notes(p, endian, notes);\n\n }\n\n}\n\n\n", 
"file_path": "crates/examples/src/readobj/elf.rs", "rank": 54, "score": 180898.83353741473 }, { "content": "fn print_section_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(notes)) = section.notes(endian, data).print_err(p) {\n\n print_notes(p, endian, notes);\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 55, "score": 180898.83353741473 }, { "content": "#[derive(Debug)]\n\nenum SymbolIteratorInternal<'data, 'file, R>\n\nwhere\n\n 'data: 'file,\n\n R: ReadRef<'data>,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff((coff::CoffSymbolIterator<'data, 'file, R>, PhantomData<R>)),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(\n\n (\n\n elf::ElfSymbolIterator32<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n\n ),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(\n\n (\n\n elf::ElfSymbolIterator64<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n", "file_path": "src/read/any.rs", "rank": 56, "score": 179513.5128312378 }, { "content": "#[derive(Debug)]\n\nenum SymbolTableInternal<'data, 'file, R>\n\nwhere\n\n 'data: 'file,\n\n R: ReadRef<'data>,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff((coff::CoffSymbolTable<'data, 'file, R>, PhantomData<R>)),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(\n\n (\n\n elf::ElfSymbolTable32<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n\n ),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(\n\n (\n\n elf::ElfSymbolTable64<'data, 'file, Endianness, R>,\n\n PhantomData<R>,\n\n ),\n", "file_path": "src/read/any.rs", "rank": 57, "score": 179513.5128312378 }, { "content": "#[inline]\n\npub fn slice_from_bytes_mut<T: Pod>(\n\n data: &mut [u8],\n\n count: usize,\n\n) -> Result<(&mut [T], &mut [u8])> {\n\n let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;\n\n if size > data.len() {\n\n return Err(());\n\n }\n\n let (data, tail) = data.split_at_mut(size);\n\n let ptr = data.as_mut_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let slice = unsafe { slice::from_raw_parts_mut(ptr.cast(), count) };\n\n Ok((slice, tail))\n\n}\n\n\n\n/// Cast a `Pod` type to a byte slice.\n", "file_path": "src/pod.rs", "rank": 58, "score": 177003.75362288835 }, { "content": "#[derive(Debug)]\n\nenum FileInternal<'data, R: ReadRef<'data>> {\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffFile<'data, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfFile32<'data, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfFile64<'data, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOFile32<'data, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOFile64<'data, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeFile32<'data, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeFile64<'data, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmFile<'data, R>),\n\n}\n\n\n\nimpl<'data, R: ReadRef<'data>> File<'data, R> {\n", "file_path": "src/read/any.rs", "rank": 59, "score": 175830.74456513458 }, { "content": "fn print_macho<Mach: MachHeader<Endian = Endianness>>(\n\n p: &mut Printer<'_>,\n\n header: &Mach,\n\n data: &[u8],\n\n offset: u64,\n\n) {\n\n if let Some(endian) = header.endian().print_err(p) {\n\n let mut state = MachState::default();\n\n print_mach_header(p, endian, header);\n\n if 
let Some(mut commands) = header.load_commands(endian, data, offset).print_err(p) {\n\n while let Some(Some(command)) = commands.next().print_err(p) {\n\n print_load_command(p, endian, data, header, command, &mut state);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 60, "score": 174607.25846125698 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n index: usize,\n\n str_id: Option<StringId>,\n\n aux_count: u8,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn coff_section_info(\n\n &self,\n\n section: StandardSection,\n\n ) -> (&'static [u8], &'static [u8], SectionKind) {\n\n match section {\n\n StandardSection::Text => (&[], &b\".text\"[..], SectionKind::Text),\n\n StandardSection::Data => (&[], &b\".data\"[..], SectionKind::Data),\n\n StandardSection::ReadOnlyData\n\n | StandardSection::ReadOnlyDataWithRel\n\n | StandardSection::ReadOnlyString => (&[], &b\".rdata\"[..], SectionKind::ReadOnlyData),\n\n StandardSection::UninitializedData => {\n\n (&[], &b\".bss\"[..], SectionKind::UninitializedData)\n\n }\n", "file_path": "src/write/coff.rs", "rank": 61, "score": 173148.34850770468 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n emit: bool,\n\n index: usize,\n\n str_id: Option<StringId>,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn macho_set_subsections_via_symbols(&mut self) {\n\n let flags = match self.flags {\n\n FileFlags::MachO { flags } => flags,\n\n _ => 0,\n\n };\n\n self.flags = FileFlags::MachO {\n\n flags: flags | macho::MH_SUBSECTIONS_VIA_SYMBOLS,\n\n };\n\n }\n\n\n\n pub(crate) fn macho_segment_name(&self, segment: StandardSegment) -> &'static [u8] {\n\n match segment {\n\n StandardSegment::Text => &b\"__TEXT\"[..],\n", "file_path": "src/write/macho.rs", "rank": 62, "score": 173148.34850770468 }, { "content": "enum ComdatInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffComdat<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfComdat32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfComdat64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOComdat32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOComdat64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeComdat32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeComdat64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmComdat<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 63, "score": 169311.93965153053 }, { "content": "#[derive(Debug)]\n\nenum SegmentInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffSegment<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfSegment32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfSegment64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOSegment32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOSegment64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeSegment32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeSegment64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmSegment<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 64, "score": 
169311.93965153053 }, { "content": "enum SectionInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffSection<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfSection32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfSection64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOSection32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOSection64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeSection32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeSection64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmSection<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 65, "score": 169311.93965153053 }, { "content": "#[derive(Debug)]\n\nenum ComdatIteratorInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffComdatIterator<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfComdatIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfComdatIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOComdatIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOComdatIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeComdatIterator32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeComdatIterator64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmComdatIterator<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 66, "score": 167417.82316432698 }, { "content": "#[derive(Debug)]\n\nenum SegmentIteratorInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffSegmentIterator<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfSegmentIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfSegmentIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOSegmentIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOSegmentIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeSegmentIterator32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeSegmentIterator64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmSegmentIterator<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 67, "score": 167417.82316432698 }, { "content": "#[derive(Debug)]\n\nenum SectionIteratorInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffSectionIterator<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfSectionIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfSectionIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOSectionIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOSectionIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeSectionIterator32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeSectionIterator64<'data, 'file, R>),\n\n #[cfg(feature = 
\"wasm\")]\n\n Wasm(wasm::WasmSectionIterator<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 68, "score": 167417.82316432698 }, { "content": "#[derive(Clone, Copy)]\n\nstruct ComdatOffsets {\n\n offset: usize,\n\n str_id: StringId,\n\n}\n\n\n", "file_path": "src/write/elf/object.rs", "rank": 69, "score": 166375.9443803213 }, { "content": "#[derive(Clone, Copy)]\n\nstruct SectionOffsets {\n\n index: SectionIndex,\n\n offset: usize,\n\n str_id: StringId,\n\n reloc_offset: usize,\n\n reloc_str_id: Option<StringId>,\n\n}\n\n\n", "file_path": "src/write/elf/object.rs", "rank": 70, "score": 166375.9443803213 }, { "content": "/// A symbol table entry.\n\npub trait ObjectSymbol<'data>: read::private::Sealed {\n\n /// The index of the symbol.\n\n fn index(&self) -> SymbolIndex;\n\n\n\n /// The name of the symbol.\n\n fn name_bytes(&self) -> Result<&'data [u8]>;\n\n\n\n /// The name of the symbol.\n\n ///\n\n /// Returns an error if the name is not UTF-8.\n\n fn name(&self) -> Result<&'data str>;\n\n\n\n /// The address of the symbol. May be zero if the address is unknown.\n\n fn address(&self) -> u64;\n\n\n\n /// The size of the symbol. May be zero if the size is unknown.\n\n fn size(&self) -> u64;\n\n\n\n /// Return the kind of this symbol.\n\n fn kind(&self) -> SymbolKind;\n", "file_path": "src/read/traits.rs", "rank": 71, "score": 165742.15553553659 }, { "content": "#[derive(Debug)]\n\nenum ComdatSectionIteratorInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffComdatSectionIterator<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfComdatSectionIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfComdatSectionIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachOComdatSectionIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachOComdatSectionIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeComdatSectionIterator32<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeComdatSectionIterator64<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmComdatSectionIterator<'data, 'file, R>),\n", "file_path": "src/read/any.rs", "rank": 72, "score": 165601.609357303 }, { "content": "#[derive(Debug)]\n\nenum SectionRelocationIteratorInternal<'data, 'file, R: ReadRef<'data>>\n\nwhere\n\n 'data: 'file,\n\n{\n\n #[cfg(feature = \"coff\")]\n\n Coff(coff::CoffRelocationIterator<'data, 'file, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfSectionRelocationIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfSectionRelocationIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO32(macho::MachORelocationIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"macho\")]\n\n MachO64(macho::MachORelocationIterator64<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe32(pe::PeRelocationIterator<'data, 'file, R>),\n\n #[cfg(feature = \"pe\")]\n\n Pe64(pe::PeRelocationIterator<'data, 'file, R>),\n\n #[cfg(feature = \"wasm\")]\n\n Wasm(wasm::WasmRelocationIterator<'data, 'file, R>),\n\n}\n\n\n\nimpl<'data, 'file, R: ReadRef<'data>> Iterator for SectionRelocationIterator<'data, 'file, R> {\n\n type Item = (u64, Relocation);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n with_inner_mut!(self.inner, SectionRelocationIteratorInternal, |x| 
x.next())\n\n }\n\n}\n", "file_path": "src/read/any.rs", "rank": 73, "score": 165601.609357303 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn compression_gnu() {\n\n use object::read::ObjectSection;\n\n use std::io::Write;\n\n\n\n let data = b\"test data data data\";\n\n let len = data.len() as u32;\n\n\n\n let mut buf = Vec::new();\n\n buf.write_all(b\"ZLIB\\0\\0\\0\\0\").unwrap();\n\n buf.write_all(&len.to_be_bytes()).unwrap();\n\n let mut encoder = flate2::write::ZlibEncoder::new(buf, flate2::Compression::default());\n\n encoder.write_all(data).unwrap();\n\n let compressed = encoder.finish().unwrap();\n\n\n\n let mut object =\n\n write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);\n\n let section = object.add_section(\n\n Vec::new(),\n\n b\".zdebug_info\".to_vec(),\n\n object::SectionKind::Other,\n", "file_path": "tests/round_trip/elf.rs", "rank": 74, "score": 163272.50856206854 }, { "content": "/// A symbol table.\n\npub trait ObjectSymbolTable<'data>: read::private::Sealed {\n\n /// A symbol table entry.\n\n type Symbol: ObjectSymbol<'data>;\n\n\n\n /// An iterator over the symbols in a symbol table.\n\n type SymbolIterator: Iterator<Item = Self::Symbol>;\n\n\n\n /// Get an iterator over the symbols in the table.\n\n ///\n\n /// This may skip over symbols that are malformed or unsupported.\n\n fn symbols(&self) -> Self::SymbolIterator;\n\n\n\n /// Get the symbol at the given index.\n\n ///\n\n /// The meaning of the index depends on the object file.\n\n ///\n\n /// Returns an error if the index is invalid.\n\n fn symbol_by_index(&self, index: SymbolIndex) -> Result<Self::Symbol>;\n\n}\n\n\n", "file_path": "src/read/traits.rs", "rank": 75, "score": 162690.50031414803 }, { "content": "fn dump_parsed_object<W: Write, E: Write>(w: &mut W, e: &mut E, file: &object::File) -> Result<()> {\n\n writeln!(\n\n w,\n\n \"Format: {:?} {:?}-endian {}-bit\",\n\n file.format(),\n\n file.endianness(),\n\n if file.is_64() { \"64\" } else { \"32\" }\n\n )?;\n\n writeln!(w, \"Kind: {:?}\", file.kind())?;\n\n writeln!(w, \"Architecture: {:?}\", file.architecture())?;\n\n writeln!(w, \"Flags: {:x?}\", file.flags())?;\n\n writeln!(\n\n w,\n\n \"Relative Address Base: {:x?}\",\n\n file.relative_address_base()\n\n )?;\n\n writeln!(w, \"Entry Address: {:x?}\", file.entry())?;\n\n\n\n match file.mach_uuid() {\n\n Ok(Some(uuid)) => writeln!(w, \"Mach UUID: {:x?}\", uuid)?,\n", "file_path": "crates/examples/src/objdump.rs", "rank": 76, "score": 161608.28662837984 }, { "content": "fn testfile<F>(path: &str, data: &[u8], ext: &str, f: F) -> bool\n\nwhere\n\n F: FnOnce(&mut dyn Write, &mut dyn Write, &[u8]),\n\n{\n\n if glob::glob(&format!(\"crates/examples/{}.{}*\", path, ext))\n\n .unwrap()\n\n .find_map(Result::ok)\n\n .is_none()\n\n {\n\n return false;\n\n }\n\n\n\n // TODO: print diffs for mismatches\n\n let mut fail = false;\n\n let mut out = Vec::new();\n\n let mut err = Vec::new();\n\n f(&mut out, &mut err, data);\n\n\n\n // Check exact match of output.\n\n let out_path = &format!(\"crates/examples/{}.{}\", path, ext);\n", "file_path": "crates/examples/tests/testfiles.rs", "rank": 77, "score": 159308.45627380605 }, { "content": "fn print_symbols(p: &mut Printer<'_>, sections: Option<&SectionTable>, symbols: &SymbolTable) {\n\n for (index, symbol) in symbols.iter() {\n\n p.group(\"ImageSymbol\", |p| {\n\n p.field(\"Index\", index);\n\n if let Some(name) = symbol.name(symbols.strings()).print_err(p) {\n\n p.field_inline_string(\"Name\", name);\n\n } else {\n\n 
p.field(\"Name\", format!(\"{:X?}\", symbol.name));\n\n }\n\n p.field_hex(\"Value\", symbol.value.get(LE));\n\n let section = symbol.section_number.get(LE);\n\n if section == 0 || section >= IMAGE_SYM_SECTION_MAX {\n\n p.field_enum(\"Section\", section, FLAGS_IMAGE_SYM);\n\n } else {\n\n let section_name = sections.and_then(|sections| {\n\n sections\n\n .section(section.into())\n\n .and_then(|section| section.name(symbols.strings()))\n\n .print_err(p)\n\n });\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 78, "score": 159276.70338518976 }, { "content": "fn print_ident(p: &mut Printer<'_>, ident: &Ident) {\n\n p.field(\"Magic\", format!(\"{:X?}\", ident.magic));\n\n p.field_enum(\"Class\", ident.class, &FLAGS_EI_CLASS);\n\n p.field_enum(\"Data\", ident.data, &FLAGS_EI_DATA);\n\n p.field_enum(\"Version\", ident.version, &FLAGS_EV);\n\n p.field_enum(\"OsAbi\", ident.os_abi, &FLAGS_EI_OSABI);\n\n p.field_hex(\"AbiVersion\", ident.abi_version);\n\n p.field(\"Unused\", format!(\"{:X?}\", ident.padding));\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 79, "score": 156477.95677182035 }, { "content": "struct DebugByte(u8);\n\n\n\nimpl fmt::Debug for DebugByte {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"0x{:02x}\", self.0)\n\n }\n\n}\n\n\n", "file_path": "src/read/util.rs", "rank": 80, "score": 150564.34601490063 }, { "content": "#[allow(missing_docs)]\n\npub trait FileHeader: Debug + Pod {\n\n // Ideally this would be a `u64: From<Word>`, but can't express that.\n\n type Word: Into<u64>;\n\n type Sword: Into<i64>;\n\n type Endian: endian::Endian;\n\n type ProgramHeader: ProgramHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type SectionHeader: SectionHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type CompressionHeader: CompressionHeader<Endian = Self::Endian, Word = Self::Word>;\n\n type NoteHeader: NoteHeader<Endian = Self::Endian>;\n\n type Dyn: Dyn<Endian = Self::Endian, Word = Self::Word>;\n\n type Sym: Sym<Endian = Self::Endian, Word = Self::Word>;\n\n type Rel: Rel<Endian = Self::Endian, Word = Self::Word>;\n\n type Rela: Rela<Endian = Self::Endian, Word = Self::Word> + From<Self::Rel>;\n\n\n\n /// Return true if this type is a 64-bit header.\n\n ///\n\n /// This is a property of the type, not a value in the header data.\n\n fn is_type_64(&self) -> bool;\n\n\n\n fn e_ident(&self) -> &elf::Ident;\n", "file_path": "src/read/elf/file.rs", "rank": 81, "score": 147823.0004362949 }, { "content": "#[allow(missing_docs)]\n\npub trait SectionHeader: Debug + Pod {\n\n type Elf: FileHeader<SectionHeader = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n\n\n fn sh_name(&self, endian: Self::Endian) -> u32;\n\n fn sh_type(&self, endian: Self::Endian) -> u32;\n\n fn sh_flags(&self, endian: Self::Endian) -> Self::Word;\n\n fn sh_addr(&self, endian: Self::Endian) -> Self::Word;\n\n fn sh_offset(&self, endian: Self::Endian) -> Self::Word;\n\n fn sh_size(&self, endian: Self::Endian) -> Self::Word;\n\n fn sh_link(&self, endian: Self::Endian) -> u32;\n\n fn sh_info(&self, endian: Self::Endian) -> u32;\n\n fn sh_addralign(&self, endian: Self::Endian) -> Self::Word;\n\n fn sh_entsize(&self, endian: Self::Endian) -> Self::Word;\n\n\n\n /// Parse the section name from the string table.\n\n fn name<'data, R: ReadRef<'data>>(\n\n &self,\n\n endian: Self::Endian,\n", "file_path": "src/read/elf/section.rs", "rank": 82, "score": 
147823.0004362949 }, { "content": "#[allow(missing_docs)]\n\npub trait CompressionHeader: Debug + Pod {\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n\n\n fn ch_type(&self, endian: Self::Endian) -> u32;\n\n fn ch_size(&self, endian: Self::Endian) -> Self::Word;\n\n fn ch_addralign(&self, endian: Self::Endian) -> Self::Word;\n\n}\n\n\n\nimpl<Endian: endian::Endian> CompressionHeader for elf::CompressionHeader32<Endian> {\n\n type Word = u32;\n\n type Endian = Endian;\n\n\n\n #[inline]\n\n fn ch_type(&self, endian: Self::Endian) -> u32 {\n\n self.ch_type.get(endian)\n\n }\n\n\n\n #[inline]\n\n fn ch_size(&self, endian: Self::Endian) -> Self::Word {\n", "file_path": "src/read/elf/compression.rs", "rank": 83, "score": 147823.0004362949 }, { "content": "#[allow(missing_docs)]\n\npub trait NoteHeader: Debug + Pod {\n\n type Endian: endian::Endian;\n\n\n\n fn n_namesz(&self, endian: Self::Endian) -> u32;\n\n fn n_descsz(&self, endian: Self::Endian) -> u32;\n\n fn n_type(&self, endian: Self::Endian) -> u32;\n\n}\n\n\n\nimpl<Endian: endian::Endian> NoteHeader for elf::NoteHeader32<Endian> {\n\n type Endian = Endian;\n\n\n\n #[inline]\n\n fn n_namesz(&self, endian: Self::Endian) -> u32 {\n\n self.n_namesz.get(endian)\n\n }\n\n\n\n #[inline]\n\n fn n_descsz(&self, endian: Self::Endian) -> u32 {\n\n self.n_descsz.get(endian)\n\n }\n", "file_path": "src/read/elf/note.rs", "rank": 84, "score": 147823.0004362949 }, { "content": "#[allow(missing_docs)]\n\npub trait ProgramHeader: Debug + Pod {\n\n type Elf: FileHeader<ProgramHeader = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n\n\n fn p_type(&self, endian: Self::Endian) -> u32;\n\n fn p_flags(&self, endian: Self::Endian) -> u32;\n\n fn p_offset(&self, endian: Self::Endian) -> Self::Word;\n\n fn p_vaddr(&self, endian: Self::Endian) -> Self::Word;\n\n fn p_paddr(&self, endian: Self::Endian) -> Self::Word;\n\n fn p_filesz(&self, endian: Self::Endian) -> Self::Word;\n\n fn p_memsz(&self, endian: Self::Endian) -> Self::Word;\n\n fn p_align(&self, endian: Self::Endian) -> Self::Word;\n\n\n\n /// Return the offset and size of the segment in the file.\n\n fn file_range(&self, endian: Self::Endian) -> (u64, u64) {\n\n (self.p_offset(endian).into(), self.p_filesz(endian).into())\n\n }\n\n\n\n /// Return the segment data.\n", "file_path": "src/read/elf/segment.rs", "rank": 85, "score": 147823.0004362949 }, { "content": "fn find_subslice(haystack: &[u8], needle: &[u8]) -> bool {\n\n haystack\n\n .windows(needle.len())\n\n .any(|window| window == needle)\n\n}\n", "file_path": "crates/examples/tests/testfiles.rs", "rank": 86, "score": 143203.66192565026 }, { "content": "#[test]\n\nfn parse_self() {\n\n let exe = env::current_exe().unwrap();\n\n let data = fs::read(exe).unwrap();\n\n let object = File::parse(&*data).unwrap();\n\n assert!(object.entry() != 0);\n\n assert!(object.sections().count() != 0);\n\n}\n\n\n", "file_path": "tests/parse_self.rs", "rank": 87, "score": 134719.4327631728 }, { "content": "#[derive(Debug)]\n\nenum DynamicRelocationIteratorInternal<'data, 'file, R>\n\nwhere\n\n 'data: 'file,\n\n R: ReadRef<'data>,\n\n{\n\n #[cfg(feature = \"elf\")]\n\n Elf32(elf::ElfDynamicRelocationIterator32<'data, 'file, Endianness, R>),\n\n #[cfg(feature = \"elf\")]\n\n Elf64(elf::ElfDynamicRelocationIterator64<'data, 'file, Endianness, R>),\n\n // We need to always use the lifetime parameters.\n\n #[allow(unused)]\n\n None(PhantomData<(&'data (), &'file (), 
R)>),\n\n}\n\n\n\nimpl<'data, 'file, R: ReadRef<'data>> Iterator for DynamicRelocationIterator<'data, 'file, R> {\n\n type Item = (u64, Relocation);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.inner {\n\n #[cfg(feature = \"elf\")]\n", "file_path": "src/read/any.rs", "rank": 88, "score": 134379.4719357543 }, { "content": "#[derive(Debug)]\n\nstruct ReadCacheInternal<R: Read + Seek> {\n\n read: R,\n\n bufs: HashMap<(u64, u64), Box<[u8]>>,\n\n strings: HashMap<(u64, u8), Box<[u8]>>,\n\n}\n\n\n\nimpl<R: Read + Seek> ReadCache<R> {\n\n /// Create an empty `ReadCache` for the given stream.\n\n pub fn new(read: R) -> Self {\n\n ReadCache {\n\n cache: RefCell::new(ReadCacheInternal {\n\n read,\n\n bufs: HashMap::new(),\n\n strings: HashMap::new(),\n\n }),\n\n }\n\n }\n\n\n\n /// Return an implementation of `ReadRef` that restricts reads\n\n /// to the given range of the stream.\n", "file_path": "src/read/read_cache.rs", "rank": 89, "score": 132708.05120477287 }, { "content": "#[cfg(feature = \"std\")]\n\n#[test]\n\nfn parse_self_cache() {\n\n use object::read::{ReadCache, ReadRef};\n\n let exe = env::current_exe().unwrap();\n\n let file = fs::File::open(exe).unwrap();\n\n let cache = ReadCache::new(file);\n\n let data = cache.range(0, cache.len().unwrap());\n\n let object = File::parse(data).unwrap();\n\n assert!(object.entry() != 0);\n\n assert!(object.sections().count() != 0);\n\n}\n", "file_path": "tests/parse_self.rs", "rank": 90, "score": 132347.8254631897 }, { "content": "/// A trait for using an endianness specification.\n\n///\n\n/// Provides methods for converting between the specified endianness and\n\n/// the native endianness of the target machine.\n\n///\n\n/// This trait does not require that the endianness is known at compile time.\n\npub trait Endian: Debug + Default + Clone + Copy + PartialEq + Eq + 'static {\n\n /// Construct a specification for the endianness of some values.\n\n ///\n\n /// Returns `None` if the type does not support specifying the given endianness.\n\n fn from_big_endian(big_endian: bool) -> Option<Self>;\n\n\n\n /// Construct a specification for the endianness of some values.\n\n ///\n\n /// Returns `None` if the type does not support specifying the given endianness.\n\n fn from_little_endian(little_endian: bool) -> Option<Self> {\n\n Self::from_big_endian(!little_endian)\n\n }\n\n\n\n /// Return true for big endian byte order.\n\n fn is_big_endian(self) -> bool;\n\n\n\n /// Return true for little endian byte order.\n\n #[inline]\n\n fn is_little_endian(self) -> bool {\n\n !self.is_big_endian()\n", "file_path": "src/endian.rs", "rank": 91, "score": 127262.46427990052 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SectionOffsets {\n\n offset: usize,\n\n str_id: Option<StringId>,\n\n reloc_offset: usize,\n\n selection: u8,\n\n associative_section: u16,\n\n}\n\n\n", "file_path": "src/write/coff.rs", "rank": 92, "score": 126500.90713884834 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SectionOffsets {\n\n index: usize,\n\n offset: usize,\n\n address: u64,\n\n reloc_offset: usize,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 93, "score": 126500.90713884834 }, { "content": "struct MachHeader {\n\n cputype: u32,\n\n cpusubtype: u32,\n\n filetype: u32,\n\n ncmds: u32,\n\n sizeofcmds: u32,\n\n flags: u32,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 94, "score": 126406.14403011774 }, { "content": "/// A loadable segment defined in an object file.\n\n///\n\n/// For ELF, this is a program header with 
type `PT_LOAD`.\n\n/// For Mach-O, this is a load command with type `LC_SEGMENT` or `LC_SEGMENT_64`.\n\npub trait ObjectSegment<'data>: read::private::Sealed {\n\n /// Returns the virtual address of the segment.\n\n fn address(&self) -> u64;\n\n\n\n /// Returns the size of the segment in memory.\n\n fn size(&self) -> u64;\n\n\n\n /// Returns the alignment of the segment in memory.\n\n fn align(&self) -> u64;\n\n\n\n /// Returns the offset and size of the segment in the file.\n\n fn file_range(&self) -> (u64, u64);\n\n\n\n /// Returns a reference to the file contents of the segment.\n\n ///\n\n /// The length of this data may be different from the size of the\n\n /// segment in memory.\n\n fn data(&self) -> Result<&'data [u8]>;\n\n\n\n /// Return the segment data in the given range.\n", "file_path": "src/read/traits.rs", "rank": 95, "score": 125067.65718323209 }, { "content": "/// A section defined in an object file.\n\npub trait ObjectSection<'data>: read::private::Sealed {\n\n /// An iterator over the relocations for a section.\n\n ///\n\n /// The first field in the item tuple is the section offset\n\n /// that the relocation applies to.\n\n type RelocationIterator: Iterator<Item = (u64, Relocation)>;\n\n\n\n /// Returns the section index.\n\n fn index(&self) -> SectionIndex;\n\n\n\n /// Returns the address of the section.\n\n fn address(&self) -> u64;\n\n\n\n /// Returns the size of the section in memory.\n\n fn size(&self) -> u64;\n\n\n\n /// Returns the alignment of the section in memory.\n\n fn align(&self) -> u64;\n\n\n\n /// Returns offset and size of on-disk segment (if any).\n", "file_path": "src/read/traits.rs", "rank": 96, "score": 125057.9853639027 }, { "content": "/// A COMDAT section group defined in an object file.\n\npub trait ObjectComdat<'data>: read::private::Sealed {\n\n /// An iterator over the sections in the object file.\n\n type SectionIterator: Iterator<Item = SectionIndex>;\n\n\n\n /// Returns the COMDAT selection kind.\n\n fn kind(&self) -> ComdatKind;\n\n\n\n /// Returns the index of the symbol used for the name of COMDAT section group.\n\n fn symbol(&self) -> SymbolIndex;\n\n\n\n /// Returns the name of the COMDAT section group.\n\n fn name_bytes(&self) -> Result<&[u8]>;\n\n\n\n /// Returns the name of the COMDAT section group.\n\n ///\n\n /// Returns an error if the name is not UTF-8.\n\n fn name(&self) -> Result<&str>;\n\n\n\n /// Get the sections in this section group.\n\n fn sections(&self) -> Self::SectionIterator;\n\n}\n\n\n", "file_path": "src/read/traits.rs", "rank": 97, "score": 125057.9853639027 }, { "content": "pub fn print<W: Write, E: Write>(\n\n w: &mut W,\n\n e: &mut E,\n\n file: &[u8],\n\n member_names: Vec<String>,\n\n) -> Result<()> {\n\n let mut member_names: Vec<_> = member_names.into_iter().map(|name| (name, false)).collect();\n\n\n\n if let Ok(archive) = ArchiveFile::parse(&*file) {\n\n writeln!(w, \"Format: Archive (kind: {:?})\", archive.kind())?;\n\n for member in archive.members() {\n\n match member {\n\n Ok(member) => {\n\n if find_member(&mut member_names, member.name()) {\n\n writeln!(w)?;\n\n writeln!(w, \"{}:\", String::from_utf8_lossy(member.name()))?;\n\n if let Ok(data) = member.data(&*file) {\n\n dump_object(w, e, data)?;\n\n }\n\n }\n", "file_path": "crates/examples/src/objdump.rs", "rank": 98, "score": 123329.11931482567 }, { "content": "struct DebugLen(usize);\n\n\n\nimpl fmt::Debug for DebugLen {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"...; {}\", self.0)\n\n }\n\n}\n\n\n\n/// A 
newtype for byte strings.\n\n///\n\n/// For byte slices that are strings of an unknown encoding.\n\n///\n\n/// Provides a `Debug` implementation that interprets the bytes as UTF-8.\n\n#[derive(Default, Clone, Copy, PartialEq, Eq)]\n\npub(crate) struct ByteString<'data>(pub &'data [u8]);\n\n\n\nimpl<'data> fmt::Debug for ByteString<'data> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"\\\"{}\\\"\", String::from_utf8_lossy(self.0))\n\n }\n", "file_path": "src/read/util.rs", "rank": 99, "score": 120979.4556807818 } ]
Rust
gstreamer/src/buffer_pool.rs
heftig/gstreamer-rs
03c3580c224a10c86e89b34380215a551e1f7bff
use BufferPool; use Structure; use glib; use glib::translate::{from_glib, from_glib_full, from_glib_none, ToGlib, ToGlibPtr, ToGlibPtrMut}; use glib::IsA; use ffi; use std::mem; use std::ops; use std::ptr; #[derive(Debug, PartialEq, Eq)] pub struct BufferPoolConfig(Structure); impl ops::Deref for BufferPoolConfig { type Target = ::StructureRef; fn deref(&self) -> &::StructureRef { self.0.deref() } } impl ops::DerefMut for BufferPoolConfig { fn deref_mut(&mut self) -> &mut ::StructureRef { self.0.deref_mut() } } impl AsRef<::StructureRef> for BufferPoolConfig { fn as_ref(&self) -> &::StructureRef { self.0.as_ref() } } impl AsMut<::StructureRef> for BufferPoolConfig { fn as_mut(&mut self) -> &mut ::StructureRef { self.0.as_mut() } } impl BufferPoolConfig { pub fn add_option(&mut self, option: &str) { unsafe { ffi::gst_buffer_pool_config_add_option( self.0.to_glib_none_mut().0, option.to_glib_none().0, ); } } pub fn has_option(&self, option: &str) -> bool { unsafe { from_glib(ffi::gst_buffer_pool_config_has_option( self.0.to_glib_none().0, option.to_glib_none().0, )) } } pub fn get_options(&self) -> Vec<String> { unsafe { let n = ffi::gst_buffer_pool_config_n_options(self.0.to_glib_none().0) as usize; let mut options = Vec::with_capacity(n); for i in 0..n { options.push(from_glib_none(ffi::gst_buffer_pool_config_get_option( self.0.to_glib_none().0, i as u32, ))); } options } } pub fn set_params<'a, T: Into<Option<&'a ::Caps>>>( &mut self, caps: T, size: u32, min_buffers: u32, max_buffers: u32, ) { let caps = caps.into(); unsafe { ffi::gst_buffer_pool_config_set_params( self.0.to_glib_none_mut().0, caps.to_glib_none().0, size, min_buffers, max_buffers, ); } } pub fn get_params(&self) -> Option<(Option<::Caps>, u32, u32, u32)> { unsafe { let mut caps = ptr::null_mut(); let mut size = mem::uninitialized(); let mut min_buffers = mem::uninitialized(); let mut max_buffers = mem::uninitialized(); let ret: bool = from_glib(ffi::gst_buffer_pool_config_get_params( self.0.to_glib_none().0, &mut caps, &mut size, &mut min_buffers, &mut max_buffers, )); if !ret { return None; } Some((from_glib_none(caps), size, min_buffers, max_buffers)) } } pub fn validate_params<'a, T: Into<Option<&'a ::Caps>>>( &self, caps: T, size: u32, min_buffers: u32, max_buffers: u32, ) -> bool { let caps = caps.into(); unsafe { from_glib(ffi::gst_buffer_pool_config_validate_params( self.0.to_glib_none().0, caps.to_glib_none().0, size, min_buffers, max_buffers, )) } } } #[derive(Debug)] pub struct BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams); impl BufferPoolAcquireParams { pub fn with_flags(flags: ::BufferPoolAcquireFlags) -> Self { BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams { format: ffi::GST_FORMAT_UNDEFINED, start: -1, stop: -1, flags: flags.to_glib(), _gst_reserved: [ptr::null_mut(); 4], }) } pub fn with_start_stop<T: ::SpecificFormattedValue>( start: T, stop: T, flags: ::BufferPoolAcquireFlags, ) -> Self { unsafe { BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams { format: start.get_format().to_glib(), start: start.to_raw_value(), stop: stop.to_raw_value(), flags: flags.to_glib(), _gst_reserved: [ptr::null_mut(); 4], }) } } pub fn flags(&self) -> ::BufferPoolAcquireFlags { from_glib(self.0.flags) } pub fn format(&self) -> ::Format { from_glib(self.0.format) } pub fn start(&self) -> ::GenericFormattedValue { ::GenericFormattedValue::new(from_glib(self.0.format), self.0.start) } pub fn stop(&self) -> ::GenericFormattedValue { ::GenericFormattedValue::new(from_glib(self.0.format), self.0.stop) } } 
impl PartialEq for BufferPoolAcquireParams { fn eq(&self, other: &Self) -> bool { self.format() == other.format() && self.start() == other.start() && self.stop() == other.stop() } } impl Eq for BufferPoolAcquireParams {} impl BufferPool { pub fn new() -> BufferPool { assert_initialized_main_thread!(); let (major, minor, _, _) = ::version(); if (major, minor) > (1, 12) { unsafe { from_glib_full(ffi::gst_buffer_pool_new()) } } else { unsafe { from_glib_none(ffi::gst_buffer_pool_new()) } } } } impl Default for BufferPool { fn default() -> Self { Self::new() } } pub trait BufferPoolExtManual { fn get_config(&self) -> BufferPoolConfig; fn set_config(&self, config: BufferPoolConfig) -> Result<(), glib::error::BoolError>; fn is_flushing(&self) -> bool; fn acquire_buffer<'a, P: Into<Option<&'a BufferPoolAcquireParams>>>( &self, params: P, ) -> Result<::Buffer, ::FlowReturn>; fn release_buffer(&self, buffer: ::Buffer); } impl<O: IsA<BufferPool>> BufferPoolExtManual for O { fn get_config(&self) -> BufferPoolConfig { unsafe { let ptr = ffi::gst_buffer_pool_get_config(self.to_glib_none().0); BufferPoolConfig(from_glib_full(ptr)) } } fn set_config(&self, config: BufferPoolConfig) -> Result<(), glib::error::BoolError> { unsafe { glib::error::BoolError::from_glib( ffi::gst_buffer_pool_set_config(self.to_glib_none().0, config.0.into_ptr()), "Failed to set config", ) } } fn is_flushing(&self) -> bool { unsafe { let stash = self.to_glib_none(); let ptr: *mut ffi::GstBufferPool = stash.0; from_glib((*ptr).flushing) } } fn acquire_buffer<'a, P: Into<Option<&'a BufferPoolAcquireParams>>>( &self, params: P, ) -> Result<::Buffer, ::FlowReturn> { let params = params.into(); let params_ptr = match params { Some(params) => &params.0 as *const _ as *mut _, None => ptr::null_mut(), }; unsafe { let mut buffer = ptr::null_mut(); let ret = from_glib(ffi::gst_buffer_pool_acquire_buffer( self.to_glib_none().0, &mut buffer, params_ptr, )); if ret == ::FlowReturn::Ok { Ok(from_glib_full(buffer)) } else { Err(ret) } } } fn release_buffer(&self, buffer: ::Buffer) { unsafe { ffi::gst_buffer_pool_release_buffer(self.to_glib_none().0, buffer.into_ptr()); } } } #[cfg(test)] mod tests { use super::*; use prelude::*; #[test] fn test_pool() { ::init().unwrap(); let pool = ::BufferPool::new(); let mut config = pool.get_config(); config.set_params(Some(&::Caps::new_simple("foo/bar", &[])), 1024, 0, 2); pool.set_config(config).unwrap(); pool.set_active(true).unwrap(); let params = ::BufferPoolAcquireParams::with_flags(::BufferPoolAcquireFlags::DONTWAIT); let _buf1 = pool.acquire_buffer(&params).unwrap(); let buf2 = pool.acquire_buffer(&params).unwrap(); assert!(pool.acquire_buffer(&params).is_err()); drop(buf2); let _buf2 = pool.acquire_buffer(&params).unwrap(); pool.set_active(false).unwrap(); } }
use BufferPool; use Structure; use glib; use glib::translate::{from_glib, from_glib_full, from_glib_none, ToGlib, ToGlibPtr, ToGlibPtrMut}; use glib::IsA; use ffi; use std::mem; use std::ops; use std::ptr; #[derive(Debug, PartialEq, Eq)] pub struct BufferPoolConfig(Structure); impl ops::Deref for BufferPoolConfig { type Target = ::StructureRef; fn deref(&self) -> &::StructureRef { self.0.deref() } } impl ops::DerefMut for BufferPoolConfig { fn deref_mut(&mut self) -> &mut ::StructureRef { self.0.deref_mut() } } impl AsRef<::StructureRef> for BufferPoolConfig { fn as_ref(&self) -> &::StructureRef { self.0.as_ref() } } impl AsMut<::StructureRef> for BufferPoolConfig { fn as_mut(&mut self) -> &mut ::StructureRef { self.0.as_mut() } } impl BufferPoolConfig { pub fn add_option(&mut self, option: &str) { unsafe { ffi::gst_buffer_pool_config_add_option( self.0.to_glib_none_mut().0, option.to_glib_none().0, ); } } pub fn has_option(&self, option: &str) -> bool { unsafe { from_glib(ffi::gst_buffer_pool_config_has_option( self.0.to_glib_none().0, option.to_glib_none().0, )) } } pub fn get_options(&self) -> Vec<String> { unsafe { let n = ffi::gst_buffer_pool_config_n_options(self.0.to_glib_none().0) as usize; let mut options = Vec::with_capacity(n); for i in 0..n { options.push(from_glib_none(ffi::gst_buffer_pool_config_get_option( self.0.to_glib_none().0, i as u32, ))); } options } } pub fn set_params<'a, T: Into<Option<&'a ::Caps>>>( &mut self, caps: T, size: u32, min_buffers: u32, max_buffers: u32, ) { let caps = caps.into(); unsafe { ffi::gst_buffer_pool_config_set_params( self.0.to_glib_none_mut().0, caps.to_glib_none().0, size, min_buffers, max_buffers, ); } } pub fn get_params(&self) -> Option<(Option<::Caps>, u32, u32, u32)> { unsafe { let mut caps = ptr::null_mut(); let mut size = mem::uninitialized(); let mut min_buffers = mem::uninitialized(); let mut max_buffers = mem::uninitialized(); let ret: bool = from_glib(ffi::gst_buffer_pool_config_get_params( self.0.to_glib_none().0, &mut caps, &mut size, &mut min_buffers, &mut max_buffers, )); if !ret { return None; } Some((from_glib_none(caps), size, min_buffers, max_buffers)) } } pub fn validate_params<'a, T: Into<Option<&'a ::Caps>>>( &self, caps: T, size: u32, min_buffers: u32, max_buffers: u32, ) -> bool { let caps = caps.into(); unsafe { from_glib(ffi::gst_buffer_pool_config_validate_params( self.0.to_glib_none().0, caps.to_glib_none().0, size, min_buffers, max_buffers, )) } } } #[derive(Debug)] pub struct BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams); impl BufferPoolAcquireParams { pub fn with_flags(flags: ::BufferPoolAcquireFlags) -> Self { BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams { format: ffi::GST_FORMAT_UNDEFINED, start: -1, stop: -1, flags: flags.to_glib(), _gst_reserved: [ptr::null_mut(); 4], }) } pub fn with_start_stop<T: ::SpecificFormattedValue>( start: T, stop: T, flags: ::BufferPoolAcquireFlags, ) -> Self { unsafe { BufferPoolAcquireParams(ffi::GstBufferPoolAcquireParams { format: start.get_format().to_glib(), start: start.to_raw_value(), stop: stop.to_raw_value(), flags: flags.to_glib(), _gst_reserved: [ptr::null_mut(); 4], }) } } pub fn flags(&self) -> ::BufferPoolAcquireFlags { from_glib(self.0.flags) } pub fn format(&self) -> ::Format { from_glib(self.0.format) } pub fn start(&self) -> ::GenericFormattedValue { ::GenericFormattedValue::new(from_glib(self.0.format), self.0.start) } pub fn stop(&self) -> ::GenericFormattedValue { ::GenericFormattedValue::new(from_glib(self.0.format), self.0.stop) } } 
impl PartialEq for BufferPoolAcquireParams {
    fn eq(&self, other: &Self) -> bool {
        self.format() == other.format()
            && self.start() == other.start()
            && self.stop() == other.stop()
    }
}

impl Eq for BufferPoolAcquireParams {}

impl BufferPool {
    pub fn new() -> BufferPool {
        assert_initialized_main_thread!();
        let (major, minor, _, _) = ::version();
        if (major, minor) > (1, 12) {
            unsafe { from_glib_full(ffi::gst_buffer_pool_new()) }
        } else {
            unsafe { from_glib_none(ffi::gst_buffer_pool_new()) }
        }
    }
}

impl Default for BufferPool {
    fn default() -> Self {
        Self::new()
    }
}

pub trait BufferPoolExtManual {
    fn get_config(&self) -> BufferPoolConfig;
    fn set_config(&self, config: BufferPoolConfig) -> Result<(), glib::error::BoolError>;

    fn is_flushing(&self) -> bool;

    fn acquire_buffer<'a, P: Into<Option<&'a BufferPoolAcquireParams>>>(
        &self,
        params: P,
    ) -> Result<::Buffer, ::FlowReturn>;
    fn release_buffer(&self, buffer: ::Buffer);
}

impl<O: IsA<BufferPool>> BufferPoolExtManual for O {
    fn get_config(&self) -> BufferPoolConfig {
        unsafe {
            let ptr = ffi::gst_buffer_pool_get_config(self.to_glib_none().0);
            BufferPoolConfig(from_glib_full(ptr))
        }
    }

    fn set_config(&self, config: BufferPoolConfig) -> Result<(), glib::error::BoolError> {
        unsafe {
            glib::error::BoolError::from_glib(
                ffi::gst_buffer_pool_set_config(self.to_glib_none().0, config.0.into_ptr()),
                "Failed to set config",
            )
        }
    }

    fn is_flushing(&self) -> bool {
        unsafe {
            let stash = self.to_glib_none();
            let ptr: *mut ffi::GstBufferPool = stash.0;

            from_glib((*ptr).flushing)
        }
    }
    fn release_buffer(&self, buffer: ::Buffer) {
        unsafe {
            ffi::gst_buffer_pool_release_buffer(self.to_glib_none().0, buffer.into_ptr());
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use prelude::*;

    #[test]
    fn test_pool() {
        ::init().unwrap();

        let pool = ::BufferPool::new();
        let mut config = pool.get_config();
        config.set_params(Some(&::Caps::new_simple("foo/bar", &[])), 1024, 0, 2);
        pool.set_config(config).unwrap();

        pool.set_active(true).unwrap();

        let params = ::BufferPoolAcquireParams::with_flags(::BufferPoolAcquireFlags::DONTWAIT);

        let _buf1 = pool.acquire_buffer(&params).unwrap();
        let buf2 = pool.acquire_buffer(&params).unwrap();

        assert!(pool.acquire_buffer(&params).is_err());

        drop(buf2);
        let _buf2 = pool.acquire_buffer(&params).unwrap();

        pool.set_active(false).unwrap();
    }
}
    fn acquire_buffer<'a, P: Into<Option<&'a BufferPoolAcquireParams>>>(
        &self,
        params: P,
    ) -> Result<::Buffer, ::FlowReturn> {
        let params = params.into();

        let params_ptr = match params {
            Some(params) => &params.0 as *const _ as *mut _,
            None => ptr::null_mut(),
        };

        unsafe {
            let mut buffer = ptr::null_mut();
            let ret = from_glib(ffi::gst_buffer_pool_acquire_buffer(
                self.to_glib_none().0,
                &mut buffer,
                params_ptr,
            ));

            if ret == ::FlowReturn::Ok {
                Ok(from_glib_full(buffer))
            } else {
                Err(ret)
            }
        }
    }
function_block-full_function
[ { "content": "pub fn type_find_helper<P: IsA<gst::Pad>>(src: &P, size: u64) -> Option<gst::Caps> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gst_type_find_helper(src.to_glib_none().0, size))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 0, "score": 349503.0390563768 }, { "content": "pub fn type_find_helper_for_buffer<'a, P: IsA<gst::Object> + 'a, Q: Into<Option<&'a P>>>(obj: Q, buf: &gst::Buffer) -> (Option<gst::Caps>, gst::TypeFindProbability) {\n\n assert_initialized_main_thread!();\n\n let obj = obj.into();\n\n let obj = obj.to_glib_none();\n\n unsafe {\n\n let mut prob = mem::uninitialized();\n\n let ret = from_glib_full(ffi::gst_type_find_helper_for_buffer(obj.0, buf.to_glib_none().0, &mut prob));\n\n (ret, from_glib(prob))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 1, "score": 335275.7058662332 }, { "content": "pub fn type_find_helper_for_extension<'a, P: IsA<gst::Object> + 'a, Q: Into<Option<&'a P>>>(obj: Q, extension: &str) -> Option<gst::Caps> {\n\n assert_initialized_main_thread!();\n\n let obj = obj.into();\n\n let obj = obj.to_glib_none();\n\n unsafe {\n\n from_glib_full(ffi::gst_type_find_helper_for_extension(obj.0, extension.to_glib_none().0))\n\n }\n\n}\n\n\n\n//pub fn type_find_helper_get_range<'a, P: IsA<gst::Object>, Q: IsA<gst::Object> + 'a, R: Into<Option<&'a Q>>>(obj: &P, parent: R, func: /*Unknown conversion*//*Unimplemented*/TypeFindHelperGetRangeFunction, size: u64, extension: &str) -> (Option<gst::Caps>, gst::TypeFindProbability) {\n\n// unsafe { TODO: call ffi::gst_type_find_helper_get_range() }\n\n//}\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 2, "score": 333393.08465173846 }, { "content": "pub fn version() -> (u32, u32, u32, u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut major = mem::uninitialized();\n\n let mut minor = mem::uninitialized();\n\n let mut micro = mem::uninitialized();\n\n let mut nano = mem::uninitialized();\n\n ffi::gst_version(&mut major, &mut minor, &mut micro, &mut nano);\n\n (major, minor, micro, nano)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 3, "score": 303484.37578508875 }, { "content": "pub fn parse_launch_full<'a, P: Into<Option<&'a mut ParseContext>>>(\n\n pipeline_description: &str,\n\n context: P,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n let mut context = context.into();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_launch_full(\n\n pipeline_description.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 4, "score": 299121.39432439633 }, { "content": "pub fn parse_launchv_full<'a, P: Into<Option<&'a mut ParseContext>>>(\n\n argv: &[&str],\n\n context: P,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n let mut context = context.into();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_launchv_full(\n\n argv.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 
5, "score": 299121.39432439633 }, { "content": "pub fn parse_bin_from_description_full<'a, P: Into<Option<&'a mut ParseContext>>>(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n context: P,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n let mut context = context.into();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_bin_from_description_full(\n\n bin_description.to_glib_none().0,\n\n ghost_unlinked_pads.to_glib(),\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 6, "score": 294333.1040753911 }, { "content": "pub fn debug_set_threshold_from_string(list: &str, reset: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_set_threshold_from_string(list.to_glib_none().0, reset.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 7, "score": 291262.54111425235 }, { "content": "pub fn init() -> Result<(), glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n if from_glib(ffi::gst_init_check(\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n &mut error,\n\n )) {\n\n Ok(())\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn deinit() {\n\n ffi::gst_deinit();\n\n}\n\n\n\npub const BUFFER_OFFSET_NONE: u64 = ffi::GST_BUFFER_OFFSET_NONE;\n", "file_path": "gstreamer/src/lib.rs", "rank": 8, "score": 277104.06231798703 }, { "content": "pub fn parse_bin_from_description(bin_description: &str, ghost_unlinked_pads: bool) -> Result<Bin, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_bin_from_description(bin_description.to_glib_none().0, ghost_unlinked_pads.to_glib(), &mut error);\n\n if error.is_null() { Ok(from_glib_none(ret)) } else { Err(from_glib_full(error)) }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 9, "score": 273338.5429316997 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_add_ring_buffer_logger(max_size_per_thread: u32, thread_timeout: u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_add_ring_buffer_logger(max_size_per_thread, thread_timeout);\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 10, "score": 272411.92588178214 }, { "content": "pub fn debug_set_active(active: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_set_active(active.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 11, "score": 250191.95156209604 }, { "content": "pub fn debug_set_colored(colored: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_set_colored(colored.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 12, "score": 250191.95156209604 }, { "content": "pub trait TypeFindImpl {\n\n fn peek(&mut self, offset: i64, size: u32) -> Option<&[u8]>;\n\n fn suggest(&mut self, probability: TypeFindProbability, caps: &Caps);\n\n fn get_length(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\nimpl<'a> TypeFind<'a> {\n\n pub fn register<\n\n 'b,\n\n 'c,\n\n 'd,\n\n P: Into<Option<&'b Plugin>>,\n\n R: Into<Option<&'c str>>,\n\n S: Into<Option<&'d Caps>>,\n\n F,\n\n >(\n\n plugin: P,\n\n name: &str,\n", 
"file_path": "gstreamer/src/typefind.rs", "rank": 13, "score": 248754.9834687683 }, { "content": "pub fn parse_launchv(argv: &[&str]) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_launchv(argv.to_glib_none().0, &mut error);\n\n if error.is_null() { Ok(from_glib_none(ret)) } else { Err(from_glib_full(error)) }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 14, "score": 247794.74572166626 }, { "content": "struct CapsItemSe<'a>(&'a StructureRef, Option<&'a CapsFeaturesRef>);\n\nimpl<'a> Serialize for CapsItemSe<'a> {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n let mut tup = serializer.serialize_tuple(2)?;\n\n tup.serialize_element(self.0)?;\n\n tup.serialize_element(&self.1)?;\n\n tup.end()\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/caps_serde.rs", "rank": 15, "score": 247212.8675208579 }, { "content": "struct CapsItemDe(Structure, Option<CapsFeatures>);\n\n\n", "file_path": "gstreamer/src/caps_serde.rs", "rank": 16, "score": 244393.35275001367 }, { "content": "pub fn parse_launch(pipeline_description: &str) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_parse_launch(pipeline_description.to_glib_none().0, &mut error);\n\n if error.is_null() { Ok(from_glib_none(ret)) } else { Err(from_glib_full(error)) }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 17, "score": 243965.27668545273 }, { "content": "struct FieldSe<'a>(&'a str, &'a glib::SendValue);\n\nimpl<'a> Serialize for FieldSe<'a> {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n ser_value!(self.1, |type_, value| {\n\n let mut tup = serializer.serialize_tuple(3)?;\n\n tup.serialize_element(self.0)?;\n\n tup.serialize_element(type_)?;\n\n tup.serialize_element(&value)?;\n\n tup.end()\n\n })\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/structure_serde.rs", "rank": 18, "score": 239237.4966621165 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn debug_get_stack_trace(flags: StackTraceFlags) -> Option<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gst_debug_get_stack_trace(flags.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 19, "score": 234326.86077113904 }, { "content": "pub trait FormattedValue: Copy + Clone + Sized + 'static {\n\n fn get_default_format() -> Format;\n\n fn try_from(v: GenericFormattedValue) -> Option<Self>;\n\n\n\n fn get_format(&self) -> Format;\n\n\n\n unsafe fn from_raw(format: Format, value: i64) -> Self;\n\n unsafe fn to_raw_value(&self) -> i64;\n\n}\n\n\n", "file_path": "gstreamer/src/format.rs", "rank": 20, "score": 231796.17391537537 }, { "content": "pub fn debug_set_threshold_for_name(name: &str, level: DebugLevel) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_set_threshold_for_name(name.to_glib_none().0, level.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 21, "score": 224257.62850818367 }, { "content": "struct FieldsDe<'a>(&'a mut StructureRef);\n\n\n", "file_path": "gstreamer/src/structure_serde.rs", "rank": 22, "score": 219874.2819617808 }, { "content": "struct FieldsVisitor<'a>(&'a mut StructureRef);\n\nimpl<'de, 'a> Visitor<'de> for FieldsVisitor<'a> {\n\n type Value = ();\n\n\n\n fn expecting(&self, 
formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a sequence of `Structure` `Field`s\")\n\n }\n\n\n\n fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<(), A::Error> {\n\n while let Some(field) = seq.next_element::<FieldDe>()? {\n\n let (name, value): (String, glib::SendValue) = field.into();\n\n self.0.set_value(name.as_str(), value);\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'de, 'a> DeserializeSeed<'de> for FieldsDe<'a> {\n\n type Value = ();\n\n\n\n fn deserialize<D: Deserializer<'de>>(self, deserializer: D) -> Result<(), D::Error> {\n\n deserializer.deserialize_seq(FieldsVisitor(self.0))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/structure_serde.rs", "rank": 23, "score": 219874.2819617808 }, { "content": "pub fn update_registry() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gst_update_registry())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 24, "score": 218611.04595651358 }, { "content": "pub fn debug_is_active() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gst_debug_is_active())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 25, "score": 218611.04595651358 }, { "content": "pub fn debug_is_colored() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gst_debug_is_colored())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 26, "score": 218611.0459565136 }, { "content": "pub fn debug_set_default_threshold(level: DebugLevel) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_set_default_threshold(level.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 27, "score": 216894.30367487803 }, { "content": "pub fn new_still_frame_event<'a>(in_still: bool) -> StillFrameEventBuilder<'a> {\n\n StillFrameEventBuilder::new(in_still)\n\n}\n\n\n\npub struct StillFrameEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a ToSendValue)>,\n\n in_still: bool,\n\n}\n\n\n\nimpl<'a> StillFrameEventBuilder<'a> {\n\n fn new(in_still: bool) -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n\n running_time_offset: None,\n\n other_fields: Vec::new(),\n\n in_still,\n\n }\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 28, "score": 213314.74877810074 }, { "content": "pub trait SpecificFormattedValue: FormattedValue {}\n\n\n\nimpl FormattedValue for GenericFormattedValue {\n\n fn get_default_format() -> Format {\n\n Format::Undefined\n\n }\n\n\n\n fn try_from(v: GenericFormattedValue) -> Option<Self> {\n\n Some(v)\n\n }\n\n\n\n fn get_format(&self) -> Format {\n\n self.get_format()\n\n }\n\n\n\n unsafe fn from_raw(format: Format, value: i64) -> Self {\n\n GenericFormattedValue::new(format, value)\n\n }\n\n\n\n unsafe fn to_raw_value(&self) -> i64 {\n", "file_path": "gstreamer/src/format.rs", "rank": 29, "score": 207022.3990253205 }, { "content": "pub trait BufferPoolExt {\n\n fn get_options(&self) -> Vec<String>;\n\n\n\n fn has_option(&self, option: &str) -> bool;\n\n\n\n fn is_active(&self) -> bool;\n\n\n\n fn set_active(&self, active: bool) -> Result<(), glib::error::BoolError>;\n\n\n\n fn set_flushing(&self, flushing: bool);\n\n}\n\n\n\nimpl<O: IsA<BufferPool>> BufferPoolExt for O {\n\n fn get_options(&self) -> Vec<String> {\n\n unsafe {\n\n 
FromGlibPtrContainer::from_glib_none(ffi::gst_buffer_pool_get_options(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn has_option(&self, option: &str) -> bool {\n", "file_path": "gstreamer/src/auto/buffer_pool.rs", "rank": 30, "score": 206057.27924743015 }, { "content": "pub trait EncodingTargetExt {\n\n fn get_category(&self) -> String;\n\n\n\n fn get_description(&self) -> String;\n\n\n\n fn get_name(&self) -> String;\n\n\n\n fn get_profile(&self, name: &str) -> Option<EncodingProfile>;\n\n\n\n fn get_profiles(&self) -> Vec<EncodingProfile>;\n\n\n\n fn save(&self) -> Result<(), Error>;\n\n\n\n fn save_to_file<P: AsRef<std::path::Path>>(&self, filepath: P) -> Result<(), Error>;\n\n}\n\n\n\nimpl<O: IsA<EncodingTarget>> EncodingTargetExt for O {\n\n fn get_category(&self) -> String {\n\n unsafe {\n\n from_glib_none(ffi::gst_encoding_target_get_category(self.to_glib_none().0))\n", "file_path": "gstreamer-pbutils/src/auto/encoding_target.rs", "rank": 32, "score": 203082.71907313348 }, { "content": "pub fn debug_unset_threshold_for_name(name: &str) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_unset_threshold_for_name(name.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 33, "score": 202159.85337618654 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn get_main_executable_path() -> Option<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_none(ffi::gst_get_main_executable_path())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 34, "score": 202142.38194685592 }, { "content": "pub trait MessageErrorDomain: glib::error::ErrorDomain {}\n\n\n\nimpl MessageErrorDomain for ::CoreError {}\n\nimpl MessageErrorDomain for ::ResourceError {}\n\nimpl MessageErrorDomain for ::StreamError {}\n\nimpl MessageErrorDomain for ::LibraryError {}\n\n\n\npub struct ErrorBuilder<'a, T> {\n\n builder: MessageBuilder<'a>,\n\n error: T,\n\n message: &'a str,\n\n debug: Option<&'a str>,\n\n #[allow(unused)]\n\n details: Option<Structure>,\n\n}\n\nimpl<'a, T: MessageErrorDomain> ErrorBuilder<'a, T> {\n\n fn new(error: T, message: &'a str) -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n builder: MessageBuilder::new(),\n", "file_path": "gstreamer/src/message.rs", "rank": 35, "score": 197126.6271927558 }, { "content": "pub fn audio_buffer_clip(\n\n buffer: gst::Buffer,\n\n segment: &gst::Segment,\n\n rate: u32,\n\n channels: u32,\n\n) -> Option<gst::Buffer> {\n\n skip_assert_initialized!();\n\n\n\n unsafe {\n\n from_glib_full(ffi::gst_audio_buffer_clip(\n\n buffer.into_ptr(),\n\n segment.to_glib_none().0,\n\n rate as i32,\n\n channels as i32,\n\n ))\n\n }\n\n}\n\n\n\n// Re-export all the traits in a prelude module, so that applications\n\n// can always \"use gst::prelude::*\" without getting conflicts\n\npub mod prelude {\n\n pub use glib::prelude::*;\n\n pub use gst::prelude::*;\n\n\n\n pub use auto::traits::*;\n\n}\n", "file_path": "gstreamer-audio/src/lib.rs", "rank": 36, "score": 196173.60670101087 }, { "content": "pub fn type_find_helper_for_data<\n\n 'a,\n\n P: IsA<gst::Object> + 'a,\n\n Q: Into<Option<&'a P>>,\n\n R: AsRef<[u8]>,\n\n>(\n\n obj: Q,\n\n data: R,\n\n) -> (Option<gst::Caps>, gst::TypeFindProbability) {\n\n assert_initialized_main_thread!();\n\n let obj = obj.into();\n\n let obj = obj.to_glib_none();\n\n unsafe {\n\n let mut prob = mem::uninitialized();\n\n let data = data.as_ref();\n\n let (ptr, len) = (data.as_ptr(), data.len());\n\n let ret = 
from_glib_full(ffi::gst_type_find_helper_for_data(\n\n obj.0,\n\n mut_override(ptr),\n\n len,\n\n &mut prob,\n\n ));\n\n (ret, from_glib(prob))\n\n }\n\n}\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 37, "score": 192810.20546673867 }, { "content": "pub trait GstValueExt: Sized {\n\n fn can_compare(&self, other: &Self) -> bool;\n\n fn compare(&self, other: &Self) -> Option<cmp::Ordering>;\n\n fn eq(&self, other: &Self) -> bool;\n\n fn can_intersect(&self, other: &Self) -> bool;\n\n fn intersect(&self, other: &Self) -> Option<Self>;\n\n fn can_subtract(&self, other: &Self) -> bool;\n\n fn subtract(&self, other: &Self) -> Option<Self>;\n\n fn can_union(&self, other: &Self) -> bool;\n\n fn union(&self, other: &Self) -> Option<Self>;\n\n fn fixate(&self) -> Option<Self>;\n\n fn is_fixed(&self) -> bool;\n\n fn is_subset(&self, superset: &Self) -> bool;\n\n fn serialize(&self) -> Option<String>;\n\n fn deserialize<'a, T: Into<&'a str>>(s: T) -> Option<glib::Value>;\n\n}\n\n\n\nimpl GstValueExt for glib::Value {\n\n fn can_compare(&self, other: &Self) -> bool {\n\n unsafe {\n", "file_path": "gstreamer/src/value.rs", "rank": 38, "score": 191626.33472930698 }, { "content": "pub fn version_string() -> String {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gst_version_string())\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 39, "score": 191217.8980760513 }, { "content": "struct TagValues<'a>(&'a str, &'a mut TagListRef);\n\n\n", "file_path": "gstreamer/src/tags_serde.rs", "rank": 40, "score": 191179.09459122719 }, { "content": "pub fn buffer_reorder_channels(\n\n buffer: &mut gst::BufferRef,\n\n format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib::BoolError(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [ffi::GstAudioChannelPosition; 64] = array_init::array_init_copy(|i| {\n\n if i >= from_len as usize {\n\n ffi::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 41, "score": 189403.86933760231 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_remove_ring_buffer_logger() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_remove_ring_buffer_logger();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 42, "score": 189403.86933760231 }, { "content": "struct TagValuesVisitor<'a>(&'a str, &'a mut TagListRef);\n\nimpl<'de, 'a> Visitor<'de> for TagValuesVisitor<'a> {\n\n type Value = ();\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a sequence of `Tag` values with the same type\")\n\n }\n\n\n\n fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<(), A::Error> {\n\n let tag_type: glib::Type = unsafe {\n\n let tag_name = self.0.to_glib_none();\n\n from_glib(ffi::gst_tag_get_type(tag_name.0))\n\n };\n\n\n\n loop {\n\n let tag_value = match tag_type {\n\n glib::Type::F64 => de_tag_value!(self.0, seq, f64),\n\n glib::Type::String => de_tag_value!(self.0, seq, String),\n\n glib::Type::U32 => de_tag_value!(self.0, seq, u32),\n\n glib::Type::U64 => de_tag_value!(self.0, seq, u64),\n", "file_path": "gstreamer/src/tags_serde.rs", "rank": 43, "score": 
188168.03953882906 }, { "content": "pub fn debug_bin_to_dot_data<P: IsA<Bin>>(bin: &P, details: DebugGraphDetails) -> String {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_full(ffi::gst_debug_bin_to_dot_data(bin.to_glib_none().0, details.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 44, "score": 182880.20676408077 }, { "content": "pub fn debug_get_default_threshold() -> DebugLevel {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gst_debug_get_default_threshold())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 45, "score": 181293.04685743814 }, { "content": "pub fn is_force_key_unit_event(event: &gst::EventRef) -> bool {\n\n unsafe { from_glib(ffi::gst_video_event_is_force_key_unit(event.as_mut_ptr())) }\n\n}\n\n\n\n// FIXME: Copy from gstreamer/src/event.rs\n\nmacro_rules! event_builder_generic_impl {\n\n ($new_fn:expr) => {\n\n pub fn seqnum(self, seqnum: gst::Seqnum) -> Self {\n\n Self {\n\n seqnum: Some(seqnum),\n\n .. self\n\n }\n\n }\n\n\n\n pub fn running_time_offset(self, running_time_offset: i64) -> Self {\n\n Self {\n\n running_time_offset: Some(running_time_offset),\n\n .. self\n\n }\n\n }\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 46, "score": 180680.85922190303 }, { "content": "pub trait EncodingProfileBuilder<'a>: Sized {\n\n fn name(self, name: &'a str) -> Self;\n\n fn description(self, description: &'a str) -> Self;\n\n fn format(self, format: &'a gst::Caps) -> Self;\n\n fn preset(self, preset: &'a str) -> Self;\n\n fn preset_name(self, preset_name: &'a str) -> Self;\n\n fn restriction(self, format: &'a gst::Caps) -> Self;\n\n fn presence(self, presence: u32) -> Self;\n\n fn allow_dynamic_output(self, allow: bool) -> Self;\n\n fn enabled(self, enabled: bool) -> Self;\n\n}\n\n\n\nmacro_rules! 
declare_encoding_profile_builder_common(\n\n ($name:ident) => {\n\n impl<'a> EncodingProfileBuilder<'a> for $name<'a> {\n\n fn name(mut self, name: &'a str) -> $name<'a> {\n\n self.base.name = Some(name);\n\n self\n\n }\n\n\n", "file_path": "gstreamer-pbutils/src/encoding_profile.rs", "rank": 47, "score": 180597.23590996343 }, { "content": "pub fn is_video_overlay_prepare_window_handle_message(msg: &gst::MessageRef) -> bool {\n\n unsafe {\n\n from_glib(ffi::gst_is_video_overlay_prepare_window_handle_message(\n\n msg.as_mut_ptr(),\n\n ))\n\n }\n\n}\n", "file_path": "gstreamer-video/src/video_overlay.rs", "rank": 48, "score": 175751.29521036719 }, { "content": "pub trait IteratorImpl<T>: Clone + Send + 'static\n\nwhere\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n fn next(&mut self) -> Option<Result<T, IteratorError>>;\n\n fn resync(&mut self);\n\n}\n\n\n\nunsafe extern \"C\" fn rs_iterator_copy<T, I: IteratorImpl<T>>(\n\n it: *const ffi::GstIterator,\n\n copy: *mut ffi::GstIterator,\n\n) where\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n let it = it as *const RsIterator<T, I>;\n\n let copy = copy as *mut RsIterator<T, I>;\n\n\n\n ptr::write(&mut (*copy).imp, (*it).imp.clone());\n\n}\n\n\n", "file_path": "gstreamer/src/iterator.rs", "rank": 49, "score": 173931.9892592216 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_ring_buffer_logger_get_logs() -> Vec<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(ffi::gst_debug_ring_buffer_logger_get_logs())\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 50, "score": 171092.00218529892 }, { "content": "pub fn parse_still_frame_event(event: &gst::EventRef) -> Option<StillFrameEvent> {\n\n unsafe {\n\n let mut in_still = mem::uninitialized();\n\n\n\n let res: bool = from_glib(ffi::gst_video_event_parse_still_frame(\n\n event.as_mut_ptr(),\n\n &mut in_still,\n\n ));\n\n if res {\n\n Some(StillFrameEvent {\n\n in_still: from_glib(in_still),\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 51, "score": 169336.64377486738 }, { "content": "fn get_other_type_id<T: StaticType>() -> usize {\n\n match T::static_type() {\n\n glib::Type::Other(type_id) => type_id,\n\n type_ => panic!(\"Expecting `Other` variant, found `{}`\", type_),\n\n }\n\n}\n\n\n\nlazy_static! 
{\n\n pub(crate) static ref ARRAY_OTHER_TYPE_ID: usize = get_other_type_id::<Array>();\n\n pub(crate) static ref BITMASK_OTHER_TYPE_ID: usize = get_other_type_id::<Bitmask>();\n\n pub(crate) static ref DATE_TIME_OTHER_TYPE_ID: usize = get_other_type_id::<DateTime>();\n\n pub(crate) static ref FRACTION_OTHER_TYPE_ID: usize = get_other_type_id::<Fraction>();\n\n pub(crate) static ref FRACTION_RANGE_OTHER_TYPE_ID: usize =\n\n get_other_type_id::<FractionRange>();\n\n pub(crate) static ref INT_RANGE_I32_OTHER_TYPE_ID: usize = get_other_type_id::<IntRange<i32>>();\n\n pub(crate) static ref INT_RANGE_I64_OTHER_TYPE_ID: usize = get_other_type_id::<IntRange<i64>>();\n\n pub(crate) static ref LIST_OTHER_TYPE_ID: usize = get_other_type_id::<List>();\n\n pub(crate) static ref SAMPLE_OTHER_TYPE_ID: usize = get_other_type_id::<Sample>();\n\n}\n\n\n", "file_path": "gstreamer/src/value_serde.rs", "rank": 52, "score": 168085.48805472834 }, { "content": "pub fn parse_force_key_unit_event(event: &gst::EventRef) -> Option<ForceKeyUnitEvent> {\n\n if event.is_upstream() {\n\n parse_upstream_force_key_unit_event(event).map(ForceKeyUnitEvent::Upstream)\n\n } else {\n\n parse_downstream_force_key_unit_event(event).map(ForceKeyUnitEvent::Downstream)\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 53, "score": 164974.03964328844 }, { "content": "struct StructureForIter<'a>(&'a StructureRef);\n\nimpl<'a> Serialize for StructureForIter<'a> {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n let iter = self.0.iter();\n\n let size = iter.size_hint().0;\n\n if size > 0 {\n\n let mut seq = serializer.serialize_seq(Some(size))?;\n\n for field in iter {\n\n seq.serialize_element(&FieldSe(field.0, field.1))?;\n\n }\n\n seq.end()\n\n } else {\n\n let seq = serializer.serialize_seq(None)?;\n\n seq.end()\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for StructureRef {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n", "file_path": "gstreamer/src/structure_serde.rs", "rank": 54, "score": 162588.1297767748 }, { "content": "pub fn debug_bin_to_dot_file<P: IsA<Bin>, Q: AsRef<std::path::Path>>(bin: &P, details: DebugGraphDetails, file_name: Q) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gst_debug_bin_to_dot_file(bin.to_glib_none().0, details.to_glib(), file_name.as_ref().to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 55, "score": 156100.2295650466 }, { "content": "pub fn new_downstream_force_key_unit_event<'a>() -> DownstreamForceKeyUnitEventBuilder<'a> {\n\n DownstreamForceKeyUnitEventBuilder::new()\n\n}\n\n\n\npub struct DownstreamForceKeyUnitEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a ToSendValue)>,\n\n timestamp: gst::ClockTime,\n\n stream_time: gst::ClockTime,\n\n running_time: gst::ClockTime,\n\n all_headers: bool,\n\n count: u32,\n\n}\n\n\n\nimpl<'a> DownstreamForceKeyUnitEventBuilder<'a> {\n\n fn new() -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 56, "score": 155013.49113351168 }, { "content": "pub fn new_upstream_force_key_unit_event<'a>() -> UpstreamForceKeyUnitEventBuilder<'a> {\n\n UpstreamForceKeyUnitEventBuilder::new()\n\n}\n\n\n\npub struct UpstreamForceKeyUnitEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a 
ToSendValue)>,\n\n running_time: gst::ClockTime,\n\n all_headers: bool,\n\n count: u32,\n\n}\n\n\n\nimpl<'a> UpstreamForceKeyUnitEventBuilder<'a> {\n\n fn new() -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n\n running_time_offset: None,\n\n other_fields: Vec::new(),\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 57, "score": 155013.49113351168 }, { "content": "pub fn debug_bin_to_dot_file_with_ts<P: IsA<Bin>, Q: AsRef<std::path::Path>>(bin: &P, details: DebugGraphDetails, file_name: Q) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gst_debug_bin_to_dot_file_with_ts(bin.to_glib_none().0, details.to_glib(), file_name.as_ref().to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 58, "score": 154374.95599726957 }, { "content": "pub trait Tag<'a> {\n\n type TagType: FromValueOptional<'a> + SetValue + Send;\n\n fn tag_name<'b>() -> &'b str;\n\n}\n\n\n\nmacro_rules! impl_tag(\n\n ($name:ident, $t:ty, $rust_tag:ident, $gst_tag:ident) => {\n\n pub enum $name {}\n\n impl<'a> Tag<'a> for $name {\n\n type TagType = $t;\n\n fn tag_name<'b>() -> &'b str {\n\n *$rust_tag\n\n }\n\n }\n\n\n\n lazy_static! {\n\n pub(crate) static ref $rust_tag: &'static str =\n\n unsafe { CStr::from_ptr(ffi::$gst_tag).to_str().unwrap() };\n\n }\n\n };\n", "file_path": "gstreamer/src/tags.rs", "rank": 59, "score": 152169.4638977395 }, { "content": "pub trait PipelineExt {\n\n fn auto_clock(&self);\n\n\n\n fn get_auto_flush_bus(&self) -> bool;\n\n\n\n fn get_delay(&self) -> ClockTime;\n\n\n\n fn get_latency(&self) -> ClockTime;\n\n\n\n fn get_pipeline_clock(&self) -> Option<Clock>;\n\n\n\n fn set_auto_flush_bus(&self, auto_flush: bool);\n\n\n\n fn set_delay(&self, delay: ClockTime);\n\n\n\n fn set_latency(&self, latency: ClockTime);\n\n\n\n fn use_clock<'a, P: IsA<Clock> + 'a, Q: Into<Option<&'a P>>>(&self, clock: Q);\n\n\n\n fn connect_property_auto_flush_bus_notify<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId;\n", "file_path": "gstreamer/src/auto/pipeline.rs", "rank": 60, "score": 152022.51769296045 }, { "content": "pub trait ClockExtManual {\n\n fn new_periodic_id(&self, start_time: ClockTime, interval: ClockTime) -> Option<ClockId>;\n\n\n\n fn periodic_id_reinit(\n\n &self,\n\n id: &ClockId,\n\n start_time: ClockTime,\n\n interval: ClockTime,\n\n ) -> Result<(), glib::BoolError>;\n\n\n\n fn new_single_shot_id(&self, time: ClockTime) -> Option<ClockId>;\n\n\n\n fn single_shot_id_reinit(&self, id: &ClockId, time: ClockTime) -> Result<(), glib::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<Clock> + IsA<glib::object::Object>> ClockExtManual for O {\n\n fn new_periodic_id(&self, start_time: ClockTime, interval: ClockTime) -> Option<ClockId> {\n\n unsafe {\n\n from_glib_full(ffi::gst_clock_new_periodic_id(\n\n self.to_glib_none().0,\n", "file_path": "gstreamer/src/clock.rs", "rank": 61, "score": 152022.51769296045 }, { "content": "pub trait PresetExt {\n\n fn delete_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn get_meta(&self, name: &str, tag: &str) -> Option<String>;\n\n\n\n fn get_preset_names(&self) -> Vec<String>;\n\n\n\n fn get_property_names(&self) -> Vec<String>;\n\n\n\n fn is_editable(&self) -> bool;\n\n\n\n fn load_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn rename_preset(&self, old_name: &str, new_name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn save_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn set_meta<'a, 
P: Into<Option<&'a str>>>(&self, name: &str, tag: &str, value: P) -> Result<(), glib::error::BoolError>;\n\n}\n\n\n", "file_path": "gstreamer/src/auto/preset.rs", "rank": 62, "score": 152022.51769296045 }, { "content": "pub trait BinExtManual {\n\n fn add_many<E: IsA<Element>>(&self, elements: &[&E]) -> Result<(), glib::BoolError>;\n\n fn remove_many<E: IsA<Element>>(&self, elements: &[&E]) -> Result<(), glib::BoolError>;\n\n\n\n fn iterate_all_by_interface(&self, iface: glib::types::Type) -> ::Iterator<Element>;\n\n fn iterate_elements(&self) -> ::Iterator<Element>;\n\n fn iterate_recurse(&self) -> ::Iterator<Element>;\n\n fn iterate_sinks(&self) -> ::Iterator<Element>;\n\n fn iterate_sorted(&self) -> ::Iterator<Element>;\n\n fn iterate_sources(&self) -> ::Iterator<Element>;\n\n fn get_children(&self) -> Vec<Element>;\n\n}\n\n\n\nimpl<O: IsA<Bin>> BinExtManual for O {\n\n fn add_many<E: IsA<Element>>(&self, elements: &[&E]) -> Result<(), glib::BoolError> {\n\n for e in elements {\n\n unsafe {\n\n let ret: bool =\n\n from_glib(ffi::gst_bin_add(self.to_glib_none().0, e.to_glib_none().0));\n\n if !ret {\n", "file_path": "gstreamer/src/bin.rs", "rank": 63, "score": 152022.51769296045 }, { "content": "pub trait BinExt {\n\n fn add<P: IsA<Element>>(&self, element: &P) -> Result<(), glib::error::BoolError>;\n\n\n\n //fn add_many<P: IsA<Element>>(&self, element_1: &P, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn find_unlinked_pad(&self, direction: PadDirection) -> Option<Pad>;\n\n\n\n fn get_by_interface(&self, iface: glib::types::Type) -> Option<Element>;\n\n\n\n fn get_by_name(&self, name: &str) -> Option<Element>;\n\n\n\n fn get_by_name_recurse_up(&self, name: &str) -> Option<Element>;\n\n\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn get_suppressed_flags(&self) -> ElementFlags;\n\n\n\n //fn iterate_all_by_interface(&self, iface: glib::types::Type) -> /*Ignored*/Option<Iterator>;\n\n\n\n //fn iterate_elements(&self) -> /*Ignored*/Option<Iterator>;\n\n\n", "file_path": "gstreamer/src/auto/bin.rs", "rank": 64, "score": 152022.51769296045 }, { "content": "pub trait PadExtManual {\n\n fn add_probe<F>(&self, mask: PadProbeType, func: F) -> Option<PadProbeId>\n\n where\n\n F: Fn(&Pad, &mut PadProbeInfo) -> PadProbeReturn + Send + Sync + 'static;\n\n fn remove_probe(&self, id: PadProbeId);\n\n\n\n fn chain(&self, buffer: Buffer) -> FlowReturn;\n\n fn push(&self, buffer: Buffer) -> FlowReturn;\n\n\n\n fn chain_list(&self, list: BufferList) -> FlowReturn;\n\n fn push_list(&self, list: BufferList) -> FlowReturn;\n\n\n\n fn pull_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowReturn>;\n\n fn get_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowReturn>;\n\n\n\n fn peer_query(&self, query: &mut QueryRef) -> bool;\n\n fn query(&self, query: &mut QueryRef) -> bool;\n\n fn query_default<'a, P: IsA<Object> + 'a, Q: Into<Option<&'a P>>>(\n\n &self,\n\n parent: Q,\n", "file_path": "gstreamer/src/pad.rs", "rank": 65, "score": 152022.51769296045 }, { "content": "pub trait PadExt {\n\n fn activate_mode(&self, mode: PadMode, active: bool) -> Result<(), glib::error::BoolError>;\n\n\n\n //fn add_probe(&self, mask: PadProbeType, callback: /*Unknown conversion*//*Unimplemented*/PadProbeCallback, destroy_data: /*Unknown conversion*//*Unimplemented*/DestroyNotify) -> libc::c_ulong;\n\n\n\n fn can_link<P: IsA<Pad>>(&self, sinkpad: &P) -> bool;\n\n\n\n fn check_reconfigure(&self) -> bool;\n\n\n\n fn create_stream_id<'a, P: IsA<Element>, Q: Into<Option<&'a 
str>>>(&self, parent: &P, stream_id: Q) -> Option<String>;\n\n\n\n //fn create_stream_id_printf<'a, P: IsA<Element>, Q: Into<Option<&'a str>>>(&self, parent: &P, stream_id: Q, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<String>;\n\n\n\n //fn create_stream_id_printf_valist<'a, P: IsA<Element>, Q: Into<Option<&'a str>>>(&self, parent: &P, stream_id: Q, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported) -> Option<String>;\n\n\n\n //fn forward<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, forward: /*Unknown conversion*//*Unimplemented*/PadForwardFunction, user_data: P) -> bool;\n\n\n\n fn get_allowed_caps(&self) -> Option<Caps>;\n\n\n\n fn get_current_caps(&self) -> Option<Caps>;\n", "file_path": "gstreamer/src/auto/pad.rs", "rank": 66, "score": 152022.51769296045 }, { "content": "pub trait DeviceExt {\n\n fn create_element<'a, P: Into<Option<&'a str>>>(&self, name: P) -> Option<Element>;\n\n\n\n fn get_caps(&self) -> Option<Caps>;\n\n\n\n fn get_device_class(&self) -> String;\n\n\n\n fn get_display_name(&self) -> String;\n\n\n\n fn get_properties(&self) -> Option<Structure>;\n\n\n\n fn has_classes(&self, classes: &str) -> bool;\n\n\n\n fn has_classesv(&self, classes: &[&str]) -> bool;\n\n\n\n fn reconfigure_element<P: IsA<Element>>(&self, element: &P) -> bool;\n\n\n\n fn connect_removed<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n", "file_path": "gstreamer/src/auto/device.rs", "rank": 67, "score": 152022.51769296045 }, { "content": "pub trait ClockExt {\n\n fn add_observation(&self, slave: ClockTime, master: ClockTime) -> Option<f64>;\n\n\n\n fn add_observation_unapplied(&self, slave: ClockTime, master: ClockTime) -> Option<(f64, ClockTime, ClockTime, ClockTime, ClockTime)>;\n\n\n\n fn adjust_unlocked(&self, internal: ClockTime) -> ClockTime;\n\n\n\n fn get_calibration(&self) -> (ClockTime, ClockTime, ClockTime, ClockTime);\n\n\n\n fn get_internal_time(&self) -> ClockTime;\n\n\n\n fn get_master(&self) -> Option<Clock>;\n\n\n\n fn get_resolution(&self) -> ClockTime;\n\n\n\n fn get_time(&self) -> ClockTime;\n\n\n\n fn get_timeout(&self) -> ClockTime;\n\n\n\n fn is_synced(&self) -> bool;\n", "file_path": "gstreamer/src/auto/clock.rs", "rank": 68, "score": 152022.51769296045 }, { "content": "pub trait ElementExt {\n\n fn abort_state(&self);\n\n\n\n fn add_pad<P: IsA<Pad>>(&self, pad: &P) -> Result<(), glib::error::BoolError>;\n\n\n\n //#[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n //fn call_async(&self, func: /*Unknown conversion*//*Unimplemented*/ElementCallAsyncFunc, destroy_notify: /*Unknown conversion*//*Unimplemented*/DestroyNotify);\n\n\n\n fn change_state(&self, transition: StateChange) -> StateChangeReturn;\n\n\n\n fn continue_state(&self, ret: StateChangeReturn) -> StateChangeReturn;\n\n\n\n fn create_all_pads(&self);\n\n\n\n //#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n //fn foreach_pad<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/ElementForeachPadFunc, user_data: P) -> bool;\n\n\n\n //#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n //fn foreach_sink_pad<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/ElementForeachPadFunc, user_data: P) -> bool;\n\n\n", "file_path": "gstreamer/src/auto/element.rs", "rank": 69, "score": 152022.51769296045 }, { "content": "pub trait ElementExtManual {\n\n fn query(&self, query: &mut QueryRef) -> bool;\n\n\n\n fn 
send_event(&self, event: Event) -> bool;\n\n\n\n fn get_metadata<'a>(&self, key: &str) -> Option<&'a str>;\n\n\n\n fn get_pad_template(&self, name: &str) -> Option<PadTemplate>;\n\n fn get_pad_template_list(&self) -> Vec<PadTemplate>;\n\n\n\n #[cfg_attr(feature = \"cargo-clippy\", allow(too_many_arguments))]\n\n fn message_full<T: ::MessageErrorDomain>(\n\n &self,\n\n type_: ElementMessageType,\n\n code: T,\n\n message: Option<&str>,\n\n debug: Option<&str>,\n\n file: &str,\n\n function: &str,\n\n line: u32,\n", "file_path": "gstreamer/src/element.rs", "rank": 70, "score": 152022.51769296045 }, { "content": "pub fn convert_sample(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n) -> Result<gst::Sample, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::gst_video_convert_sample(\n\n sample.to_glib_none().0,\n\n caps.to_glib_none().0,\n\n timeout.to_glib(),\n\n &mut error,\n\n );\n\n\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 71, "score": 151569.26775069794 }, { "content": "pub trait GstObjectExtManual {\n\n fn connect_deep_notify<\n\n 'a,\n\n P: Into<Option<&'a str>>,\n\n F: Fn(&Self, &::Object, &glib::ParamSpec) + Send + Sync + 'static,\n\n >(\n\n &self,\n\n name: P,\n\n f: F,\n\n ) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<::Object> + IsA<glib::Object> + glib::value::SetValue> GstObjectExtManual for O {\n\n fn connect_deep_notify<\n\n 'a,\n\n P: Into<Option<&'a str>>,\n\n F: Fn(&Self, &::Object, &glib::ParamSpec) + Send + Sync + 'static,\n\n >(\n\n &self,\n\n name: P,\n", "file_path": "gstreamer/src/object.rs", "rank": 72, "score": 149591.25183543772 }, { "content": "pub trait GstObjectExt {\n\n //fn add_control_binding(&self, binding: /*Ignored*/&ControlBinding) -> bool;\n\n\n\n fn default_error<'a, P: Into<Option<&'a str>>>(&self, error: &Error, debug: P);\n\n\n\n //fn get_control_binding(&self, property_name: &str) -> /*Ignored*/Option<ControlBinding>;\n\n\n\n fn get_control_rate(&self) -> ClockTime;\n\n\n\n //fn get_g_value_array(&self, property_name: &str, timestamp: ClockTime, interval: ClockTime, values: /*Ignored*/&[&glib::Value]) -> bool;\n\n\n\n fn get_name(&self) -> String;\n\n\n\n fn get_parent(&self) -> Option<Object>;\n\n\n\n fn get_path_string(&self) -> String;\n\n\n\n //fn get_value(&self, property_name: &str, timestamp: ClockTime) -> /*Ignored*/Option<glib::Value>;\n\n\n\n //fn get_value_array<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, property_name: &str, timestamp: ClockTime, interval: ClockTime, n_values: u32, values: P) -> bool;\n", "file_path": "gstreamer/src/auto/object.rs", "rank": 73, "score": 149591.25183543772 }, { "content": "pub trait AggregatorExt {\n\n //#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n //fn get_allocator(&self, allocator: /*Ignored*/gst::Allocator, params: /*Ignored*/gst::AllocationParams);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn get_buffer_pool(&self) -> Option<gst::BufferPool>;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn get_latency(&self) -> gst::ClockTime;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn set_latency(&self, min_latency: gst::ClockTime, max_latency: gst::ClockTime);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn set_src_caps(&self, caps: &gst::Caps);\n\n\n\n fn get_property_start_time(&self) -> u64;\n\n\n\n fn 
set_property_start_time(&self, start_time: u64);\n\n\n", "file_path": "gstreamer-base/src/auto/aggregator.rs", "rank": 74, "score": 149591.25183543772 }, { "content": "pub trait AggregatorExtManual {\n\n fn finish_buffer(&self, buffer: gst::Buffer) -> gst::FlowReturn;\n\n}\n\n\n\nimpl<O: IsA<Aggregator>> AggregatorExtManual for O {\n\n fn finish_buffer(&self, buffer: gst::Buffer) -> gst::FlowReturn {\n\n unsafe {\n\n from_glib(ffi::gst_aggregator_finish_buffer(\n\n self.to_glib_none().0,\n\n buffer.into_ptr(),\n\n ))\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-base/src/aggregator.rs", "rank": 75, "score": 149591.25183543772 }, { "content": "pub trait TagSetterExt {\n\n //fn add_tag_valist(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n\n //fn add_tag_valist_values(&self, mode: TagMergeMode, tag: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n\n //fn add_tag_value(&self, mode: TagMergeMode, tag: &str, value: /*Ignored*/&glib::Value);\n\n\n\n //fn add_tag_values(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n //fn add_tags(&self, mode: TagMergeMode, tag: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn get_tag_list(&self) -> Option<TagList>;\n\n\n\n fn get_tag_merge_mode(&self) -> TagMergeMode;\n\n\n\n fn merge_tags(&self, list: &TagList, mode: TagMergeMode);\n\n\n\n fn reset_tags(&self);\n\n\n\n fn set_tag_merge_mode(&self, mode: TagMergeMode);\n", "file_path": "gstreamer/src/auto/tag_setter.rs", "rank": 76, "score": 147279.61283910155 }, { "content": "pub trait ChildProxyExt {\n\n fn child_added<P: IsA<glib::Object>>(&self, child: &P, name: &str);\n\n\n\n fn child_removed<P: IsA<glib::Object>>(&self, child: &P, name: &str);\n\n\n\n //fn get(&self, first_property_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn get_child_by_index(&self, index: u32) -> Option<glib::Object>;\n\n\n\n fn get_child_by_name(&self, name: &str) -> Option<glib::Object>;\n\n\n\n fn get_children_count(&self) -> u32;\n\n\n\n //fn get_property(&self, name: &str, value: /*Ignored*/glib::Value);\n\n\n\n //fn get_valist(&self, first_property_name: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n\n //fn lookup(&self, name: &str, pspec: /*Ignored*/glib::ParamSpec) -> Option<glib::Object>;\n\n\n\n //fn set(&self, first_property_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n", "file_path": "gstreamer/src/auto/child_proxy.rs", "rank": 77, "score": 147279.61283910155 }, { "content": "pub trait DeviceProviderExtManual {\n\n fn get_metadata<'a>(&self, key: &str) -> Option<&'a str>;\n\n}\n\n\n\nimpl<O: IsA<DeviceProvider>> DeviceProviderExtManual for O {\n\n fn get_metadata<'a>(&self, key: &str) -> Option<&'a str> {\n\n unsafe {\n\n let klass = (*(self.to_glib_none().0 as *mut gobject_ffi::GTypeInstance)).g_class\n\n as *mut ffi::GstDeviceProviderClass;\n\n\n\n let ptr = ffi::gst_device_provider_class_get_metadata(klass, key.to_glib_none().0);\n\n\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(CStr::from_ptr(ptr).to_str().unwrap())\n\n }\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/device_provider.rs", "rank": 78, "score": 147279.61283910155 }, { "content": "pub trait DeviceMonitorExt {\n\n fn add_filter<'a, 'b, P: Into<Option<&'a str>>, Q: Into<Option<&'b Caps>>>(&self, classes: P, caps: Q) -> u32;\n\n\n\n fn get_bus(&self) -> Bus;\n\n\n\n fn 
get_devices(&self) -> Vec<Device>;\n\n\n\n fn get_providers(&self) -> Vec<String>;\n\n\n\n fn get_show_all_devices(&self) -> bool;\n\n\n\n fn remove_filter(&self, filter_id: u32) -> bool;\n\n\n\n fn set_show_all_devices(&self, show_all: bool);\n\n\n\n fn start(&self) -> Result<(), glib::error::BoolError>;\n\n\n\n fn stop(&self);\n\n\n\n fn get_property_show_all(&self) -> bool;\n", "file_path": "gstreamer/src/auto/device_monitor.rs", "rank": 79, "score": 147279.61283910155 }, { "content": "pub trait DeviceProviderExt {\n\n fn can_monitor(&self) -> bool;\n\n\n\n fn device_add(&self, device: &Device);\n\n\n\n fn device_remove(&self, device: &Device);\n\n\n\n fn get_bus(&self) -> Bus;\n\n\n\n fn get_devices(&self) -> Vec<Device>;\n\n\n\n fn get_factory(&self) -> Option<DeviceProviderFactory>;\n\n\n\n fn get_hidden_providers(&self) -> Vec<String>;\n\n\n\n fn hide_provider(&self, name: &str);\n\n\n\n fn start(&self) -> Result<(), glib::error::BoolError>;\n\n\n\n fn stop(&self);\n", "file_path": "gstreamer/src/auto/device_provider.rs", "rank": 80, "score": 147279.61283910155 }, { "content": "pub trait WindowsBusExtManual {\n\n fn get_pollfd(&self) -> windows::io::RawHandle;\n\n}\n\n\n\nimpl WindowsBusExtManual for Bus {\n\n /// This is supported on **Windows** only.\n\n fn get_pollfd(&self) -> windows::io::RawHandle {\n\n #[cfg(windows)]\n\n unsafe {\n\n let mut pollfd: glib_ffi::GPollFD = mem::zeroed();\n\n ffi::gst_bus_get_pollfd(self.to_glib_none().0, &mut pollfd);\n\n\n\n pollfd.fd as *mut _\n\n }\n\n\n\n #[cfg(all(not(windows), feature = \"dox\"))]\n\n windows::io::RawHandle {}\n\n }\n\n}\n", "file_path": "gstreamer/src/bus_windows.rs", "rank": 81, "score": 147279.61283910155 }, { "content": "pub trait GObjectExtManualGst {\n\n fn set_property_from_str(&self, name: &str, value: &str);\n\n}\n\n\n\nimpl<O: IsA<glib::Object>> GObjectExtManualGst for O {\n\n fn set_property_from_str(&self, name: &str, value: &str) {\n\n unsafe {\n\n ffi::gst_util_set_object_arg(\n\n self.to_glib_none().0,\n\n name.to_glib_none().0,\n\n value.to_glib_none().0,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/gobject.rs", "rank": 82, "score": 147279.61283910155 }, { "content": "pub trait GhostPadExt {\n\n fn get_target(&self) -> Option<Pad>;\n\n\n\n fn set_target<'a, P: IsA<Pad> + 'a, Q: Into<Option<&'a P>>>(&self, newtarget: Q) -> Result<(), glib::error::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<GhostPad>> GhostPadExt for O {\n\n fn get_target(&self) -> Option<Pad> {\n\n unsafe {\n\n from_glib_full(ffi::gst_ghost_pad_get_target(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn set_target<'a, P: IsA<Pad> + 'a, Q: Into<Option<&'a P>>>(&self, newtarget: Q) -> Result<(), glib::error::BoolError> {\n\n let newtarget = newtarget.into();\n\n let newtarget = newtarget.to_glib_none();\n\n unsafe {\n\n glib::error::BoolError::from_glib(ffi::gst_ghost_pad_set_target(self.to_glib_none().0, newtarget.0), \"Failed to set target\")\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/ghost_pad.rs", "rank": 83, "score": 147279.61283910155 }, { "content": "pub trait UnixBusExtManual {\n\n fn get_pollfd(&self) -> unix::io::RawFd;\n\n}\n\n\n\nimpl UnixBusExtManual for Bus {\n\n /// This is supported on **Unix** only.\n\n fn get_pollfd(&self) -> unix::io::RawFd {\n\n #[cfg(unix)]\n\n unsafe {\n\n let mut pollfd: glib_ffi::GPollFD = mem::zeroed();\n\n ffi::gst_bus_get_pollfd(self.to_glib_none().0, &mut pollfd);\n\n\n\n pollfd.fd\n\n }\n\n\n\n #[cfg(all(not(unix), feature = \"dox\"))]\n\n unix::io::RawFd {}\n\n }\n\n}\n", "file_path": 
"gstreamer/src/bus_unix.rs", "rank": 84, "score": 147279.61283910155 }, { "content": "pub trait URIHandlerExt {\n\n fn get_protocols(&self) -> Vec<String>;\n\n\n\n fn get_uri(&self) -> Option<String>;\n\n\n\n fn get_uri_type(&self) -> URIType;\n\n\n\n fn set_uri(&self, uri: &str) -> Result<(), Error>;\n\n}\n\n\n\nimpl<O: IsA<URIHandler>> URIHandlerExt for O {\n\n fn get_protocols(&self) -> Vec<String> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_none(ffi::gst_uri_handler_get_protocols(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn get_uri(&self) -> Option<String> {\n\n unsafe {\n\n from_glib_full(ffi::gst_uri_handler_get_uri(self.to_glib_none().0))\n", "file_path": "gstreamer/src/auto/uri_handler.rs", "rank": 85, "score": 147279.61283910155 }, { "content": "pub trait TocSetterExt {\n\n fn get_toc(&self) -> Option<Toc>;\n\n\n\n fn reset(&self);\n\n\n\n fn set_toc<'a, P: Into<Option<&'a Toc>>>(&self, toc: P);\n\n}\n\n\n\nimpl<O: IsA<TocSetter>> TocSetterExt for O {\n\n fn get_toc(&self) -> Option<Toc> {\n\n unsafe {\n\n from_glib_full(ffi::gst_toc_setter_get_toc(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn reset(&self) {\n\n unsafe {\n\n ffi::gst_toc_setter_reset(self.to_glib_none().0);\n\n }\n\n }\n\n\n\n fn set_toc<'a, P: Into<Option<&'a Toc>>>(&self, toc: P) {\n\n let toc = toc.into();\n\n let toc = toc.to_glib_none();\n\n unsafe {\n\n ffi::gst_toc_setter_set_toc(self.to_glib_none().0, toc.0);\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/toc_setter.rs", "rank": 86, "score": 147279.61283910155 }, { "content": "pub trait ChildProxyExtManual {\n\n fn get_child_property(&self, name: &str) -> Option<glib::Value>;\n\n fn set_child_property(&self, name: &str, value: &glib::ToValue) -> Result<(), glib::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<ChildProxy>> ChildProxyExtManual for O {\n\n fn get_child_property(&self, name: &str) -> Option<glib::Value> {\n\n unsafe {\n\n let found: bool = from_glib(ffi::gst_child_proxy_lookup(\n\n self.to_glib_none().0,\n\n name.to_glib_none().0,\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n ));\n\n if !found {\n\n return None;\n\n }\n\n\n\n let mut value = glib::Value::uninitialized();\n\n ffi::gst_child_proxy_get_property(\n", "file_path": "gstreamer/src/child_proxy.rs", "rank": 87, "score": 147279.61283910155 }, { "content": "pub trait PluginFeatureExt {\n\n fn check_version(&self, min_major: u32, min_minor: u32, min_micro: u32) -> bool;\n\n\n\n fn get_plugin(&self) -> Option<Plugin>;\n\n\n\n fn get_plugin_name(&self) -> Option<String>;\n\n\n\n fn get_rank(&self) -> u32;\n\n\n\n fn load(&self) -> Option<PluginFeature>;\n\n\n\n fn set_rank(&self, rank: u32);\n\n}\n\n\n\nimpl<O: IsA<PluginFeature>> PluginFeatureExt for O {\n\n fn check_version(&self, min_major: u32, min_minor: u32, min_micro: u32) -> bool {\n\n unsafe {\n\n from_glib(ffi::gst_plugin_feature_check_version(self.to_glib_none().0, min_major, min_minor, min_micro))\n\n }\n\n }\n", "file_path": "gstreamer/src/auto/plugin_feature.rs", "rank": 88, "score": 147279.61283910155 }, { "content": "pub trait SystemClockExt {\n\n fn get_property_clock_type(&self) -> ClockType;\n\n\n\n fn set_property_clock_type(&self, clock_type: ClockType);\n\n\n\n fn connect_property_clock_type_notify<F: Fn(&Self) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<SystemClock> + IsA<glib::object::Object>> SystemClockExt for O {\n\n fn get_property_clock_type(&self) -> ClockType {\n\n unsafe {\n\n let mut value = Value::from_type(<ClockType as StaticType>::static_type());\n\n 
gobject_ffi::g_object_get_property(self.to_glib_none().0, \"clock-type\".to_glib_none().0, value.to_glib_none_mut().0);\n\n value.get().unwrap()\n\n }\n\n }\n\n\n\n fn set_property_clock_type(&self, clock_type: ClockType) {\n\n unsafe {\n\n gobject_ffi::g_object_set_property(self.to_glib_none().0, \"clock-type\".to_glib_none().0, Value::from(&clock_type).to_glib_none().0);\n", "file_path": "gstreamer/src/auto/system_clock.rs", "rank": 89, "score": 147279.61283910155 }, { "content": "pub trait ProxyPadExt {\n\n fn get_internal(&self) -> Option<ProxyPad>;\n\n}\n\n\n\nimpl<O: IsA<ProxyPad>> ProxyPadExt for O {\n\n fn get_internal(&self) -> Option<ProxyPad> {\n\n unsafe {\n\n from_glib_full(ffi::gst_proxy_pad_get_internal(self.to_glib_none().0))\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/proxy_pad.rs", "rank": 90, "score": 147279.61283910155 }, { "content": "pub trait TagSetterExtManual {\n\n fn add<'a, T: Tag<'a>>(&self, value: T::TagType, mode: TagMergeMode)\n\n where\n\n T::TagType: ToSendValue;\n\n}\n\n\n\nimpl<O: IsA<TagSetter>> TagSetterExtManual for O {\n\n fn add<'a, T: Tag<'a>>(&self, value: T::TagType, mode: TagMergeMode)\n\n where\n\n T::TagType: ToSendValue,\n\n {\n\n unsafe {\n\n let v = value.to_send_value();\n\n\n\n ffi::gst_tag_setter_add_tag_value(\n\n self.to_glib_none().0,\n\n mode.to_glib(),\n\n T::tag_name().to_glib_none().0,\n\n v.to_glib_none().0,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/tag_setter.rs", "rank": 91, "score": 147279.61283910155 }, { "content": "pub fn debug_print_stack_trace() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gst_debug_print_stack_trace();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 92, "score": 146827.6738059734 }, { "content": "pub fn reorder_channels(\n\n data: &mut [u8],\n\n format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib::BoolError(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [ffi::GstAudioChannelPosition; 64] = array_init::array_init_copy(|i| {\n\n if i >= from_len as usize {\n\n ffi::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 93, "score": 146827.6738059734 }, { "content": "pub fn center_video_rectangle(\n\n src: &VideoRectangle,\n\n dst: &VideoRectangle,\n\n scale: bool,\n\n) -> VideoRectangle {\n\n let mut result = ffi::GstVideoRectangle {\n\n x: 0,\n\n y: 0,\n\n w: 0,\n\n h: 0,\n\n };\n\n let src_rect = ffi::GstVideoRectangle {\n\n x: src.x,\n\n y: src.y,\n\n w: src.w,\n\n h: src.h,\n\n };\n\n let dst_rect = ffi::GstVideoRectangle {\n\n x: dst.x,\n\n y: dst.y,\n\n w: dst.w,\n\n h: dst.h,\n\n };\n\n unsafe {\n\n ffi::gst_video_sink_center_rect(src_rect, dst_rect, &mut result, scale.to_glib());\n\n }\n\n VideoRectangle::new(result.x, result.y, result.w, result.h)\n\n}\n", "file_path": "gstreamer-video/src/video_rectangle.rs", "rank": 94, "score": 146827.6738059734 }, { "content": "struct CapsSome(Caps);\n\n\n", "file_path": "gstreamer/src/caps_serde.rs", "rank": 95, "score": 146325.95043933118 }, { "content": "pub trait DiscovererInfoExt {\n\n fn copy(&self) -> DiscovererInfo;\n\n\n\n fn get_audio_streams(&self) -> Vec<DiscovererStreamInfo>;\n\n\n\n fn get_container_streams(&self) -> 
Vec<DiscovererStreamInfo>;\n\n\n\n fn get_duration(&self) -> gst::ClockTime;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn get_live(&self) -> bool;\n\n\n\n fn get_misc(&self) -> Option<gst::Structure>;\n\n\n\n fn get_missing_elements_installer_details(&self) -> Vec<String>;\n\n\n\n fn get_result(&self) -> DiscovererResult;\n\n\n\n fn get_seekable(&self) -> bool;\n\n\n", "file_path": "gstreamer-pbutils/src/auto/discoverer_info.rs", "rank": 96, "score": 145078.98360413563 }, { "content": "pub trait BaseTransformExt {\n\n //fn get_allocator(&self, allocator: /*Ignored*/gst::Allocator, params: /*Ignored*/gst::AllocationParams);\n\n\n\n fn get_buffer_pool(&self) -> Option<gst::BufferPool>;\n\n\n\n fn is_in_place(&self) -> bool;\n\n\n\n fn is_passthrough(&self) -> bool;\n\n\n\n fn is_qos_enabled(&self) -> bool;\n\n\n\n fn reconfigure_sink(&self);\n\n\n\n fn reconfigure_src(&self);\n\n\n\n fn set_gap_aware(&self, gap_aware: bool);\n\n\n\n fn set_in_place(&self, in_place: bool);\n\n\n\n fn set_passthrough(&self, passthrough: bool);\n", "file_path": "gstreamer-base/src/auto/base_transform.rs", "rank": 97, "score": 145078.98360413563 }, { "content": "pub trait BaseSinkExt {\n\n //fn do_preroll(&self, obj: /*Ignored*/&gst::MiniObject) -> gst::FlowReturn;\n\n\n\n fn get_blocksize(&self) -> u32;\n\n\n\n #[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\n fn get_drop_out_of_segment(&self) -> bool;\n\n\n\n fn get_last_sample(&self) -> Option<gst::Sample>;\n\n\n\n fn get_latency(&self) -> gst::ClockTime;\n\n\n\n fn get_max_bitrate(&self) -> u64;\n\n\n\n fn get_max_lateness(&self) -> i64;\n\n\n\n fn get_render_delay(&self) -> gst::ClockTime;\n\n\n\n fn get_sync(&self) -> bool;\n\n\n", "file_path": "gstreamer-base/src/auto/base_sink.rs", "rank": 98, "score": 145078.98360413563 }, { "content": "pub trait BaseSrcExt {\n\n //fn get_allocator(&self, allocator: /*Ignored*/gst::Allocator, params: /*Ignored*/gst::AllocationParams);\n\n\n\n fn get_blocksize(&self) -> u32;\n\n\n\n fn get_buffer_pool(&self) -> Option<gst::BufferPool>;\n\n\n\n fn get_do_timestamp(&self) -> bool;\n\n\n\n fn is_async(&self) -> bool;\n\n\n\n fn is_live(&self) -> bool;\n\n\n\n fn new_seamless_segment(&self, start: i64, stop: i64, time: i64) -> bool;\n\n\n\n fn query_latency(&self) -> Option<(bool, gst::ClockTime, gst::ClockTime)>;\n\n\n\n fn set_async(&self, async: bool);\n\n\n\n fn set_automatic_eos(&self, automatic_eos: bool);\n", "file_path": "gstreamer-base/src/auto/base_src.rs", "rank": 99, "score": 145078.98360413563 } ]
Rust
src/io/obj.rs
cpheinrich/web-geo-viewer
e835b0667e57697e0b8005c08c9dcd36222811ee
/* Copyright 2020 Martin Buck Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ use log::*; use rust_3d::*; use std::{ fmt, io::{BufRead, Error as ioError}, }; use super::utils::*; use super::{MaterialInfo, MaterialSurface}; pub fn load_obj_mesh<EM, P, R>( read: &mut R, mesh: &mut EM, material_info: &mut MaterialInfo, ) -> ObjResult<()> where EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>, P: IsBuildable3D + Clone, R: BufRead, { let mut line_buffer = Vec::new(); let mut i_line = 0; let mut mtl_name = "NotSpecified".to_string(); material_info .surfaces .insert(mtl_name.clone(), MaterialSurface::new()); while let Ok(line) = fetch_line(read, &mut line_buffer) { i_line += 1; if line.starts_with(b"usemtl ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; mtl_name = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; material_info .surfaces .insert(mtl_name.clone(), MaterialSurface::new()); } if line.starts_with(b"mtllib ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let lib_name: String = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; material_info.material_libs.insert(lib_name.clone()); } if line.starts_with(b"v ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let z = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; mesh.add_vertex(P::new(x, y, z)); } else if line.starts_with(b"vt ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .or(Some(0f64)) .unwrap(); let z = words .next() .and_then(|w| from_ascii(w)) .or(Some(0f64)) .unwrap(); material_info.uv.push_d(Point3D::new(x, y, z)); } else if line.starts_with(b"f ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let mut tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let a: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_at: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_at = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], 
b'/')).or(None); } tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let b: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_bt: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_bt = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); } tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let c: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_ct: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_ct = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); } let mut maybe_d: Option<usize> = None; let mut maybe_dt: Option<usize> = None; match words.next() { Some(tmp) => { let tmp_str = until_bytes(tmp, b'/'); if tmp_str.len() > 0 { maybe_d = from_ascii(until_bytes(&tmp_str, b'/')).or(None); } if tmp_str.len() + 1 < tmp.len() { maybe_dt = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); } if let Some(d) = maybe_d { let face = Face3 { a: VId { val: a - 1 }, b: VId { val: c - 1 }, c: VId { val: d - 1 }, }; material_info .surfaces .get_mut(&mtl_name) .unwrap() .faces .insert(face); } if let (Some(at), Some(ct), Some(d), Some(dt)) = (maybe_at, maybe_ct, maybe_d, maybe_dt) { material_info .surfaces .get_mut(&mtl_name) .unwrap() .uvs .insert( Face3 { a: VId { val: a - 1 }, b: VId { val: c - 1 }, c: VId { val: d - 1 }, }, Face3 { a: VId { val: at - 1 }, b: VId { val: ct - 1 }, c: VId { val: dt - 1 }, }, ); } } None => {} }; material_info .surfaces .get_mut(&mtl_name) .unwrap() .faces .insert(Face3 { a: VId { val: a - 1 }, b: VId { val: b - 1 }, c: VId { val: c - 1 }, }); if let (Some(at), Some(bt), Some(ct)) = (maybe_at, maybe_bt, maybe_ct) { material_info .surfaces .get_mut(&mtl_name) .unwrap() .uvs .insert( Face3 { a: VId { val: a - 1 }, b: VId { val: b - 1 }, c: VId { val: c - 1 }, }, Face3 { a: VId { val: at - 1 }, b: VId { val: bt - 1 }, c: VId { val: ct - 1 }, }, ); } if let Some(_next) = words.next() { return Err(ObjError::NotTriangularMesh(i_line)); } } } for surface in &material_info.surfaces { for face in &surface.1.faces { match mesh.try_add_connection(face.a, face.b, face.c).or(Err( ObjError::InvalidMeshIndices(face.a.val, face.b.val, face.c.val), )) { Ok(_) => {} Err(_) => { info!( "Warning, face {},{},{} could not be added.", face.a.val, face.b.val, face.c.val ); } } } } Ok(()) } pub fn load_obj_points<IP, P, R>(read: &mut R, ip: &mut IP) -> ObjResult<()> where IP: IsPushable<P>, P: IsBuildable3D, R: BufRead, { let mut line_buffer = Vec::new(); let mut i_line = 0; while let Ok(line) = fetch_line(read, &mut line_buffer) { i_line += 1; if line.starts_with(b"v ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let z = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; ip.push(P::new(x, y, z)); } } Ok(()) } pub enum ObjError { AccessFile, InvalidMeshIndices(usize, usize, usize), LineParse(usize), NotTriangularMesh(usize), } pub type ObjResult<T> = std::result::Result<T, ObjError>; impl fmt::Debug for ObjError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::AccessFile => write!(f, "Unable to access file"), Self::LineParse(x) => write!(f, "Unable to parse line {}", x), 
Self::InvalidMeshIndices(x, y, z) => { write!(f, "File contains invalid mesh indices: {}, {}, {}", x, y, z) } Self::NotTriangularMesh(x) => write!( f, "File contains face with more than 3 sets of indices on line {}", x ), } } } impl From<ioError> for ObjError { fn from(_error: ioError) -> Self { ObjError::AccessFile } }
/* Copyright 2020 Martin Buck Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ use log::*; use rust_3d::*; use std::{ fmt, io::{BufRead, Error as ioError}, }; use super::utils::*; use super::{MaterialInfo, MaterialSurface}; pub fn load_obj_mesh<EM, P, R>( read: &mut R, mesh: &mut EM, material_info: &mut MaterialInfo, ) -> ObjResult<()> where EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>, P: IsBuildable3D + Clone, R: BufRead, { let mut line_buffer = Vec::new(); let mut i_line = 0; let mut mtl_name = "NotSpecified".to_string(); material_info .surfaces .insert(mtl_name.clone(), MaterialSurface::new()); while let Ok(line) = fetch_line(read, &mut line_buffer) { i_line += 1; if line.starts_with(b"usemtl ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; mtl_name = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; material_info .surfaces .insert(mtl_name.clone(), MaterialSurface::new()); } if line.starts_with(b"mtllib ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let lib_name: String = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; material_info.material_libs.insert(lib_name.clone()); } if line.starts_with(b"v ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let z = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; mesh.add_vertex(P::new(x, y, z)); } else if line.starts_with(b"vt ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .or(Some(0f64)) .unwrap(); let z = words .next() .and_then(|w| from_ascii(w)) .or(Some(0f64)) .unwrap(); material_info.uv.push_d(Point3D::new(x, y, z)); } else if line.starts_with(b"f ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let mut tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let a: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_at: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_at = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], 
b'/')).or(None); } tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let b: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_bt: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_bt = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); } tmp = words.next().ok_or(ObjError::LineParse(i_line))?; let tmp_str = until_bytes(tmp, b'/'); let c: usize = from_ascii(tmp_str).ok_or(ObjError::LineParse(i_line))?; let mut maybe_ct: Option<usize> = None; if tmp_str.len() + 1 < tmp.len() { maybe_ct = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); } let mut maybe_d: Option<usize> = None; let mut maybe_dt: Option<usize> = None; match words.next() { Some(tmp) => { let tmp_str = until_bytes(tmp, b'/'); if tmp_str.len() > 0 { maybe_d = from_ascii(until_bytes(&tmp_str, b'/')).or(None); } if tmp_str.len() + 1 < tmp.len() { maybe_dt = from_ascii(until_bytes(&tmp[(tmp_str.len() + 1)..], b'/')).or(None); }
} } } } Ok(()) } pub fn load_obj_points<IP, P, R>(read: &mut R, ip: &mut IP) -> ObjResult<()> where IP: IsPushable<P>, P: IsBuildable3D, R: BufRead, { let mut line_buffer = Vec::new(); let mut i_line = 0; while let Ok(line) = fetch_line(read, &mut line_buffer) { i_line += 1; if line.starts_with(b"v ") { let mut words = to_words_skip_empty(line); words.next().ok_or(ObjError::LineParse(i_line))?; let x = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let y = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; let z = words .next() .and_then(|w| from_ascii(w)) .ok_or(ObjError::LineParse(i_line))?; ip.push(P::new(x, y, z)); } } Ok(()) } pub enum ObjError { AccessFile, InvalidMeshIndices(usize, usize, usize), LineParse(usize), NotTriangularMesh(usize), } pub type ObjResult<T> = std::result::Result<T, ObjError>; impl fmt::Debug for ObjError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::AccessFile => write!(f, "Unable to access file"), Self::LineParse(x) => write!(f, "Unable to parse line {}", x), Self::InvalidMeshIndices(x, y, z) => { write!(f, "File contains invalid mesh indices: {}, {}, {}", x, y, z) } Self::NotTriangularMesh(x) => write!( f, "File contains face with more than 3 sets of indices on line {}", x ), } } } impl From<ioError> for ObjError { fn from(_error: ioError) -> Self { ObjError::AccessFile } }
if let Some(d) = maybe_d { let face = Face3 { a: VId { val: a - 1 }, b: VId { val: c - 1 }, c: VId { val: d - 1 }, }; material_info .surfaces .get_mut(&mtl_name) .unwrap() .faces .insert(face); } if let (Some(at), Some(ct), Some(d), Some(dt)) = (maybe_at, maybe_ct, maybe_d, maybe_dt) { material_info .surfaces .get_mut(&mtl_name) .unwrap() .uvs .insert( Face3 { a: VId { val: a - 1 }, b: VId { val: c - 1 }, c: VId { val: d - 1 }, }, Face3 { a: VId { val: at - 1 }, b: VId { val: ct - 1 }, c: VId { val: dt - 1 }, }, ); } } None => {} }; material_info .surfaces .get_mut(&mtl_name) .unwrap() .faces .insert(Face3 { a: VId { val: a - 1 }, b: VId { val: b - 1 }, c: VId { val: c - 1 }, }); if let (Some(at), Some(bt), Some(ct)) = (maybe_at, maybe_bt, maybe_ct) { material_info .surfaces .get_mut(&mtl_name) .unwrap() .uvs .insert( Face3 { a: VId { val: a - 1 }, b: VId { val: b - 1 }, c: VId { val: c - 1 }, }, Face3 { a: VId { val: at - 1 }, b: VId { val: bt - 1 }, c: VId { val: ct - 1 }, }, ); } if let Some(_next) = words.next() { return Err(ObjError::NotTriangularMesh(i_line)); } } } for surface in &material_info.surfaces { for face in &surface.1.faces { match mesh.try_add_connection(face.a, face.b, face.c).or(Err( ObjError::InvalidMeshIndices(face.a.val, face.b.val, face.c.val), )) { Ok(_) => {} Err(_) => { info!( "Warning, face {},{},{} could not be added.", face.a.val, face.b.val, face.c.val );
random
[ { "content": "/// Loads an IsMesh3D from the off file format\n\npub fn load_off_mesh<EM, P, R>(read: &mut R, mesh: &mut EM) -> OffResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n{\n\n let mut line_buffer = Vec::new();\n\n let mut i_line = 0;\n\n\n\n let mut off_seen = false;\n\n let mut counts = None;\n\n\n\n while let Ok(line) = fetch_line(read, &mut line_buffer) {\n\n i_line += 1;\n\n\n\n if !off_seen && line.starts_with(b\"OFF\") {\n\n off_seen = true;\n\n continue;\n\n }\n\n\n", "file_path": "src/io/off.rs", "rank": 0, "score": 323430.3918512946 }, { "content": "#[inline(always)]\n\npub fn skip_bytes<R>(read: &mut R, n_bytes: usize) -> std::io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buffer = [0u8; 1];\n\n for _ in 0..n_bytes {\n\n let _ = read.read_exact(&mut buffer)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Skip number of elements\n", "file_path": "src/io/utils.rs", "rank": 1, "score": 238809.35433665093 }, { "content": "/// Loads IsPushable<Is3D> from the .off file format\n\npub fn load_off_points<IP, P, R>(read: &mut R, ip: &mut IP) -> OffResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let mut line_buffer = Vec::new();\n\n let mut i_line = 0;\n\n\n\n let mut off_seen = false;\n\n let mut n_vertices = None;\n\n let mut n_added = 0;\n\n\n\n while let Ok(line) = fetch_line(read, &mut line_buffer) {\n\n i_line += 1;\n\n\n\n if !off_seen && line.starts_with(b\"OFF\") {\n\n off_seen = true;\n\n continue;\n\n }\n", "file_path": "src/io/off.rs", "rank": 3, "score": 227814.92701315827 }, { "content": "/// Loads materials from a .mtl file\n\npub fn load_mtl<R>(read: &mut R) -> MtlResult<HashMap<String, Material>>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut line_buffer = Vec::new();\n\n let mut i_line = 0;\n\n let mut mtl_name: Option<String> = None;\n\n let mut result: HashMap<String, Material> = HashMap::new();\n\n while let Ok(line) = fetch_line(read, &mut line_buffer) {\n\n i_line += 1;\n\n if line.starts_with(b\"newmtl \") {\n\n // skip \"newmtl\"\n\n let mut words = to_words_skip_empty(line);\n\n words.next().ok_or(MtlError::LineParse(i_line))?;\n\n if let Some(next_word) = words.next().and_then(|w| from_ascii(w)) {\n\n mtl_name = Some(next_word);\n\n result.insert(mtl_name.clone().unwrap(), Material::new());\n\n }\n\n } else if line.starts_with(b\"map_Kd \") {\n\n let mut words = to_words_skip_empty(line);\n", "file_path": "src/io/mtl.rs", "rank": 5, "score": 217195.13771809978 }, { "content": "/// Loads an IsMesh3D from the .ply file format\n\npub fn load_ply<EM, P, R>(\n\n read: &mut R,\n\n mesh: &mut EM,\n\n material: &mut Material,\n\n material_info: &mut MaterialInfo,\n\n name: &String,\n\n) -> PlyResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n{\n\n let mut line_buffer = Vec::new();\n\n let mut i_line = 0;\n\n let surface = MaterialSurface::new();\n\n material_info.surfaces.insert(name.clone(), surface);\n\n\n\n let header = load_header(read, &mut line_buffer, &mut i_line)?;\n\n material.texture_name = header.texture_name.clone();\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 6, "score": 203082.31387108704 }, { "content": "#[inline(always)]\n\npub fn read_face_type<BR, R>(read: &mut R, t: FaceType) -> PlyResult<usize>\n\nwhere\n\n BR: IsByteReader,\n\n R: Read,\n\n{\n\n match t {\n\n FaceType::Char => Ok(BR::read_i8(read)? 
as usize),\n\n FaceType::UChar => Ok(BR::read_u8(read)? as usize),\n\n FaceType::Short => Ok(BR::read_i16(read)? as usize),\n\n FaceType::UShort => Ok(BR::read_u16(read)? as usize),\n\n FaceType::Int => Ok(BR::read_i32(read)? as usize),\n\n FaceType::UInt => Ok(BR::read_u32(read)? as usize),\n\n _ => Err(PlyError::InvalidFaceType),\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 7, "score": 200767.85424448625 }, { "content": "#[inline(always)]\n\npub fn read_vertex_type<BR, R>(read: &mut R, t: VertexType) -> PlyResult<f64>\n\nwhere\n\n BR: IsByteReader,\n\n R: Read,\n\n{\n\n Ok(match t {\n\n VertexType::Float => BR::read_f32(read)? as f64,\n\n VertexType::Double => BR::read_f64(read)?,\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 8, "score": 181349.06933940266 }, { "content": "#[inline(always)]\n\npub fn fetch_line<'a, R>(read: &mut R, line_buffer: &'a mut Vec<u8>) -> FetchLineResult<&'a [u8]>\n\nwhere\n\n R: BufRead,\n\n{\n\n line_buffer.clear();\n\n let n_read = read.read_until(b'\\n', line_buffer)?;\n\n if n_read == 0 {\n\n return Err(FetchLineError);\n\n }\n\n\n\n // We must drop the '\\n' we read_until for sure\n\n // And might also have to drop additional whitespace\n\n let mut ignore_end = 1;\n\n for i in 1..line_buffer.len() {\n\n if (line_buffer[line_buffer.len() - i - 1] as char).is_whitespace() {\n\n ignore_end += 1;\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/io/utils.rs", "rank": 9, "score": 176033.91338870264 }, { "content": "fn load_header<R>(read: &mut R, line_buffer: &mut Vec<u8>, i_line: &mut usize) -> PlyResult<Header>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut vertex_order = [Xyz::X, Xyz::X, Xyz::X];\n\n let mut i_vertex_order = 0;\n\n\n\n let mut ply_found = false;\n\n let mut read_state = HeaderReadState::Meta;\n\n let mut opt_format = None;\n\n let mut opt_n_vertices: Option<usize> = None;\n\n let mut opt_n_faces: Option<usize> = None;\n\n\n\n let mut opt_fst_type = None;\n\n let mut opt_snd_type = None;\n\n let mut opt_third_type = None;\n\n let mut n_types_found = 0;\n\n let mut vertex_before = BytesWords::default();\n\n let mut vertex_between_first_snd = BytesWords::default();\n\n let mut vertex_between_snd_third = BytesWords::default();\n", "file_path": "src/io/ply/load.rs", "rank": 10, "score": 171214.44435496168 }, { "content": "fn load_ascii<EM, P, R>(\n\n read: &mut R,\n\n mesh: &mut EM,\n\n header: &Header,\n\n line_buffer: &mut Vec<u8>,\n\n i_line: &mut usize,\n\n material_info: &mut MaterialInfo,\n\n name: &String,\n\n) -> PlyResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n{\n\n while let Ok(line) = fetch_line(read, line_buffer) {\n\n *i_line += 1;\n\n\n\n if header.n_vertices > mesh.num_vertices() {\n\n let mut words = to_words_skip_empty(line);\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 11, "score": 170370.97573858604 }, { "content": "fn load_binary<BR, EM, P, R>(\n\n read: &mut R,\n\n mesh: &mut EM,\n\n header: &Header,\n\n material_info: &mut MaterialInfo,\n\n name: &String,\n\n) -> PlyResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: Read,\n\n BR: IsByteReader,\n\n{\n\n for _ in 0..header.n_vertices {\n\n skip_bytes(read, 
header.vertex_format.before.bytes)?;\n\n\n\n let first = read_vertex_type::<BR, _>(read, header.vertex_format.first)?;\n\n\n\n skip_bytes(read, header.vertex_format.between_first_snd.bytes)?;\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 12, "score": 163650.02640364625 }, { "content": "/// Saves an IsMesh3D in the ASCII .ply file format\n\npub fn save_ply_ascii<M, P, W>(write: &mut W, mesh: &M) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let header = \"ply\\n\".to_string()\n\n + \"format ascii 1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &mesh.num_vertices().to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n\n + \"property float y\\n\"\n\n + \"property float z\\n\"\n\n + \"element face \"\n\n + &mesh.num_faces().to_string()\n\n + \"\\n\"\n\n + \"property list uchar uint vertex_indices\\n\"\n\n + \"end_header\\n\";\n", "file_path": "src/io/ply/save.rs", "rank": 13, "score": 157463.75637735537 }, { "content": "/// Saves an IsMesh3D in the binary .ply file format\n\npub fn save_ply_binary<M, P, W>(write: &mut W, mesh: &M, precision: &Precision) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let header = match precision {\n\n Precision::P32 => {\n\n \"ply\\n\".to_string()\n\n + \"format binary_big_endian 1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &mesh.num_vertices().to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n\n + \"property float y\\n\"\n\n + \"property float z\\n\"\n\n + \"element face \"\n\n + &mesh.num_faces().to_string()\n\n + \"\\n\"\n", "file_path": "src/io/ply/save.rs", "rank": 14, "score": 148369.8508730583 }, { "content": "#[inline(always)]\n\npub fn skip_n<I>(i: &mut I, n: usize)\n\nwhere\n\n I: Iterator,\n\n{\n\n for _ in 0..n {\n\n i.next();\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Reads a FromStr from ASCII bytes\n", "file_path": "src/io/utils.rs", "rank": 15, "score": 143428.76253843826 }, { "content": "/// Saves an IsMesh3D in the ASCII .ply file format with additional colors\n\npub fn save_ply_ascii_colored<M, P, W>(write: &mut W, mesh: &M, colors: &Vec<Rgb>) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let n_vertices = mesh.num_vertices();\n\n let n_faces = mesh.num_faces();\n\n\n\n if n_vertices != colors.len() {\n\n return Err(PlyError::ColorArrayIncorrectLength);\n\n }\n\n\n\n let header = \"ply\\n\".to_string()\n\n + \"format ascii 1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &n_vertices.to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n", "file_path": "src/io/ply/save.rs", "rank": 16, "score": 142136.39063591236 }, { "content": "pub fn update(model: &mut Model, msg: &Msg) -> ShouldRender {\n\n match msg {\n\n Msg::Files(files) => {\n\n for file in files.into_iter() {\n\n let task = {\n\n let callback = model.link.callback(Msg::Loaded);\n\n model.reader.read_file(file.clone(), callback).unwrap()\n\n };\n\n model.tasks.push(task);\n\n }\n\n }\n\n Msg::Loaded(file) => {\n\n let path = Path::new(&file.name);\n\n info!(\"Loading file {}\", file.name);\n\n if let Some(ext) = path.extension() {\n\n let maybe_image_format =\n\n string_to_format(&ext.to_os_string().into_string().unwrap().to_lowercase());\n\n if let Some(image_format) = maybe_image_format {\n\n info!(\"Loading image {}\", file.name);\n\n match 
image::load_from_memory_with_format(&file.content[..], image_format) {\n", "file_path": "src/mesh_loader.rs", "rank": 17, "score": 128900.32721045848 }, { "content": "pub fn update(model: &mut Model, msg: &Msg) -> ShouldRender {\n\n match msg {\n\n Msg::RemoveMesh(event) => {\n\n model.mesh.remove(event);\n\n return true;\n\n }\n\n Msg::MeshVisibilityToggle(event) => {\n\n let visible = &mut model.mesh.get_mut(event).unwrap().visible;\n\n *visible = !(*visible);\n\n }\n\n _ => {}\n\n };\n\n false\n\n}\n", "file_path": "src/mesh_list.rs", "rank": 18, "score": 128900.32721045848 }, { "content": "/// Saves an IsMesh3D in the binary .ply file format with additional colors\n\npub fn save_ply_binary_colored<M, P, W>(\n\n write: &mut W,\n\n mesh: &M,\n\n precision: &Precision,\n\n colors: &Vec<Rgb>,\n\n) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let n_vertices = mesh.num_vertices();\n\n let n_faces = mesh.num_faces();\n\n\n\n if n_vertices != colors.len() {\n\n return Err(PlyError::ColorArrayIncorrectLength);\n\n }\n\n\n\n let header = match precision {\n\n Precision::P32 => {\n", "file_path": "src/io/ply/save.rs", "rank": 19, "score": 110693.12439252582 }, { "content": "fn extend_by_vertex(p: &Point3D, array: &mut Vec<f32>) {\n\n array.push(p.x() as f32);\n\n array.push(p.y() as f32);\n\n array.push(p.z() as f32);\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 20, "score": 110427.73148720444 }, { "content": "fn string_to_format(format_string: &String) -> Option<image::ImageFormat> {\n\n let test_string = format_string.to_lowercase();\n\n if test_string == \"png\" {\n\n Some(image::ImageFormat::Png)\n\n } else if test_string == \"jpg\" || test_string == \".jpeg\" {\n\n Some(image::ImageFormat::Jpeg)\n\n } else if test_string == \"gif\" {\n\n Some(image::ImageFormat::Gif)\n\n } else if test_string == \"webp\" {\n\n Some(image::ImageFormat::WebP)\n\n } else if test_string == \"pnm\" {\n\n Some(image::ImageFormat::Pnm)\n\n } else if test_string == \"tiff\" || test_string == \"tiff\" {\n\n Some(image::ImageFormat::Tiff)\n\n } else if test_string == \"tga\" {\n\n Some(image::ImageFormat::Tga)\n\n } else if test_string == \"dds\" {\n\n Some(image::ImageFormat::Dds)\n\n } else if test_string == \"bmp\" {\n\n Some(image::ImageFormat::Bmp)\n\n } else if test_string == \"ico\" {\n\n Some(image::ImageFormat::Ico)\n\n } else if test_string == \"hdr\" {\n\n Some(image::ImageFormat::Hdr)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 21, "score": 109377.13953942576 }, { "content": "pub fn rendered(model: &mut Model, first_render: bool) {\n\n let canvas = model.node_ref.cast::<HtmlCanvasElement>().unwrap();\n\n\n\n let gl: GL = canvas\n\n .get_context(\"webgl2\")\n\n .unwrap()\n\n .unwrap()\n\n .dyn_into()\n\n .unwrap();\n\n\n\n model.canvas = Some(canvas);\n\n model.gl = new_gl(&gl);\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n\n\n model.renderer = Some(DeferredPipeline::new(&gl_ref).unwrap());\n\n model.untextured_mesh_renderer = Some(UntexturedMeshRenderer::new(&gl_ref));\n\n model.textured_mesh_renderer = Some(TexturedMeshRenderer::new(&gl_ref));\n\n\n\n // Camera\n\n let camera = Camera::new_perspective(\n", "file_path": "src/render_canvas.rs", "rank": 22, "score": 107607.81174580553 }, { "content": "pub fn update(model: &mut Model, msg: &Msg) -> ShouldRender {\n\n match msg {\n\n Msg::PointerDown(event) => {\n\n model.mouse_down = event.button();\n\n }\n\n 
Msg::PointerUp(_event) => {\n\n model.mouse_down = -1;\n\n }\n\n Msg::MouseUp(_event) => {\n\n model.mouse_down = -1;\n\n }\n\n Msg::TouchEnd(_event) => {\n\n model.mouse_down = -1;\n\n }\n\n Msg::PointerMove(event) => {\n\n if model.mouse_down != -1 {\n\n model.mouse_events.push(event.clone());\n\n }\n\n }\n\n Msg::PointerWheel(event) => {\n\n model.wheel_events.push(event.clone());\n\n }\n\n _ => {}\n\n }\n\n false\n\n}\n", "file_path": "src/input_controller.rs", "rank": 23, "score": 105991.94145957299 }, { "content": "pub fn update(model: &mut Model, msg: &Msg) -> ShouldRender {\n\n match msg {\n\n Msg::Render(timestamp) => {\n\n render_gl(model, *timestamp);\n\n }\n\n _ => {}\n\n };\n\n false\n\n}\n\n\n", "file_path": "src/render_canvas.rs", "rank": 24, "score": 105991.94145957299 }, { "content": "pub fn view_mesh_list(model: &Model) -> Html {\n\n html! {\n\n <table style=\"width:100%\">\n\n { for model.mesh.iter().map(|f| view_element(model, f.0)) }\n\n </table>\n\n }\n\n}\n\n\n", "file_path": "src/mesh_list.rs", "rank": 25, "score": 104186.93395579242 }, { "content": "#[inline(always)]\n\npub fn point_with_order<P>(fst: f64, snd: f64, third: f64, order: VertexOrder) -> P\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n match order {\n\n VertexOrder::Xyz => P::new(fst, snd, third),\n\n VertexOrder::Xzy => P::new(fst, third, snd),\n\n VertexOrder::Yxz => P::new(snd, fst, third),\n\n VertexOrder::Yzx => P::new(snd, third, fst),\n\n VertexOrder::Zxy => P::new(third, fst, snd),\n\n VertexOrder::Zyx => P::new(third, snd, fst),\n\n }\n\n}\n", "file_path": "src/io/ply/utils.rs", "rank": 26, "score": 104022.75045521706 }, { "content": "fn create_mesh_surface(\n\n model: &mut Model,\n\n mesh: &Rust3DMesh,\n\n uvs: &rust_3d::PointCloud3D<Point3D>,\n\n material_name: Option<String>,\n\n surface: &MaterialSurface,\n\n) -> MeshSurface {\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n // Only add faces that belong to this surface.\n\n let mut indices: Vec<u32> = vec![];\n\n for fid in 0..mesh.num_faces() {\n\n let vids = mesh.face_vertex_ids(FId { val: fid }).unwrap();\n\n if !surface.faces.contains(&vids.clone()) {\n\n continue;\n\n }\n\n indices.push(vids.a.val as u32);\n\n indices.push(vids.b.val as u32);\n\n indices.push(vids.c.val as u32);\n\n }\n\n let mut maybe_uvs: Option<&[f32]> = None;\n", "file_path": "src/mesh_loader.rs", "rank": 27, "score": 102101.93073526093 }, { "content": "pub fn view(model: &Model) -> Html {\n\n html! {\n\n <form>\n\n <input id=\"load_mesh\" type=\"file\" multiple=true accept=\".ply, .obj, .off, .stl, .mtl, .png, .jpeg, .jpg, .gif, .webp, .pnm, .tif, .tiff, .tga, .dds, .bmp, .ico, .hdr\" onchange=model.link.callback(move |value| {\n\n let mut result = Vec::new();\n\n if let ChangeData::Files(files) = value {\n\n let files = js_sys::try_iter(&files)\n\n .unwrap()\n\n .unwrap()\n\n .into_iter()\n\n .map(|v| File::from(v.unwrap()));\n\n result.extend(files);\n\n }\n\n Msg::Files(result)\n\n })/>\n\n <label for=\"load_mesh\">{\"Select files to view.\"}</label>\n\n </form>\n\n }\n\n}\n", "file_path": "src/mesh_loader.rs", "rank": 28, "score": 100576.2789334034 }, { "content": "#[inline(always)]\n\npub fn collect_index_line(line: &[u8]) -> Option<[usize; 3]> {\n\n let mut words = to_words_skip_empty(line);\n\n if words.next()? 
!= b\"3\" {\n\n return None;\n\n }\n\n\n\n let a = from_ascii(words.next()?)?;\n\n let b = from_ascii(words.next()?)?;\n\n let c = from_ascii(words.next()?)?;\n\n\n\n Some([a, b, c])\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 29, "score": 89797.95361827328 }, { "content": "/// Divide a mesh by materials, in instances where the only\n\n/// buffer we're passing to the shader is the vertex buffer. In\n\n/// this case we can use the values and existing order\n\n/// directly.\n\nfn divide_mesh_by_materials(\n\n model: &mut Model,\n\n mesh: &Rust3DMesh,\n\n material_info: &MaterialInfo,\n\n maybe_normals: Option<Vec<f32>>,\n\n) -> MeshContainer {\n\n let vertices = create_vertex_data(model, mesh, maybe_normals);\n\n info!(\"Adding model with {} vertices\", mesh.num_vertices());\n\n let mut surfaces: Vec<MeshSurface> = vec![];\n\n if material_info.surfaces.len() > 0 {\n\n for surface in material_info.surfaces.iter() {\n\n info!(\n\n \"Adding surface {} with {} faces\",\n\n surface.0,\n\n surface.1.faces.len()\n\n );\n\n let mesh_surface = create_mesh_surface(\n\n model,\n\n mesh,\n\n &material_info.uv,\n", "file_path": "src/mesh_loader.rs", "rank": 30, "score": 73687.30636712552 }, { "content": "fn render_gl(model: &mut Model, _timestamp: f64) {\n\n let gl = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n let screen_width = model.canvas.as_ref().unwrap().width();\n\n let screen_height = model.canvas.as_ref().unwrap().height();\n\n model\n\n .camera\n\n .as_mut()\n\n .unwrap()\n\n .set_size(screen_width as f32, screen_height as f32);\n\n\n\n for mouse_event in model.mouse_events.iter() {\n\n if mouse_event.shift_key() || model.mouse_down == 2 {\n\n let target = model.camera.as_ref().unwrap().target();\n\n let position = model.camera.as_ref().unwrap().position();\n\n let up = model.camera.as_ref().unwrap().up();\n\n let forward = target - position;\n\n let right = forward.cross(*up);\n\n let translation_y = MOVE_SPEED * (mouse_event.movement_y() as f32) * up;\n\n let translation_x = -1f32 * MOVE_SPEED * (mouse_event.movement_x() as f32) * right;\n\n let translation = translation_x + translation_y;\n", "file_path": "src/render_canvas.rs", "rank": 31, "score": 72957.34934998934 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn new_gl(gl: &GL) -> Option<Gl> {\n\n gl::Glstruct::new(gl.clone()).into()\n\n}\n\n\n", "file_path": "src/render_canvas.rs", "rank": 32, "score": 70747.14583320629 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn new_gl(_gl: &GL) -> Option<Gl> {\n\n None\n\n}\n\n\n", "file_path": "src/render_canvas.rs", "rank": 33, "score": 70747.14583320629 }, { "content": "/// Per-wedge UVs require that we duplicate any\n\n/// vertices that are referenced multiple times\n\n/// in the index vector so that the vertex and\n\n/// UV buffer that we send to the GPU are the\n\n/// same length. 
The resulting vectors are the\n\n/// same length and arrangement of the index\n\n/// vector, and the index vector is therefore\n\n/// sequential.\n\nfn divide_mesh_by_materials_per_wedge(\n\n model: &mut Model,\n\n mesh: &Rust3DMesh,\n\n material_info: &MaterialInfo,\n\n) -> MeshContainer {\n\n info!(\"Rebuilding vertex/UV vectors so they agree.\");\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n let mut vertices = vec![];\n\n let mut uvs = vec![];\n\n let uv_in = &material_info.uv;\n\n if 3 * mesh.num_faces() != uv_in.len() {\n\n // This function is only meant to be used with a mesh that has\n\n // per-face UVs.\n\n panic!(\n\n \"Expected 1 UV per wedge ({} != {})!\",\n\n mesh.num_faces() * 3,\n\n uv_in.len()\n\n );\n\n }\n\n let mut surface_indices: HashMap<String, Vec<u32>> = HashMap::new();\n", "file_path": "src/mesh_loader.rs", "rank": 34, "score": 70078.31353593236 }, { "content": "#[inline(always)]\n\npub fn from_ascii<T>(bytes: &[u8]) -> Option<T>\n\nwhere\n\n T: FromStr,\n\n{\n\n if bytes.is_ascii() {\n\n unsafe { T::from_str(std::str::from_utf8_unchecked(bytes)).ok() }\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Fetch a single line\n", "file_path": "src/io/utils.rs", "rank": 35, "score": 69722.96314782268 }, { "content": "fn create_vertex_data(\n\n model: &mut Model,\n\n mesh: &Rust3DMesh,\n\n maybe_normals: Option<Vec<f32>>,\n\n) -> VertexData {\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n let mut vertices: Vec<f32> = vec![];\n\n let mut indices: Vec<u32> = vec![];\n\n for fid in 0..mesh.num_faces() {\n\n let vids = mesh.face_vertex_ids(FId { val: fid }).unwrap();\n\n indices.push(vids.a.val as u32);\n\n indices.push(vids.b.val as u32);\n\n indices.push(vids.c.val as u32);\n\n }\n\n for vid in 0..mesh.num_vertices() {\n\n let vertex = mesh.vertex(VId { val: vid }).unwrap();\n\n vertices.push(vertex.x as f32);\n\n vertices.push(vertex.y as f32);\n\n vertices.push(vertex.z as f32);\n\n }\n", "file_path": "src/mesh_loader.rs", "rank": 36, "score": 65427.84252326164 }, { "content": "type Rust3DMesh = rust_3d::Mesh3D<Point3D, PointCloud3D<Point3D>, Vec<usize>>;\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 37, "score": 64815.71637102769 }, { "content": "fn view_element(model: &Model, data: &str) -> Html {\n\n let mesh_name = data.to_string();\n\n let remove_mesh_cb = model\n\n .link\n\n .callback(move |_| Msg::RemoveMesh(mesh_name.clone()));\n\n let mesh_name = data.to_string();\n\n let handle_check_cb = model\n\n .link\n\n .callback(move |_| Msg::MeshVisibilityToggle(mesh_name.clone()));\n\n html! 
{\n\n <table>\n\n <tr>\n\n <td>\n\n { data }\n\n </td>\n\n <td>\n\n <input type=\"checkbox\" checked={model.mesh.get(data).unwrap().visible } onclick=handle_check_cb />\n\n </td>\n\n <td>\n\n <button onclick=remove_mesh_cb.clone()>\n\n { \"Remove\" }\n\n </button>\n\n </td>\n\n </tr>\n\n </table>\n\n }\n\n}\n", "file_path": "src/mesh_list.rs", "rank": 38, "score": 55699.27934481435 }, { "content": "fn compute_normals(indices: &[u32], positions: &[f32]) -> Vec<f32> {\n\n let mut normals = vec![0.0f32; positions.len() * 3];\n\n for face in 0..indices.len() / 3 {\n\n let index0 = indices[face * 3] as usize;\n\n let p0 = vec3(\n\n positions[index0 * 3],\n\n positions[index0 * 3 + 1],\n\n positions[index0 * 3 + 2],\n\n );\n\n let index1 = indices[face * 3 + 1] as usize;\n\n let p1 = vec3(\n\n positions[index1 * 3],\n\n positions[index1 * 3 + 1],\n\n positions[index1 * 3 + 2],\n\n );\n\n let index2 = indices[face * 3 + 2] as usize;\n\n let p2 = vec3(\n\n positions[index2 * 3],\n\n positions[index2 * 3 + 1],\n\n positions[index2 * 3 + 2],\n", "file_path": "src/mesh_loader.rs", "rank": 39, "score": 53815.802242038444 }, { "content": "/// Trait for binary data readers\n\npub trait IsByteReader {\n\n fn read_i8<R>(read: &mut R) -> iRes<i8>\n\n where\n\n R: Read;\n\n\n\n fn read_u8<R>(read: &mut R) -> iRes<u8>\n\n where\n\n R: Read;\n\n\n\n fn read_i16<R>(read: &mut R) -> iRes<i16>\n\n where\n\n R: Read;\n\n\n\n fn read_u16<R>(read: &mut R) -> iRes<u16>\n\n where\n\n R: Read;\n\n\n\n fn read_i32<R>(read: &mut R) -> iRes<i32>\n\n where\n\n R: Read;\n", "file_path": "src/io/byte_reader.rs", "rank": 40, "score": 44705.05673240629 }, { "content": "fn main() {\n\n console_log::init_with_level(Level::Debug).unwrap();\n\n yew::start_app::<web_geo_viewer::Model>();\n\n}\n", "file_path": "src/main.rs", "rank": 41, "score": 41674.59254026748 }, { "content": " Some(surface.0.clone()),\n\n surface.1,\n\n );\n\n surfaces.push(mesh_surface);\n\n }\n\n } else {\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n let mut indices: Vec<u32> = vec![];\n\n for fid in 0..mesh.num_faces() {\n\n let vids = mesh.face_vertex_ids(FId { val: fid }).unwrap();\n\n indices.push(vids.a.val as u32);\n\n indices.push(vids.b.val as u32);\n\n indices.push(vids.c.val as u32);\n\n }\n\n let surface = MeshSurface::new(gl_ref, &indices[..], None, None).unwrap();\n\n surfaces.push(surface);\n\n }\n\n MeshContainer {\n\n vertices: vertices,\n\n surfaces: surfaces,\n\n visible: true,\n\n }\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 42, "score": 30790.586057504854 }, { "content": "use super::{MeshContainer, Model, Msg, ShouldRender};\n\nuse crate::io::*;\n\nuse crate::render_buffers::{MeshSurface, VertexData};\n\nuse image::GenericImageView;\n\nuse log::*;\n\nuse rust_3d::{io::load_stl_mesh_unique, *};\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\nuse three_d::core::types::InnerSpace;\n\nuse three_d::vec3;\n\nuse yew::services::reader::File;\n\nuse yew::{html, ChangeData, Html};\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 43, "score": 30790.235060464987 }, { "content": "\n\n // Initialize index vector for each surface.\n\n for surface in &material_info.surfaces {\n\n surface_indices.insert(surface.0.clone(), vec![]);\n\n }\n\n let mut all_indices = vec![];\n\n for fid in 0..mesh.num_faces() {\n\n let vids = mesh.face_vertex_ids(FId { val: fid }).unwrap();\n\n all_indices.push((fid * 3 + 0) as u32);\n\n all_indices.push((fid * 3 + 1) as u32);\n\n all_indices.push((fid * 3 + 2) as 
u32);\n\n // Insert this index into surfaces to which this face belongs.\n\n for surface in &material_info.surfaces {\n\n if surface.1.faces.contains(&vids) {\n\n let index_vector = surface_indices.get_mut(surface.0).unwrap();\n\n index_vector.push((fid * 3 + 0) as u32);\n\n index_vector.push((fid * 3 + 1) as u32);\n\n index_vector.push((fid * 3 + 2) as u32);\n\n }\n\n }\n", "file_path": "src/mesh_loader.rs", "rank": 44, "score": 30787.704243889053 }, { "content": " match maybe_normals {\n\n Some(normals) => {\n\n info!(\"Using {} provided normals.\", normals.len());\n\n VertexData::new(gl_ref, &vertices[..], &normals[..]).unwrap()\n\n }\n\n None => {\n\n info!(\n\n \"computing normals with {} indices and {} indices.\",\n\n vertices.len(),\n\n indices.len()\n\n );\n\n VertexData::new(\n\n gl_ref,\n\n &vertices[..],\n\n &compute_normals(&indices[..], &vertices[..]),\n\n )\n\n .unwrap()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 45, "score": 30786.63082096072 }, { "content": " let vertex_data = VertexData::new(\n\n gl_ref,\n\n &vertices[..],\n\n &compute_normals(&all_indices[..], &vertices[..]),\n\n )\n\n .unwrap();\n\n let mut surfaces: Vec<MeshSurface> = vec![];\n\n for name_and_indices in surface_indices {\n\n info!(\"Material name {}\", name_and_indices.0);\n\n let surface = MeshSurface::new(\n\n gl_ref,\n\n &name_and_indices.1[..],\n\n Some(&uvs[..]),\n\n Some(name_and_indices.0),\n\n )\n\n .unwrap();\n\n surfaces.push(surface);\n\n }\n\n MeshContainer {\n\n vertices: vertex_data,\n\n surfaces: surfaces,\n\n visible: true,\n\n }\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 46, "score": 30786.57971138597 }, { "content": " info!(\n\n \"Could not load {} as a texture due to error {:?}\",\n\n file.name, e\n\n );\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n info!(\n\n \"Could not load {} as an image due to error {}\",\n\n file.name, e\n\n );\n\n }\n\n }\n\n } else if ext == \"ply\" || ext == \"obj\" {\n\n let mut m = Rust3DMesh::default();\n\n let mut material_info = MaterialInfo::new();\n\n let mut maybe_normals = None;\n\n if ext == \"ply\" {\n\n let mut material = Material::new();\n", "file_path": "src/mesh_loader.rs", "rank": 47, "score": 30785.46294671563 }, { "content": " match load_ply(\n\n &mut &file.content[..],\n\n &mut m,\n\n &mut material,\n\n &mut material_info,\n\n &file.name,\n\n ) {\n\n Ok(_) => {\n\n model.materials.insert(file.name.clone(), material);\n\n }\n\n Err(e) => {\n\n warn!(\"Could not load {} as a PLY due to {:?}\", file.name, e);\n\n }\n\n }\n\n } else if ext == \"obj\" {\n\n match load_obj_mesh(&mut &file.content[..], &mut m, &mut material_info) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n warn!(\"Could not load {} as an OBJ due to {:?}\", file.name, e);\n\n }\n", "file_path": "src/mesh_loader.rs", "rank": 48, "score": 30785.244698275957 }, { "content": " let v0 = mesh.vertex(vids.a).unwrap();\n\n extend_by_vertex(&v0, &mut vertices);\n\n let v1 = mesh.vertex(vids.b).unwrap();\n\n extend_by_vertex(&v1, &mut vertices);\n\n let v2 = mesh.vertex(vids.c).unwrap();\n\n extend_by_vertex(&v2, &mut vertices);\n\n // 1 UV per face, each a point3D, is stored for each face,\n\n // for a total of 3 points per face.\n\n // UVs.\n\n let uv_base_idx = fid * 3;\n\n let uv0 = uv_in.get_d(uv_base_idx + 0);\n\n extend_by_vertex(&uv0, &mut uvs);\n\n let uv1 = uv_in.get_d(uv_base_idx + 1);\n\n extend_by_vertex(&uv1, &mut uvs);\n\n let uv2 = uv_in.get_d(uv_base_idx + 2);\n\n extend_by_vertex(&uv2, &mut uvs);\n\n }\n\n // we always have to compute normals here 
since the pre-computed\n\n // normals will not match, and it's better to have per-face\n\n // normals anyway.\n", "file_path": "src/mesh_loader.rs", "rank": 49, "score": 30784.560938592855 }, { "content": " }\n\n } else if ext == \"off\" {\n\n match load_off_mesh(&mut &file.content[..], &mut m) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n warn!(\"Could not load {} as an off due to {:?}\", file.name, e);\n\n }\n\n }\n\n } else if ext == \"stl\" {\n\n let mut r3d_normals = vec![];\n\n match load_stl_mesh_unique(\n\n &mut &file.content[..],\n\n rust_3d::io::StlFormat::Auto,\n\n &mut m,\n\n &mut r3d_normals,\n\n ) {\n\n Ok(_) => {\n\n let mut normals = vec![];\n\n for normal in r3d_normals {\n\n normals.push(normal.x as f32);\n", "file_path": "src/mesh_loader.rs", "rank": 50, "score": 30784.050585210854 }, { "content": " let mut uv_vec: Vec<f32> = vec![];\n\n if uvs.len() > 0 {\n\n for point in uvs.data.iter() {\n\n uv_vec.push(point.x as f32);\n\n uv_vec.push(point.y as f32);\n\n uv_vec.push(point.z as f32);\n\n }\n\n info!(\"Loaded {} uvs\", uv_vec.len() / 3);\n\n maybe_uvs = Some(&uv_vec[..]);\n\n }\n\n MeshSurface::new(gl_ref, &indices[..], maybe_uvs, material_name).unwrap()\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 51, "score": 30782.45356211749 }, { "content": " info!(\"Loading an MTL file.\");\n\n match load_mtl(&mut &file.content[..]) {\n\n Ok(materials) => {\n\n for material in materials {\n\n info!(\"Loading material {}: {:?}.\", material.0, material.1);\n\n model.materials.insert(material.0, material.1);\n\n }\n\n }\n\n Err(e) => {\n\n warn!(\n\n \"Could not load {} as an mtl file due to error: {:?}\",\n\n file.name, e\n\n );\n\n }\n\n }\n\n }\n\n }\n\n }\n\n _ => {}\n\n };\n\n false\n\n}\n", "file_path": "src/mesh_loader.rs", "rank": 52, "score": 30781.385931436696 }, { "content": " }\n\n info!(\n\n \"{} has {} vertices and {} indices\",\n\n file.name,\n\n m.num_vertices(),\n\n m.num_faces()\n\n );\n\n if material_info.uv.len() == 3 * m.num_faces() {\n\n // Per-wedge UVs. 
If a model has both per-wedge and per-vertex\n\n // UVs we should prefer per-wedge.\n\n let meshes = divide_mesh_by_materials_per_wedge(model, &m, &material_info);\n\n model.mesh.insert(file.name.clone(), meshes);\n\n } else {\n\n // Per-vetex UVs\n\n let meshes =\n\n divide_mesh_by_materials(model, &m, &material_info, maybe_normals);\n\n model.mesh.insert(file.name.clone(), meshes);\n\n }\n\n return true;\n\n } else if ext == \"mtl\" {\n", "file_path": "src/mesh_loader.rs", "rank": 53, "score": 30780.875104083978 }, { "content": " normals.push(normal.x as f32);\n\n normals.push(normal.z as f32);\n\n }\n\n maybe_normals = Some(normals);\n\n let meshes = divide_mesh_by_materials(\n\n model,\n\n &m,\n\n &material_info,\n\n maybe_normals,\n\n );\n\n model.mesh.insert(file.name.clone(), meshes);\n\n }\n\n Err(e) => {\n\n warn!(\"Could not load {} as an off due to {:?}\", file.name, e);\n\n }\n\n }\n\n return true;\n\n }\n\n if m.num_vertices() == 0 {\n\n return false;\n", "file_path": "src/mesh_loader.rs", "rank": 54, "score": 30780.443064962474 }, { "content": "use super::{Model, Msg, ShouldRender};\n\nuse yew::{html, Html};\n\n\n", "file_path": "src/mesh_list.rs", "rank": 55, "score": 30778.35382992476 }, { "content": " Ok(image) => {\n\n let gl_ref = model.gl.as_ref().expect(\"GL Context not initialized!\");\n\n let (width, height) = image.dimensions();\n\n info!(\"Loaded {}x{} image {}\", file.name, width, height);\n\n match three_d::texture::Texture2D::new_with_u8(\n\n gl_ref,\n\n three_d::Interpolation::Linear,\n\n three_d::Interpolation::Linear,\n\n Some(three_d::Interpolation::Linear),\n\n three_d::Wrapping::ClampToEdge,\n\n three_d::Wrapping::ClampToEdge,\n\n width,\n\n height,\n\n &image.to_bytes()[..],\n\n ) {\n\n Ok(texture) => {\n\n info!(\"Created texture {}\", file.name);\n\n model.images.insert(file.name.clone(), texture);\n\n }\n\n Err(e) => {\n", "file_path": "src/mesh_loader.rs", "rank": 56, "score": 30777.90934618938 }, { "content": " );\n\n\n\n let normal = (p1 - p0).cross(p2 - p0);\n\n normals[index0 * 3] += normal.x;\n\n normals[index0 * 3 + 1] += normal.y;\n\n normals[index0 * 3 + 2] += normal.z;\n\n normals[index1 * 3] += normal.x;\n\n normals[index1 * 3 + 1] += normal.y;\n\n normals[index1 * 3 + 2] += normal.z;\n\n normals[index2 * 3] += normal.x;\n\n normals[index2 * 3 + 1] += normal.y;\n\n normals[index2 * 3 + 2] += normal.z;\n\n }\n\n\n\n for i in 0..normals.len() / 3 {\n\n let normal = vec3(normals[3 * i], normals[3 * i + 1], normals[3 * i + 2]).normalize();\n\n normals[3 * i] = normal.x;\n\n normals[3 * i + 1] = normal.y;\n\n normals[3 * i + 2] = normal.z;\n\n }\n\n normals\n\n}\n\n\n", "file_path": "src/mesh_loader.rs", "rank": 57, "score": 30774.37159409625 }, { "content": "use crate::io::Material;\n\nuse crate::render_buffers::{MeshSurface, VertexData};\n\nuse three_d::*;\n\n\n\npub struct UntexturedMeshRenderer {\n\n shader: program::Program,\n\n}\n\n\n\nimpl UntexturedMeshRenderer {\n\n pub fn new(gl: &Gl) -> UntexturedMeshRenderer {\n\n UntexturedMeshRenderer {\n\n shader: program::Program::from_source(\n\n &gl,\n\n include_str!(\"shaders/mesh_shaded.vert\"),\n\n include_str!(\"shaders/shaded.frag\"),\n\n )\n\n .unwrap(),\n\n }\n\n }\n\n\n", "file_path": "src/untextured_mesh_renderer.rs", "rank": 58, "score": 29524.79098066109 }, { "content": "use crate::io::Material;\n\nuse crate::render_buffers::{MeshSurface, VertexData};\n\nuse three_d::*;\n\n\n\npub struct TexturedMeshRenderer {\n\n shader: program::Program,\n\n}\n\n\n\nimpl TexturedMeshRenderer {\n\n 
pub fn new(gl: &Gl) -> TexturedMeshRenderer {\n\n TexturedMeshRenderer {\n\n shader: program::Program::from_source(\n\n &gl,\n\n include_str!(\"shaders/textured.vert\"),\n\n include_str!(\"shaders/textured.frag\"),\n\n )\n\n .unwrap(),\n\n }\n\n }\n\n\n", "file_path": "src/textured_mesh_renderer.rs", "rank": 59, "score": 29524.749150035856 }, { "content": "\n\n program\n\n .add_uniform_mat4(\"modelMatrix\", &transformation)\n\n .unwrap();\n\n\n\n program.use_texture(image, \"texture0\").unwrap();\n\n\n\n program.use_uniform_block(camera.matrix_buffer(), \"Camera\");\n\n program\n\n .add_uniform_mat4(\n\n \"normalMatrix\",\n\n &transformation.invert().unwrap().transpose(),\n\n )\n\n .unwrap();\n\n program\n\n .use_attribute_vec3_float(&vertex_data.position_buffer, \"position\")\n\n .unwrap();\n\n program\n\n .use_attribute_vec3_float(&vertex_data.normal_buffer, \"normal\")\n\n .unwrap();\n\n program\n\n .use_attribute_vec3_float(mesh_surface.maybe_uvs.as_ref().unwrap(), \"uvw\")\n\n .unwrap();\n\n program.draw_elements(&mesh_surface.index_buffer);\n\n }\n\n}\n", "file_path": "src/textured_mesh_renderer.rs", "rank": 60, "score": 29520.200878700016 }, { "content": " pub fn render(\n\n &self,\n\n transformation: &Mat4,\n\n camera: &camera::Camera,\n\n vertex_data: &VertexData,\n\n mesh_surface: &MeshSurface,\n\n material: &Material,\n\n ) {\n\n let program = &self.shader;\n\n program\n\n .add_uniform_float(\"diffuse_intensity\", &material.diffuse_intensity)\n\n .unwrap();\n\n program\n\n .add_uniform_float(\"specular_intensity\", &material.specular_intensity)\n\n .unwrap();\n\n let specular_power = 5.0;\n\n program\n\n .add_uniform_float(\"specular_power\", &specular_power)\n\n .unwrap();\n\n\n", "file_path": "src/untextured_mesh_renderer.rs", "rank": 61, "score": 29518.898731216912 }, { "content": " pub fn render(\n\n &self,\n\n transformation: &Mat4,\n\n camera: &camera::Camera,\n\n vertex_data: &VertexData,\n\n mesh_surface: &MeshSurface,\n\n material: &Material,\n\n image: &Texture2D,\n\n ) {\n\n let program = &self.shader;\n\n program\n\n .add_uniform_float(\"diffuse_intensity\", &material.diffuse_intensity)\n\n .unwrap();\n\n program\n\n .add_uniform_float(\"specular_intensity\", &material.specular_intensity)\n\n .unwrap();\n\n let specular_power = 5.0;\n\n program\n\n .add_uniform_float(\"specular_power\", &specular_power)\n\n .unwrap();\n", "file_path": "src/textured_mesh_renderer.rs", "rank": 62, "score": 29518.679097471155 }, { "content": " program\n\n .add_uniform_vec3(\"color\", &material.diffuse_color)\n\n .unwrap();\n\n program\n\n .add_uniform_mat4(\"modelMatrix\", &transformation)\n\n .unwrap();\n\n program.use_uniform_block(camera.matrix_buffer(), \"Camera\");\n\n program\n\n .add_uniform_mat4(\n\n \"normalMatrix\",\n\n &transformation.invert().unwrap().transpose(),\n\n )\n\n .unwrap();\n\n program\n\n .use_attribute_vec3_float(&vertex_data.position_buffer, \"position\")\n\n .unwrap();\n\n program\n\n .use_attribute_vec3_float(&vertex_data.normal_buffer, \"normal\")\n\n .unwrap();\n\n program.draw_elements(&mesh_surface.index_buffer);\n\n }\n\n}\n", "file_path": "src/untextured_mesh_renderer.rs", "rank": 63, "score": 29518.266390690856 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, 
distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/ply/load.rs", "rank": 65, "score": 99.39384666875041 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/off.rs", "rank": 66, "score": 99.39384666875041 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/ply/types.rs", "rank": 67, "score": 99.39384666875041 }, { "content": "/*\n\nCopyright 2017 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without 
limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/ply/mod.rs", "rank": 68, "score": 99.39384666875038 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/ply/utils.rs", "rank": 69, "score": 99.3938466687504 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/ply/save.rs", "rank": 70, "score": 99.39384666875041 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the 
\"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/io/utils.rs", "rank": 71, "score": 99.3938466687504 }, { "content": "/*\n\nCopyright 2020 Martin Buck\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"),\n\nto deal in the Software without restriction, including without limitation the\n\nrights to use, copy, modify, merge, publish, distribute, sublicense,\n\nand/or sell copies of the Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall\n\nbe included all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n\nOR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n\n\n//! 
Module containing the IsByteReader trait and Little/BigReader implementations for reading binary data\n\n\n\nuse std::io::{Read, Result as iRes};\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Trait for binary data readers\n", "file_path": "src/io/byte_reader.rs", "rank": 72, "score": 91.4797649532243 }, { "content": " let mut after = BytesWords::default();\n\n\n\n let mut face_types = vec![];\n\n let mut texture_name = None;\n\n\n\n while let Ok(line) = fetch_line(read, line_buffer) {\n\n *i_line += 1;\n\n\n\n if line.starts_with(b\"comment\") {\n\n let mut words = to_words_skip_empty(line);\n\n match words.nth(1) {\n\n Some(next_word) => {\n\n if next_word == b\"TextureFile\" {\n\n match words.next() {\n\n Some(texture_name_blob) => {\n\n if let Ok(read_texture_name) = str::from_utf8(texture_name_blob) {\n\n info!(\"Read texture name: {}\", read_texture_name);\n\n texture_name = Some(read_texture_name.to_string());\n\n }\n\n }\n", "file_path": "src/io/ply/load.rs", "rank": 74, "score": 22.12022877275649 }, { "content": " break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .off file operations\n\npub enum OffError {\n\n AccessFile,\n\n InvalidMeshIndices(usize),\n\n LineParse(usize),\n\n}\n\n\n\n/// Result type for .off file operations\n\npub type OffResult<T> = std::result::Result<T, OffError>;\n\n\n\nimpl fmt::Debug for OffError {\n", "file_path": "src/io/off.rs", "rank": 77, "score": 20.546943272701043 }, { "content": " AccessFile,\n\n LineParse(usize),\n\n NoMaterialError(usize),\n\n}\n\n\n\nimpl fmt::Debug for MtlError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::AccessFile => write!(f, \"Unable to access file\"),\n\n Self::LineParse(x) => write!(f, \"Unable to parse line {}\", x),\n\n Self::NoMaterialError(x) => write!(f, \"Line {} occurs before newmtl.\", x),\n\n }\n\n }\n\n}\n\n\n\n/// Result type for .obj file operations\n\npub type MtlResult<T> = std::result::Result<T, MtlError>;\n\n\n\nimpl From<ioError> for MtlError {\n\n fn from(_error: ioError) -> Self {\n\n MtlError::AccessFile\n\n }\n\n}\n", "file_path": "src/io/mtl.rs", "rank": 78, "score": 20.191666027257945 }, { "content": " after.bytes += t.size_bytes();\n\n after.words += 1;\n\n }\n\n }\n\n }\n\n HeaderReadState::Face => {\n\n if line.starts_with(b\"property list\") {\n\n let mut words = to_words_skip_empty(line);\n\n skip_n(&mut words, 2); // skip \"property\" and \"list\"\n\n let t_count = FaceType::try_from(Type::try_from(\n\n words.next().ok_or(PlyError::InvalidProperty(*i_line))?,\n\n )?)?;\n\n let t_index = FaceType::try_from(Type::try_from(\n\n words.next().ok_or(PlyError::InvalidProperty(*i_line))?,\n\n )?)?;\n\n let name: String =\n\n str::from_utf8(words.next().ok_or(PlyError::InvalidProperty(*i_line))?)\n\n .unwrap()\n\n .to_string();\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 81, "score": 19.25845888121268 }, { "content": " }\n\n }\n\n Some(_) => {}\n\n }\n\n\n\n if line.starts_with(b\"property\") {\n\n match read_state {\n\n HeaderReadState::Vertex => {\n\n let mut words = to_words_skip_empty(line);\n\n skip_n(&mut words, 1); // skip \"property\"\n\n\n\n let t =\n\n Type::try_from(words.next().ok_or(PlyError::InvalidProperty(*i_line))?)?;\n\n let id = words.next().ok_or(PlyError::InvalidProperty(*i_line))?;\n\n if id == b\"x\" {\n\n opt_fst_type = Some(VertexType::try_from(t)?);\n\n n_types_found += 
1;\n\n vertex_order[i_vertex_order] = Xyz::X;\n\n i_vertex_order += 1;\n\n } else if id == b\"y\" {\n", "file_path": "src/io/ply/load.rs", "rank": 82, "score": 18.740434432689483 }, { "content": "//------------------------------------------------------------------------------\n\n\n\n#[derive(Default, Debug)]\n\npub struct BytesWords {\n\n pub bytes: usize,\n\n pub words: usize,\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum VertexType {\n\n Float,\n\n Double,\n\n}\n\n\n\nimpl TryFrom<Type> for VertexType {\n\n type Error = PlyError;\n\n\n\n fn try_from(x: Type) -> PlyResult<Self> {\n", "file_path": "src/io/ply/types.rs", "rank": 83, "score": 18.7339620691808 }, { "content": "*/\n\n\n\n//! Module for interal types for IO operations of the ply file format\n\n\n\nuse core::convert::TryFrom;\n\n\n\nuse std::{fmt, io::Error as ioError};\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Type {\n\n Char,\n\n UChar,\n\n Short,\n\n UShort,\n\n Int,\n\n UInt,\n\n Float,\n\n Double,\n", "file_path": "src/io/ply/types.rs", "rank": 84, "score": 18.58189197914905 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use crate::io::*;\n\n use rust_3d::*;\n\n use std::{fs::File, io::BufReader};\n\n #[test]\n\n fn load_untextured_obj() {\n\n let box_path = \"src/tests/data/box.obj\".to_string();\n\n let mut m = rust_3d::Mesh3D::<Point3D, PointCloud3D<Point3D>, Vec<usize>>::default();\n\n let mut uv = MaterialInfo::new();\n\n load_obj_mesh(\n\n &mut BufReader::new(File::open(box_path).unwrap()),\n\n &mut m,\n\n &mut uv,\n\n )\n\n .unwrap();\n\n assert_eq!(m.num_vertices(), 8);\n\n assert_eq!(m.num_faces(), 12);\n\n assert_eq!(uv.uv.len(), 0);\n\n assert_eq!(uv.surfaces.contains_key(\"Default\"), true);\n", "file_path": "src/tests/io.rs", "rank": 85, "score": 18.561928891541264 }, { "content": "use three_d::objects::Error;\n\nuse three_d::*;\n\n\n\n/// All of the information needed to render a mesh, in the most convenient\n\n/// form possible for rendering.\n\n\n\n/// Everything associated with a surface.\n\npub struct MeshSurface {\n\n /// Indices for triangular faces. A 1D int array with a\n\n /// stride of 3.\n\n pub index_buffer: ElementBuffer,\n\n\n\n /// UVs. a 1D float array with a stride of 2.\n\n pub maybe_uvs: Option<VertexBuffer>,\n\n\n\n /// The name of the material, if there is one.\n\n pub maybe_material_name: Option<String>,\n\n}\n\n\n\n/// Everything associated with vertices. 
We separate vertex data from\n", "file_path": "src/render_buffers.rs", "rank": 86, "score": 18.36557834334975 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::AccessFile => write!(f, \"Unable to access file\"),\n\n Self::LineParse(x) => write!(f, \"Unable to parse line {}\", x),\n\n Self::InvalidMeshIndices(x) => {\n\n write!(f, \"File contains invalid mesh indices on line {}\", x)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<ioError> for OffError {\n\n fn from(_error: ioError) -> Self {\n\n OffError::AccessFile\n\n }\n\n}\n", "file_path": "src/io/off.rs", "rank": 87, "score": 18.147739000998932 }, { "content": " 0.005,\n\n )\n\n .unwrap();\n\n let renderer = model.renderer.as_mut().unwrap();\n\n let camera = model.camera.as_ref().unwrap();\n\n let mesh_groups = &model.mesh;\n\n let materials = &model.materials;\n\n let images = &model.images;\n\n let untextured = model.untextured_mesh_renderer.as_ref();\n\n let textured = model.textured_mesh_renderer.as_ref();\n\n renderer\n\n .geometry_pass(screen_width as usize, screen_height as usize, &|| {\n\n for group in mesh_groups.iter() {\n\n for surface in group.1.surfaces.iter() {\n\n if group.1.visible {\n\n let mut maybe_texture: Option<&Texture2D> = None;\n\n let material = match &surface.maybe_material_name {\n\n Some(material_name) => match materials.get(material_name) {\n\n Some(material) => {\n\n if let Some(texture_name) = &material.texture_name {\n", "file_path": "src/render_canvas.rs", "rank": 89, "score": 18.099393243164386 }, { "content": "\n\n mesh.add_vertex(P::new(x, y, z));\n\n } else {\n\n let mut words = to_words_skip_empty(line);\n\n\n\n let count_face = words.next().ok_or(OffError::LineParse(i_line))?;\n\n\n\n if count_face == b\"3\" {\n\n let a = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n\n\n let b = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n\n\n let c = words\n\n .next()\n", "file_path": "src/io/off.rs", "rank": 90, "score": 18.03291277003624 }, { "content": " }\n\n\n\n // safe since checked above\n\n if mesh.num_vertices() < counts.unwrap()[0] {\n\n let mut words = to_words_skip_empty(line);\n\n\n\n let x = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n\n\n let y = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n\n\n let z = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n", "file_path": "src/io/off.rs", "rank": 91, "score": 17.679957725941726 }, { "content": " if line.is_empty() || line.starts_with(b\"#\") {\n\n continue;\n\n }\n\n\n\n if counts.is_none() {\n\n let mut words = to_words_skip_empty(line);\n\n let n_vertices = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n let n_faces = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?;\n\n\n\n mesh.reserve_vertices(n_vertices);\n\n mesh.reserve_faces(n_faces);\n\n\n\n counts = Some([n_vertices, n_faces]);\n\n continue;\n", "file_path": "src/io/off.rs", "rank": 94, "score": 17.12681259287589 }, { "content": " }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Xyz {\n\n X,\n\n Y,\n\n 
Z,\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum VertexOrder {\n\n Xyz,\n\n Xzy,\n\n Yxz,\n\n Yzx,\n", "file_path": "src/io/ply/types.rs", "rank": 96, "score": 16.93610080669944 }, { "content": "\n\n if line.is_empty() || line.starts_with(b\"#\") {\n\n continue;\n\n }\n\n\n\n if n_vertices.is_none() {\n\n let mut words = to_words_skip_empty(line);\n\n n_vertices = Some(\n\n words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::LineParse(i_line))?,\n\n );\n\n ip.reserve(n_vertices.unwrap());\n\n\n\n continue;\n\n }\n\n\n\n // safe since checked above\n\n if n_added < n_vertices.unwrap() {\n", "file_path": "src/io/off.rs", "rank": 97, "score": 16.69218900455187 }, { "content": " // skip \"map_Kd\"\n\n words.next().ok_or(MtlError::LineParse(i_line))?;\n\n if let Some(next_word) = words.next().and_then(|w| from_ascii(w)) {\n\n if let Some(mtl_name) = mtl_name.clone() {\n\n if let Some(mtl) = result.get_mut(&mtl_name) {\n\n mtl.texture_name = Some(next_word);\n\n } else {\n\n return Err(MtlError::NoMaterialError(i_line));\n\n }\n\n }\n\n }\n\n } else if line.starts_with(b\"Ns \") {\n\n let mut words = to_words_skip_empty(line);\n\n // skip \"map_Kd\"\n\n words.next().ok_or(MtlError::LineParse(i_line))?;\n\n if let Some(next_word) = words.next().and_then(|w| from_ascii(w)) {\n\n if let Some(mtl_name) = mtl_name.clone() {\n\n if let Some(mtl) = result.get_mut(&mtl_name) {\n\n mtl.specular_intensity = next_word;\n\n } else {\n", "file_path": "src/io/mtl.rs", "rank": 98, "score": 16.616580901777517 }, { "content": " .ok_or(PlyError::LineParse(*i_line))?,\n\n );\n\n continue;\n\n }\n\n }\n\n Some(_) => {}\n\n }\n\n\n\n match opt_n_faces {\n\n None => {\n\n if line.starts_with(b\"element face\") {\n\n read_state = HeaderReadState::Face;\n\n let mut words = to_words_skip_empty(line);\n\n opt_n_faces = Some(\n\n words\n\n .nth(2)\n\n .and_then(|w| from_ascii(w))\n\n .ok_or(PlyError::LineParse(*i_line))?,\n\n );\n\n continue;\n", "file_path": "src/io/ply/load.rs", "rank": 99, "score": 16.525480002635454 } ]
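The OFF and MTL loader snippets in the context items above repeat one parsing idiom: split a line into whitespace-separated words, convert each word, and map a failed conversion to a line-numbered LineParse error. The sketch below is an illustrative, self-contained reduction of that idiom; the ParseError enum and parse_vertex helper are hypothetical stand-ins, not the crate's actual to_words_skip_empty / from_ascii API.

// Illustrative sketch only: mirrors the word -> parse -> LineParse(i_line) pattern
// used by the OFF/MTL loader snippets above; all names here are hypothetical.
#[derive(Debug, PartialEq)]
enum ParseError {
    LineParse(usize),
}

fn parse_vertex(line: &str, i_line: usize) -> Result<[f64; 3], ParseError> {
    let mut words = line.split_whitespace();
    // Each coordinate must parse; otherwise report the offending line number.
    let mut next_f64 = || {
        words
            .next()
            .and_then(|w| w.parse::<f64>().ok())
            .ok_or(ParseError::LineParse(i_line))
    };
    let x = next_f64()?;
    let y = next_f64()?;
    let z = next_f64()?;
    Ok([x, y, z])
}

fn main() {
    assert_eq!(parse_vertex("1.0 2.0 3.0", 5), Ok([1.0, 2.0, 3.0]));
    assert_eq!(parse_vertex("1.0 oops 3.0", 6), Err(ParseError::LineParse(6)));
}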
Rust
step09/src/virtio.rs
13696652781/RSCV
f07db4aad2a5758ceae92fd41cd6e8520d117ace
use crate::bus::*; use crate::cpu::*; use crate::trap::*; pub const VIRTIO_IRQ: u64 = 1; const VRING_DESC_SIZE: u64 = 16; const DESC_NUM: u64 = 8; pub const VIRTIO_MAGIC: u64 = VIRTIO_BASE + 0x000; pub const VIRTIO_VERSION: u64 = VIRTIO_BASE + 0x004; pub const VIRTIO_DEVICE_ID: u64 = VIRTIO_BASE + 0x008; pub const VIRTIO_VENDOR_ID: u64 = VIRTIO_BASE + 0x00c; pub const VIRTIO_DEVICE_FEATURES: u64 = VIRTIO_BASE + 0x010; pub const VIRTIO_DRIVER_FEATURES: u64 = VIRTIO_BASE + 0x020; pub const VIRTIO_GUEST_PAGE_SIZE: u64 = VIRTIO_BASE + 0x028; pub const VIRTIO_QUEUE_SEL: u64 = VIRTIO_BASE + 0x030; pub const VIRTIO_QUEUE_NUM_MAX: u64 = VIRTIO_BASE + 0x034; pub const VIRTIO_QUEUE_NUM: u64 = VIRTIO_BASE + 0x038; pub const VIRTIO_QUEUE_PFN: u64 = VIRTIO_BASE + 0x040; pub const VIRTIO_QUEUE_NOTIFY: u64 = VIRTIO_BASE + 0x050; pub const VIRTIO_STATUS: u64 = VIRTIO_BASE + 0x070; pub struct Virtio { id: u64, driver_features: u32, page_size: u32, queue_sel: u32, queue_num: u32, queue_pfn: u32, queue_notify: u32, status: u32, disk: Vec<u8>, } impl Device for Virtio { fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> { match size { 32 => Ok(self.load32(addr)), _ => Err(Exception::LoadAccessFault), } } fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> { match size { 32 => Ok(self.store32(addr, value)), _ => Err(Exception::StoreAMOAccessFault), } } } impl Virtio { pub fn new(disk_image: Vec<u8>) -> Self { let mut disk = Vec::new(); disk.extend(disk_image.iter().cloned()); Self { id: 0, driver_features: 0, page_size: 0, queue_sel: 0, queue_num: 0, queue_pfn: 0, queue_notify: 9999, status: 0, disk, } } pub fn is_interrupting(&mut self) -> bool { if self.queue_notify != 9999 { self.queue_notify = 9999; return true; } false } pub fn load32(&self, addr: u64) -> u64 { match addr { VIRTIO_MAGIC => 0x74726976, VIRTIO_VERSION => 0x1, VIRTIO_DEVICE_ID => 0x2, VIRTIO_VENDOR_ID => 0x554d4551, VIRTIO_DEVICE_FEATURES => 0, VIRTIO_DRIVER_FEATURES => self.driver_features as u64, VIRTIO_QUEUE_NUM_MAX => 8, VIRTIO_QUEUE_PFN => self.queue_pfn as u64, VIRTIO_STATUS => self.status as u64, _ => 0, } } pub fn store32(&mut self, addr: u64, value: u64) { let val = value as u32; match addr { VIRTIO_DEVICE_FEATURES => self.driver_features = val, VIRTIO_GUEST_PAGE_SIZE => self.page_size = val, VIRTIO_QUEUE_SEL => self.queue_sel = val, VIRTIO_QUEUE_NUM => self.queue_num = val, VIRTIO_QUEUE_PFN => self.queue_pfn = val, VIRTIO_QUEUE_NOTIFY => self.queue_notify = val, VIRTIO_STATUS => self.status = val, _ => {} } } fn get_new_id(&mut self) -> u64 { self.id = self.id.wrapping_add(1); self.id } fn desc_addr(&self) -> u64 { self.queue_pfn as u64 * self.page_size as u64 } fn read_disk(&self, addr: u64) -> u64 { self.disk[addr as usize] as u64 } fn write_disk(&mut self, addr: u64, value: u64) { self.disk[addr as usize] = value as u8 } pub fn disk_access(cpu: &mut Cpu) { let desc_addr = cpu.bus.virtio.desc_addr(); let avail_addr = cpu.bus.virtio.desc_addr() + 0x40; let used_addr = cpu.bus.virtio.desc_addr() + 4096; let offset = cpu .bus .load(avail_addr.wrapping_add(1), 16) .expect("failed to read offset"); let index = cpu .bus .load( avail_addr.wrapping_add(offset % DESC_NUM).wrapping_add(2), 16, ) .expect("failed to read index"); let desc_addr0 = desc_addr + VRING_DESC_SIZE * index; let addr0 = cpu .bus .load(desc_addr0, 64) .expect("failed to read an address field in a descriptor"); let next0 = cpu .bus .load(desc_addr0.wrapping_add(14), 16) .expect("failed to read a next field in a descripor"); 
let desc_addr1 = desc_addr + VRING_DESC_SIZE * next0; let addr1 = cpu .bus .load(desc_addr1, 64) .expect("failed to read an address field in a descriptor"); let len1 = cpu .bus .load(desc_addr1.wrapping_add(8), 32) .expect("failed to read a length field in a descriptor"); let flags1 = cpu .bus .load(desc_addr1.wrapping_add(12), 16) .expect("failed to read a flags field in a descriptor"); let blk_sector = cpu .bus .load(addr0.wrapping_add(8), 64) .expect("failed to read a sector field in a virtio_blk_outhdr"); match (flags1 & 2) == 0 { true => { for i in 0..len1 as u64 { let data = cpu .bus .load(addr1 + i, 8) .expect("failed to read from memory"); cpu.bus.virtio.write_disk(blk_sector * 512 + i, data); } } false => { for i in 0..len1 as u64 { let data = cpu.bus.virtio.read_disk(blk_sector * 512 + i); cpu.bus .store(addr1 + i, 8, data) .expect("failed to write to memory"); } } }; let new_id = cpu.bus.virtio.get_new_id(); cpu.bus .store(used_addr.wrapping_add(2), 16, new_id % 8) .expect("failed to write to memory"); } }
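The file_code above models the legacy virtio-mmio register file and drives its interrupt purely off writes to QUEUE_NOTIFY. Below is a minimal usage sketch of that register interface, assuming the Virtio type and the VIRTIO_* constants from this module are in scope; the Cpu/Bus wiring and the disk_access path are omitted.

// Hypothetical usage sketch, not part of the dataset record.
// Assumes Virtio, VIRTIO_MAGIC and VIRTIO_QUEUE_NOTIFY are imported from this module.
fn probe_virtio() {
    let disk_image = vec![0u8; 512]; // one dummy 512-byte sector
    let mut virtio = Virtio::new(disk_image);

    // A driver first reads the magic value ("virt" as a little-endian u32).
    assert_eq!(virtio.load32(VIRTIO_MAGIC), 0x74726976);

    // Writing a queue index to QUEUE_NOTIFY arms the interrupt flag...
    virtio.store32(VIRTIO_QUEUE_NOTIFY, 0);
    assert!(virtio.is_interrupting());

    // ...and polling it once resets queue_notify back to the 9999 sentinel.
    assert!(!virtio.is_interrupting());
}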
use crate::bus::*; use crate::cpu::*; use crate::trap::*; pub const VIRTIO_IRQ: u64 = 1; const VRING_DESC_SIZE: u64 = 16; const DESC_NUM: u64 = 8; pub const VIRTIO_MAGIC: u64 = VIRTIO_BASE + 0x000; pub const VIRTIO_VERSION: u64 = VIRTIO_BASE + 0x004; pub const VIRTIO_DEVICE_ID: u64 = VIRTIO_BASE + 0x008; pub const VIRTIO_VENDOR_ID: u64 = VIRTIO_BASE + 0x00c; pub const VIRTIO_DEVICE_FEATURES: u64 = VIRTIO_BASE + 0x010; pub const VIRTIO_DRIVER_FEATURES: u64 = VIRTIO_BASE + 0x020; pub const VIRTIO_GUEST_PAGE_SIZE: u64 = VIRTIO_BASE + 0x028; pub const VIRTIO_QUEUE_SEL: u64 = VIRTIO_BASE + 0x030; pub const VIRTIO_QUEUE_NUM_MAX: u64 = VIRTIO_BASE + 0x034; pub const VIRTIO_QUEUE_NUM: u64 = VIRTIO_BASE + 0x038; pub const VIRTIO_QUEUE_PFN: u64 = VIRTIO_BASE + 0x040; pub const VIRTIO_QUEUE_NOTIFY: u64 = VIRTIO_BASE + 0x050; pub const VIRTIO_STATUS: u64 = VIRTIO_BASE + 0x070; pub struct Virtio { id: u64, driver_features: u32, page_size: u32, queue_sel: u32, queue_num: u32, queue_pfn: u32, queue_notify: u32, status: u32, disk: Vec<u8>, } impl Device for Virtio { fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> { match size { 32 => Ok(self.load32(addr)), _ => Err(Exception::LoadAccessFault), } } fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {
} } impl Virtio { pub fn new(disk_image: Vec<u8>) -> Self { let mut disk = Vec::new(); disk.extend(disk_image.iter().cloned()); Self { id: 0, driver_features: 0, page_size: 0, queue_sel: 0, queue_num: 0, queue_pfn: 0, queue_notify: 9999, status: 0, disk, } } pub fn is_interrupting(&mut self) -> bool { if self.queue_notify != 9999 { self.queue_notify = 9999; return true; } false } pub fn load32(&self, addr: u64) -> u64 { match addr { VIRTIO_MAGIC => 0x74726976, VIRTIO_VERSION => 0x1, VIRTIO_DEVICE_ID => 0x2, VIRTIO_VENDOR_ID => 0x554d4551, VIRTIO_DEVICE_FEATURES => 0, VIRTIO_DRIVER_FEATURES => self.driver_features as u64, VIRTIO_QUEUE_NUM_MAX => 8, VIRTIO_QUEUE_PFN => self.queue_pfn as u64, VIRTIO_STATUS => self.status as u64, _ => 0, } } pub fn store32(&mut self, addr: u64, value: u64) { let val = value as u32; match addr { VIRTIO_DEVICE_FEATURES => self.driver_features = val, VIRTIO_GUEST_PAGE_SIZE => self.page_size = val, VIRTIO_QUEUE_SEL => self.queue_sel = val, VIRTIO_QUEUE_NUM => self.queue_num = val, VIRTIO_QUEUE_PFN => self.queue_pfn = val, VIRTIO_QUEUE_NOTIFY => self.queue_notify = val, VIRTIO_STATUS => self.status = val, _ => {} } } fn get_new_id(&mut self) -> u64 { self.id = self.id.wrapping_add(1); self.id } fn desc_addr(&self) -> u64 { self.queue_pfn as u64 * self.page_size as u64 } fn read_disk(&self, addr: u64) -> u64 { self.disk[addr as usize] as u64 } fn write_disk(&mut self, addr: u64, value: u64) { self.disk[addr as usize] = value as u8 } pub fn disk_access(cpu: &mut Cpu) { let desc_addr = cpu.bus.virtio.desc_addr(); let avail_addr = cpu.bus.virtio.desc_addr() + 0x40; let used_addr = cpu.bus.virtio.desc_addr() + 4096; let offset = cpu .bus .load(avail_addr.wrapping_add(1), 16) .expect("failed to read offset"); let index = cpu .bus .load( avail_addr.wrapping_add(offset % DESC_NUM).wrapping_add(2), 16, ) .expect("failed to read index"); let desc_addr0 = desc_addr + VRING_DESC_SIZE * index; let addr0 = cpu .bus .load(desc_addr0, 64) .expect("failed to read an address field in a descriptor"); let next0 = cpu .bus .load(desc_addr0.wrapping_add(14), 16) .expect("failed to read a next field in a descripor"); let desc_addr1 = desc_addr + VRING_DESC_SIZE * next0; let addr1 = cpu .bus .load(desc_addr1, 64) .expect("failed to read an address field in a descriptor"); let len1 = cpu .bus .load(desc_addr1.wrapping_add(8), 32) .expect("failed to read a length field in a descriptor"); let flags1 = cpu .bus .load(desc_addr1.wrapping_add(12), 16) .expect("failed to read a flags field in a descriptor"); let blk_sector = cpu .bus .load(addr0.wrapping_add(8), 64) .expect("failed to read a sector field in a virtio_blk_outhdr"); match (flags1 & 2) == 0 { true => { for i in 0..len1 as u64 { let data = cpu .bus .load(addr1 + i, 8) .expect("failed to read from memory"); cpu.bus.virtio.write_disk(blk_sector * 512 + i, data); } } false => { for i in 0..len1 as u64 { let data = cpu.bus.virtio.read_disk(blk_sector * 512 + i); cpu.bus .store(addr1 + i, 8, data) .expect("failed to write to memory"); } } }; let new_id = cpu.bus.virtio.get_new_id(); cpu.bus .store(used_addr.wrapping_add(2), 16, new_id % 8) .expect("failed to write to memory"); } }
match size { 32 => Ok(self.store32(addr, value)), _ => Err(Exception::StoreAMOAccessFault), }
if_condition
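The prefix, suffix and middle fields above carve the same file_code around the body of Device::store for Virtio: the middle is exactly the size-dispatch match inside that method (the masked span is a match expression, despite the if_condition strategy label). Reassembled as prefix + middle + suffix, the method reads as it does in file_code:

// Reassembly of the store method from the prefix/middle/suffix fields above.
fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {
    match size {
        // This legacy virtio-mmio model only accepts 32-bit register accesses.
        32 => Ok(self.store32(addr, value)),
        _ => Err(Exception::StoreAMOAccessFault),
    }
}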
[ { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(exception) => {\n\n exception.take_trap(&mut cpu);\n\n if exception.is_fatal() {\n", "file_path": "step06/src/main.rs", "rank": 0, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(exception) => {\n\n exception.take_trap(&mut cpu);\n\n if exception.is_fatal() {\n", "file_path": "step07/src/main.rs", "rank": 1, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if (args.len() != 2) && (args.len() != 3) {\n\n panic!(\"Usage: rvemu-for-book <filename> <(option) image>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut disk_image = Vec::new();\n\n if args.len() == 3 {\n\n let mut file = File::open(&args[2])?;\n\n file.read_to_end(&mut disk_image)?;\n\n }\n\n\n\n let mut cpu = Cpu::new(binary, disk_image);\n\n\n\n loop {\n\n // 1. Fetch.\n", "file_path": "step10/src/main.rs", "rank": 2, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(_) => break,\n\n };\n\n\n", "file_path": "step03/src/main.rs", "rank": 3, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if (args.len() != 2) && (args.len() != 3) {\n\n panic!(\"Usage: rvemu-for-book <filename> <(option) image>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut disk_image = Vec::new();\n\n if args.len() == 3 {\n\n let mut file = File::open(&args[2])?;\n\n file.read_to_end(&mut disk_image)?;\n\n }\n\n\n\n let mut cpu = Cpu::new(binary, disk_image);\n\n\n\n loop {\n\n // 1. Fetch.\n", "file_path": "step09/src/main.rs", "rank": 4, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. 
Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(exception) => {\n\n exception.take_trap(&mut cpu);\n\n if exception.is_fatal() {\n", "file_path": "step05/src/main.rs", "rank": 5, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n while cpu.pc < cpu.memory.len() as u64 {\n\n // 1. Fetch.\n\n let inst = cpu.fetch();\n\n\n\n // 2. Add 4 to the program counter.\n\n cpu.pc += 4;\n\n\n\n // 3. Decode.\n\n // 4. Execute.\n\n cpu.execute(inst);\n\n }\n\n cpu.dump_registers();\n\n\n\n Ok(())\n\n}\n", "file_path": "step01/src/main.rs", "rank": 6, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(exception) => {\n\n exception.take_trap(&mut cpu);\n\n if exception.is_fatal() {\n", "file_path": "step08/src/main.rs", "rank": 7, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(_) => break,\n\n };\n\n\n", "file_path": "step02/src/main.rs", "rank": 8, "score": 104393.75999448093 }, { "content": "fn main() -> io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n panic!(\"Usage: rvemu-for-book <filename>\");\n\n }\n\n let mut file = File::open(&args[1])?;\n\n let mut binary = Vec::new();\n\n file.read_to_end(&mut binary)?;\n\n\n\n let mut cpu = Cpu::new(binary);\n\n\n\n loop {\n\n // 1. 
Fetch.\n\n let inst = match cpu.fetch() {\n\n // Break the loop if an error occurs.\n\n Ok(inst) => inst,\n\n Err(_) => break,\n\n };\n\n\n", "file_path": "step04/src/main.rs", "rank": 9, "score": 104393.75999448093 }, { "content": "pub trait Device {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n clint: Clint,\n\n plic: Plic,\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n clint: Clint::new(),\n\n plic: Plic::new(),\n\n memory: Memory::new(binary),\n\n }\n", "file_path": "step06/src/bus.rs", "rank": 10, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n clint: Clint,\n\n plic: Plic,\n\n pub uart: Uart,\n\n pub virtio: Virtio,\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>, disk_image: Vec<u8>) -> Bus {\n\n Self {\n\n clint: Clint::new(),\n\n plic: Plic::new(),\n", "file_path": "step10/src/bus.rs", "rank": 11, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n clint: Clint,\n\n plic: Plic,\n\n pub uart: Uart,\n\n pub virtio: Virtio,\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>, disk_image: Vec<u8>) -> Bus {\n\n Self {\n\n clint: Clint::new(),\n\n plic: Plic::new(),\n", "file_path": "step09/src/bus.rs", "rank": 12, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n memory: Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n if MEMORY_BASE <= addr {\n", "file_path": "step05/src/bus.rs", "rank": 13, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n memory: Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n if MEMORY_BASE <= addr {\n", "file_path": "step04/src/bus.rs", "rank": 14, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n memory: 
Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n if MEMORY_BASE <= addr {\n", "file_path": "step03/src/bus.rs", "rank": 15, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n memory: Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n if MEMORY_BASE <= addr {\n", "file_path": "step02/src/bus.rs", "rank": 16, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n clint: Clint,\n\n plic: Plic,\n\n uart: Uart,\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n clint: Clint::new(),\n\n plic: Plic::new(),\n\n uart: Uart::new(),\n", "file_path": "step07/src/bus.rs", "rank": 17, "score": 93089.85741602186 }, { "content": "pub trait Device {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception>;\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception>;\n\n}\n\n\n\n/// The system bus.\n\npub struct Bus {\n\n clint: Clint,\n\n plic: Plic,\n\n pub uart: Uart,\n\n memory: Memory,\n\n}\n\n\n\nimpl Bus {\n\n /// Create a new system bus object.\n\n pub fn new(binary: Vec<u8>) -> Bus {\n\n Self {\n\n clint: Clint::new(),\n\n plic: Plic::new(),\n\n uart: Uart::new(),\n", "file_path": "step08/src/bus.rs", "rank": 18, "score": 93089.85741602186 }, { "content": "/// The CPU to contain registers, a program coutner, and memory.\n\nstruct Cpu {\n\n /// 32 64-bit integer registers.\n\n regs: [u64; 32],\n\n /// Program counter to hold the the memory address of the next instruction that would be executed.\n\n pc: u64,\n\n /// Computer memory to store executable instructions.\n\n memory: Vec<u8>,\n\n}\n\n\n\nimpl Cpu {\n\n /// Create a new `Cpu` object.\n\n fn new(binary: Vec<u8>) -> Self {\n\n let mut regs = [0; 32];\n\n regs[2] = MEMORY_SIZE;\n\n\n\n Self {\n\n regs,\n\n pc: 0,\n\n memory: binary,\n\n }\n", "file_path": "step01/src/main.rs", "rank": 19, "score": 61046.844222439904 }, { "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Helper method for a trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let cause = self.exception_code();\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n\n\n\n // Set the program counter to the supervisor trap-handler base address (stvec).\n\n cpu.pc = cpu.load_csr(STVEC) & !1;\n\n\n\n // 4.1.9 Supervisor Exception Program Counter (sepc)\n\n // \"The low bit of sepc (sepc[0]) is always zero.\"\n", "file_path": "step07/src/trap.rs", "rank": 20, "score": 55485.214292371296 }, 
{ "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu);\n\n /// Helper method for a trap handler.\n\n fn take_trap_helper(&self, cpu: &mut Cpu, is_interrupt: bool) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let mut cause = self.exception_code();\n\n // Set an interrupt bit if a trap is an interrupt.\n\n if is_interrupt {\n\n cause = (1 << 63) | cause;\n\n }\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n", "file_path": "step08/src/trap.rs", "rank": 21, "score": 55485.214292371296 }, { "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu);\n\n /// Helper method for a trap handler.\n\n fn take_trap_helper(&self, cpu: &mut Cpu, is_interrupt: bool) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let mut cause = self.exception_code();\n\n // Set an interrupt bit if a trap is an interrupt.\n\n if is_interrupt {\n\n cause = (1 << 63) | cause;\n\n }\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n", "file_path": "step10/src/trap.rs", "rank": 22, "score": 55485.214292371296 }, { "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Helper method for a trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let cause = self.exception_code();\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n\n\n\n // Set the program counter to the supervisor trap-handler base address (stvec).\n\n cpu.pc = cpu.load_csr(STVEC) & !1;\n\n\n\n // 4.1.9 Supervisor Exception Program Counter (sepc)\n\n // \"The low bit of sepc (sepc[0]) is always zero.\"\n", "file_path": "step05/src/trap.rs", "rank": 23, "score": 55485.214292371296 }, { "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Helper method for a trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let cause = self.exception_code();\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n\n\n\n // Set the program counter to the supervisor trap-handler base address (stvec).\n\n cpu.pc = cpu.load_csr(STVEC) 
& !1;\n\n\n\n // 4.1.9 Supervisor Exception Program Counter (sepc)\n\n // \"The low bit of sepc (sepc[0]) is always zero.\"\n", "file_path": "step06/src/trap.rs", "rank": 24, "score": 55485.214292371296 }, { "content": "/// The transfer of control to a trap handler caused by either an\n\n/// exception or an interrupt.\n\npub trait Trap {\n\n /// Returns an exception code that identifys the last exception.\n\n fn exception_code(&self) -> u64;\n\n /// Trap handler.\n\n fn take_trap(&self, cpu: &mut Cpu);\n\n /// Helper method for a trap handler.\n\n fn take_trap_helper(&self, cpu: &mut Cpu, is_interrupt: bool) {\n\n let exception_pc = cpu.pc.wrapping_sub(4);\n\n let previous_mode = cpu.mode;\n\n\n\n let mut cause = self.exception_code();\n\n // Set an interrupt bit if a trap is an interrupt.\n\n if is_interrupt {\n\n cause = (1 << 63) | cause;\n\n }\n\n if (previous_mode <= Mode::Supervisor)\n\n && ((cpu.load_csr(MEDELEG).wrapping_shr(cause as u32)) & 1 != 0)\n\n {\n\n // Handle the trap in S-mode.\n\n cpu.mode = Mode::Supervisor;\n", "file_path": "step09/src/trap.rs", "rank": 25, "score": 55485.214292371296 }, { "content": " fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Virtio {\n\n /// Create a new virtio object.\n\n pub fn new(disk_image: Vec<u8>) -> Self {\n\n let mut disk = Vec::new();\n\n disk.extend(disk_image.iter().cloned());\n", "file_path": "step10/src/virtio.rs", "rank": 27, "score": 40237.222282289236 }, { "content": " pub fn store32(&mut self, addr: u64, value: u64) {\n\n let val = value as u32;\n\n match addr {\n\n VIRTIO_DEVICE_FEATURES => self.driver_features = val,\n\n VIRTIO_GUEST_PAGE_SIZE => self.page_size = val,\n\n VIRTIO_QUEUE_SEL => self.queue_sel = val,\n\n VIRTIO_QUEUE_NUM => self.queue_num = val,\n\n VIRTIO_QUEUE_PFN => self.queue_pfn = val,\n\n VIRTIO_QUEUE_NOTIFY => self.queue_notify = val,\n\n VIRTIO_STATUS => self.status = val,\n\n _ => {}\n\n }\n\n }\n\n\n\n fn get_new_id(&mut self) -> u64 {\n\n self.id = self.id.wrapping_add(1);\n\n self.id\n\n }\n\n\n\n fn desc_addr(&self) -> u64 {\n", "file_path": "step10/src/virtio.rs", "rank": 29, "score": 40226.75680733346 }, { "content": "pub const VIRTIO_QUEUE_NOTIFY: u64 = VIRTIO_BASE + 0x050;\n\n/// Device status, read and write. Reading from this register returns the current device status flags.\n\n/// Writing non-zero values to this register sets the status flags, indicating the OS/driver\n\n/// progress. 
Writing zero (0x0) to this register triggers a device reset.\n\npub const VIRTIO_STATUS: u64 = VIRTIO_BASE + 0x070;\n\n\n\n/// Paravirtualized drivers for IO virtualization.\n\npub struct Virtio {\n\n id: u64,\n\n driver_features: u32,\n\n page_size: u32,\n\n queue_sel: u32,\n\n queue_num: u32,\n\n queue_pfn: u32,\n\n queue_notify: u32,\n\n status: u32,\n\n disk: Vec<u8>,\n\n}\n\n\n\nimpl Device for Virtio {\n", "file_path": "step10/src/virtio.rs", "rank": 31, "score": 40225.52531242547 }, { "content": " self.queue_pfn as u64 * self.page_size as u64\n\n }\n\n\n\n fn read_disk(&self, addr: u64) -> u64 {\n\n self.disk[addr as usize] as u64\n\n }\n\n\n\n fn write_disk(&mut self, addr: u64, value: u64) {\n\n self.disk[addr as usize] = value as u8\n\n }\n\n\n\n /// Access the disk via virtio. This is an associated function which takes a `cpu` object to\n\n /// read and write with a memory directly (DMA).\n\n pub fn disk_access(cpu: &mut Cpu) {\n\n // See more information in\n\n // https://github.com/mit-pdos/xv6-riscv/blob/riscv/kernel/virtio_disk.c\n\n\n\n // the spec says that legacy block operations use three\n\n // descriptors: one for type/reserved/sector, one for\n\n // the data, one for a 1-byte status result.\n", "file_path": "step10/src/virtio.rs", "rank": 32, "score": 40221.41997652512 }, { "content": "pub const VIRTIO_VERSION: u64 = VIRTIO_BASE + 0x004;\n\n/// device type; 1 is net, 2 is disk.\n\npub const VIRTIO_DEVICE_ID: u64 = VIRTIO_BASE + 0x008;\n\n/// Always return 0x554d4551\n\npub const VIRTIO_VENDOR_ID: u64 = VIRTIO_BASE + 0x00c;\n\n/// Device features.\n\npub const VIRTIO_DEVICE_FEATURES: u64 = VIRTIO_BASE + 0x010;\n\n/// Driver features.\n\npub const VIRTIO_DRIVER_FEATURES: u64 = VIRTIO_BASE + 0x020;\n\n/// Page size for PFN, write-only.\n\npub const VIRTIO_GUEST_PAGE_SIZE: u64 = VIRTIO_BASE + 0x028;\n\n/// Select queue, write-only.\n\npub const VIRTIO_QUEUE_SEL: u64 = VIRTIO_BASE + 0x030;\n\n/// Max size of current queue, read-only. In QEMU, `VIRTIO_COUNT = 8`.\n\npub const VIRTIO_QUEUE_NUM_MAX: u64 = VIRTIO_BASE + 0x034;\n\n/// Size of current queue, write-only.\n\npub const VIRTIO_QUEUE_NUM: u64 = VIRTIO_BASE + 0x038;\n\n/// Physical page number for queue, read and write.\n\npub const VIRTIO_QUEUE_PFN: u64 = VIRTIO_BASE + 0x040;\n\n/// Notify the queue number, write-only.\n", "file_path": "step10/src/virtio.rs", "rank": 34, "score": 40217.81793432597 }, { "content": " false\n\n }\n\n\n\n /// Load 4 bytes from virtio only if the addr is valid. Otherwise, return 0.\n\n pub fn load32(&self, addr: u64) -> u64 {\n\n match addr {\n\n VIRTIO_MAGIC => 0x74726976,\n\n VIRTIO_VERSION => 0x1,\n\n VIRTIO_DEVICE_ID => 0x2,\n\n VIRTIO_VENDOR_ID => 0x554d4551,\n\n VIRTIO_DEVICE_FEATURES => 0, // TODO: what should it return?\n\n VIRTIO_DRIVER_FEATURES => self.driver_features as u64,\n\n VIRTIO_QUEUE_NUM_MAX => 8,\n\n VIRTIO_QUEUE_PFN => self.queue_pfn as u64,\n\n VIRTIO_STATUS => self.status as u64,\n\n _ => 0,\n\n }\n\n }\n\n\n\n /// Store 4 bytes to virtio only if the addr is valid. Otherwise, does nothing.\n", "file_path": "step10/src/virtio.rs", "rank": 37, "score": 40217.009345708655 }, { "content": "//! The virtio module contains a virtualization standard for network and disk device drivers.\n\n//! This is the \"legacy\" virtio interface.\n\n//!\n\n//! The virtio spec:\n\n//! 
https://docs.oasis-open.org/virtio/virtio/v1.1/virtio-v1.1.pdf\n\n\n\nuse crate::bus::*;\n\nuse crate::cpu::*;\n\nuse crate::trap::*;\n\n\n\n/// The interrupt request of virtio.\n\npub const VIRTIO_IRQ: u64 = 1;\n\n\n\nconst VRING_DESC_SIZE: u64 = 16;\n\n/// The number of virtio descriptors. It must be a power of two.\n\nconst DESC_NUM: u64 = 8;\n\n\n\n/// Always return 0x74726976.\n\npub const VIRTIO_MAGIC: u64 = VIRTIO_BASE + 0x000;\n\n/// The version. 1 is legacy.\n", "file_path": "step10/src/virtio.rs", "rank": 38, "score": 40215.5551568459 }, { "content": " }\n\n false => {\n\n // Read disk data and write it to memory directly (DMA).\n\n for i in 0..len1 as u64 {\n\n let data = cpu.bus.virtio.read_disk(blk_sector * 512 + i);\n\n cpu.bus\n\n .store(addr1 + i, 8, data)\n\n .expect(\"failed to write to memory\");\n\n }\n\n }\n\n };\n\n\n\n // Write id to `UsedArea`. Add 2 because of its structure.\n\n // struct UsedArea {\n\n // uint16 flags;\n\n // uint16 id;\n\n // struct VRingUsedElem elems[NUM];\n\n // };\n\n let new_id = cpu.bus.virtio.get_new_id();\n\n cpu.bus\n\n .store(used_addr.wrapping_add(2), 16, new_id % 8)\n\n .expect(\"failed to write to memory\");\n\n }\n\n}\n", "file_path": "step10/src/virtio.rs", "rank": 40, "score": 40211.47706527354 }, { "content": "\n\n Self {\n\n id: 0,\n\n driver_features: 0,\n\n page_size: 0,\n\n queue_sel: 0,\n\n queue_num: 0,\n\n queue_pfn: 0,\n\n queue_notify: 9999, // TODO: what is the correct initial value?\n\n status: 0,\n\n disk,\n\n }\n\n }\n\n\n\n /// Return true if an interrupt is pending.\n\n pub fn is_interrupting(&mut self) -> bool {\n\n if self.queue_notify != 9999 {\n\n self.queue_notify = 9999;\n\n return true;\n\n }\n", "file_path": "step10/src/virtio.rs", "rank": 42, "score": 40209.60768745074 }, { "content": "\n\n // desc = pages -- num * VRingDesc\n\n // avail = pages + 0x40 -- 2 * uint16, then num * uint16\n\n // used = pages + 4096 -- 2 * uint16, then num * vRingUsedElem\n\n let desc_addr = cpu.bus.virtio.desc_addr();\n\n let avail_addr = cpu.bus.virtio.desc_addr() + 0x40;\n\n let used_addr = cpu.bus.virtio.desc_addr() + 4096;\n\n\n\n // avail[0] is flags\n\n // avail[1] tells the device how far to look in avail[2...].\n\n let offset = cpu\n\n .bus\n\n .load(avail_addr.wrapping_add(1), 16)\n\n .expect(\"failed to read offset\");\n\n // avail[2...] 
are desc[] indices the device should process.\n\n // we only tell device the first index in our chain of descriptors.\n\n let index = cpu\n\n .bus\n\n .load(\n\n avail_addr.wrapping_add(offset % DESC_NUM).wrapping_add(2),\n", "file_path": "step10/src/virtio.rs", "rank": 45, "score": 40205.706693824606 }, { "content": " // uint32 type;\n\n // uint32 reserved;\n\n // uint64 sector;\n\n // } buf0;\n\n let blk_sector = cpu\n\n .bus\n\n .load(addr0.wrapping_add(8), 64)\n\n .expect(\"failed to read a sector field in a virtio_blk_outhdr\");\n\n\n\n // Write to a device if the second bit `flag1` is set.\n\n match (flags1 & 2) == 0 {\n\n true => {\n\n // Read memory data and write it to a disk directly (DMA).\n\n for i in 0..len1 as u64 {\n\n let data = cpu\n\n .bus\n\n .load(addr1 + i, 8)\n\n .expect(\"failed to read from memory\");\n\n cpu.bus.virtio.write_disk(blk_sector * 512 + i, data);\n\n }\n", "file_path": "step10/src/virtio.rs", "rank": 47, "score": 40204.97645475709 }, { "content": " 16,\n\n )\n\n .expect(\"failed to read index\");\n\n\n\n // Read `VRingDesc`, virtio descriptors.\n\n let desc_addr0 = desc_addr + VRING_DESC_SIZE * index;\n\n let addr0 = cpu\n\n .bus\n\n .load(desc_addr0, 64)\n\n .expect(\"failed to read an address field in a descriptor\");\n\n // Add 14 because of `VRingDesc` structure.\n\n // struct VRingDesc {\n\n // uint64 addr;\n\n // uint32 len;\n\n // uint16 flags;\n\n // uint16 next\n\n // };\n\n // The `next` field can be accessed by offset 14 (8 + 4 + 2) bytes.\n\n let next0 = cpu\n\n .bus\n", "file_path": "step10/src/virtio.rs", "rank": 49, "score": 40197.841330413285 }, { "content": " .load(desc_addr0.wrapping_add(14), 16)\n\n .expect(\"failed to read a next field in a descripor\");\n\n\n\n // Read `VRingDesc` again, virtio descriptors.\n\n let desc_addr1 = desc_addr + VRING_DESC_SIZE * next0;\n\n let addr1 = cpu\n\n .bus\n\n .load(desc_addr1, 64)\n\n .expect(\"failed to read an address field in a descriptor\");\n\n let len1 = cpu\n\n .bus\n\n .load(desc_addr1.wrapping_add(8), 32)\n\n .expect(\"failed to read a length field in a descriptor\");\n\n let flags1 = cpu\n\n .bus\n\n .load(desc_addr1.wrapping_add(12), 16)\n\n .expect(\"failed to read a flags field in a descriptor\");\n\n\n\n // Read `virtio_blk_outhdr`. 
Add 8 because of its structure.\n\n // struct virtio_blk_outhdr {\n", "file_path": "step10/src/virtio.rs", "rank": 51, "score": 40197.5243348636 }, { "content": "impl Device for Clint {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Clint {\n\n /// Create a new `Clint` object.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "step06/src/clint.rs", "rank": 52, "score": 47.71070694758223 }, { "content": "impl Device for Clint {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Clint {\n\n /// Create a new `Clint` object.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "step08/src/clint.rs", "rank": 53, "score": 47.548470694029625 }, { "content": "impl Device for Clint {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Clint {\n\n /// Create a new `Clint` object.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "step09/src/clint.rs", "rank": 54, "score": 47.54847069402964 }, { "content": "impl Device for Clint {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Clint {\n\n /// Create a new `Clint` object.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "step10/src/clint.rs", "rank": 55, "score": 47.54847069402963 }, { "content": "impl Device for Clint {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Clint {\n\n /// Create a new `Clint` object.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "step07/src/clint.rs", "rank": 56, "score": 47.54847069402963 }, { "content": "pub struct Uart {\n\n /// Pair of an array for UART buffer and a conditional variable.\n\n uart: Arc<(Mutex<[u8; UART_SIZE as usize]>, Condvar)>,\n\n /// Bit if an interrupt happens.\n\n interrupting: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Device for Uart {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n _ => 
Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n", "file_path": "step08/src/uart.rs", "rank": 57, "score": 40.40137951543964 }, { "content": "pub struct Uart {\n\n /// Pair of an array for UART buffer and a conditional variable.\n\n uart: Arc<(Mutex<[u8; UART_SIZE as usize]>, Condvar)>,\n\n /// Bit if an interrupt happens.\n\n interrupting: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Device for Uart {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n", "file_path": "step10/src/uart.rs", "rank": 58, "score": 40.40137951543964 }, { "content": "pub struct Uart {\n\n /// Pair of an array for UART buffer and a conditional variable.\n\n uart: Arc<(Mutex<[u8; UART_SIZE as usize]>, Condvar)>,\n\n /// Bit if an interrupt happens.\n\n interrupting: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Device for Uart {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n", "file_path": "step09/src/uart.rs", "rank": 59, "score": 40.40137951543964 }, { "content": " senable: u64,\n\n spriority: u64,\n\n sclaim: u64,\n\n}\n\n\n\nimpl Device for Plic {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n", "file_path": "step06/src/plic.rs", "rank": 60, "score": 39.697289755172484 }, { "content": " senable: u64,\n\n spriority: u64,\n\n sclaim: u64,\n\n}\n\n\n\nimpl Device for Plic {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n", "file_path": "step07/src/plic.rs", "rank": 61, "score": 39.566843443417504 }, { "content": " senable: u64,\n\n spriority: u64,\n\n sclaim: u64,\n\n}\n\n\n\nimpl Device for Plic {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n", "file_path": "step09/src/plic.rs", "rank": 62, "score": 39.566843443417504 }, { "content": " senable: u64,\n\n spriority: u64,\n\n sclaim: u64,\n\n}\n\n\n\nimpl 
Device for Plic {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n", "file_path": "step08/src/plic.rs", "rank": 63, "score": 39.5668434434175 }, { "content": " senable: u64,\n\n spriority: u64,\n\n sclaim: u64,\n\n}\n\n\n\nimpl Device for Plic {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 32 => Ok(self.load32(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 32 => Ok(self.store32(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n", "file_path": "step10/src/plic.rs", "rank": 64, "score": 39.5668434434175 }, { "content": " /// Bit if an interrupt happens.\n\n interrupting: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Device for Uart {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n", "file_path": "step07/src/uart.rs", "rank": 65, "score": 39.22528712780178 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step05/src/memory.rs", "rank": 66, "score": 39.100729737377506 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step06/src/memory.rs", "rank": 67, "score": 39.10072973737751 }, { "content": "//! 
The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step08/src/memory.rs", "rank": 68, "score": 38.94906626992906 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step10/src/memory.rs", "rank": 69, "score": 38.94906626992906 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step07/src/memory.rs", "rank": 70, "score": 38.94906626992906 }, { "content": "//! 
The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\nuse crate::trap::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n", "file_path": "step09/src/memory.rs", "rank": 71, "score": 38.94906626992906 }, { "content": " pub fn store_csr(&mut self, addr: usize, value: u64) {\n\n match addr {\n\n SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n", "file_path": "step08/src/cpu.rs", "rank": 72, "score": 36.35398844934246 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step05/src/memory.rs", "rank": 73, "score": 36.1478578601221 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step06/src/memory.rs", "rank": 74, "score": 36.147857860122095 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step08/src/memory.rs", "rank": 75, "score": 36.1478578601221 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> 
Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step09/src/memory.rs", "rank": 76, "score": 36.1478578601221 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step10/src/memory.rs", "rank": 77, "score": 36.147857860122095 }, { "content": " 64 => Ok(self.load64(addr)),\n\n _ => Err(Exception::LoadAccessFault),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(Exception::StoreAMOAccessFault),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n", "file_path": "step07/src/memory.rs", "rank": 78, "score": 36.147857860122095 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n\n 64 => Ok(self.load64(addr)),\n", "file_path": "step02/src/memory.rs", "rank": 79, "score": 35.316985180086974 }, { "content": "//! The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n\n 64 => Ok(self.load64(addr)),\n", "file_path": "step04/src/memory.rs", "rank": 80, "score": 35.316985180086974 }, { "content": "//! 
The memory module contains a memory structure and implementation for memory access.\n\n\n\nuse crate::bus::*;\n\n\n\n/// Default memory size (128MiB).\n\npub const MEMORY_SIZE: u64 = 1024 * 1024 * 128;\n\n\n\n/// The dynamic random access memory (DRAM).\n\n#[derive(Debug)]\n\npub struct Memory {\n\n pub memory: Vec<u8>,\n\n}\n\n\n\nimpl Device for Memory {\n\n fn load(&self, addr: u64, size: u64) -> Result<u64, ()> {\n\n match size {\n\n 8 => Ok(self.load8(addr)),\n\n 16 => Ok(self.load16(addr)),\n\n 32 => Ok(self.load32(addr)),\n\n 64 => Ok(self.load64(addr)),\n", "file_path": "step03/src/memory.rs", "rank": 81, "score": 35.31698518008698 }, { "content": " return self.memory.load(addr, size);\n\n }\n\n Err(Exception::LoadAccessFault)\n\n }\n\n\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.store(addr, size, value);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.store(addr, size, value);\n\n }\n\n if UART_BASE <= addr && addr < UART_BASE + UART_SIZE {\n\n return self.uart.store(addr, size, value);\n\n }\n\n if VIRTIO_BASE <= addr && addr < VIRTIO_BASE + VIRTIO_SIZE {\n\n return self.virtio.store(addr, size, value);\n\n }\n\n if MEMORY_BASE <= addr {\n\n return self.memory.store(addr, size, value);\n\n }\n\n Err(Exception::StoreAMOAccessFault)\n\n }\n\n}\n", "file_path": "step09/src/bus.rs", "rank": 82, "score": 34.54944104027901 }, { "content": " return self.memory.load(addr, size);\n\n }\n\n Err(Exception::LoadAccessFault)\n\n }\n\n\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.store(addr, size, value);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.store(addr, size, value);\n\n }\n\n if UART_BASE <= addr && addr < UART_BASE + UART_SIZE {\n\n return self.uart.store(addr, size, value);\n\n }\n\n if VIRTIO_BASE <= addr && addr < VIRTIO_BASE + VIRTIO_SIZE {\n\n return self.virtio.store(addr, size, value);\n\n }\n\n if MEMORY_BASE <= addr {\n\n return self.memory.store(addr, size, value);\n\n }\n\n Err(Exception::StoreAMOAccessFault)\n\n }\n\n}\n", "file_path": "step10/src/bus.rs", "rank": 83, "score": 34.54944104027901 }, { "content": " SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, Exception> {\n", "file_path": "step07/src/cpu.rs", "rank": 84, "score": 34.086090623295455 }, { "content": " SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n self.bus.store(addr, size, 
value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, Exception> {\n", "file_path": "step05/src/cpu.rs", "rank": 85, "score": 34.086090623295455 }, { "content": " SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, Exception> {\n", "file_path": "step06/src/cpu.rs", "rank": 86, "score": 34.08609062329546 }, { "content": "\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n let p_addr = self.translate(addr, AccessType::Store)?;\n\n self.bus.store(p_addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, Exception> {\n\n let p_pc = self.translate(self.pc, AccessType::Instruction)?;\n\n match self.bus.load(p_pc, 32) {\n\n Ok(inst) => Ok(inst),\n\n Err(_e) => Err(Exception::InstructionAccessFault),\n\n }\n\n }\n\n\n\n /// Execute an instruction after decoding. Return true if an error happens, otherwise false.\n\n pub fn execute(&mut self, inst: u64) -> Result<(), Exception> {\n\n let opcode = inst & 0x0000007f;\n\n let rd = ((inst & 0x00000f80) >> 7) as usize;\n", "file_path": "step10/src/cpu.rs", "rank": 87, "score": 33.696420600009496 }, { "content": " _ => self.csrs[addr],\n\n }\n\n }\n\n\n\n /// Store a value to a CSR.\n\n pub fn store_csr(&mut self, addr: usize, value: u64) {\n\n match addr {\n\n SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n let p_addr = self.translate(addr, AccessType::Load)?;\n\n self.bus.load(p_addr, size)\n\n }\n", "file_path": "step10/src/cpu.rs", "rank": 88, "score": 33.37439635884545 }, { "content": " SIE => self.csrs[MIE] & self.csrs[MIDELEG],\n\n _ => self.csrs[addr],\n\n }\n\n }\n\n\n\n /// Store a value to a CSR.\n\n pub fn store_csr(&mut self, addr: usize, value: u64) {\n\n match addr {\n\n SIE => {\n\n self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n self.bus.load(addr, size)\n\n }\n", "file_path": "step09/src/cpu.rs", "rank": 89, "score": 33.32426821463284 }, { "content": "\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, Exception> {\n\n match self.bus.load(self.pc, 32) {\n\n Ok(inst) => Ok(inst),\n\n Err(_e) => Err(Exception::InstructionAccessFault),\n\n }\n\n }\n\n\n\n /// Execute an instruction after decoding. 
Return true if an error happens, otherwise false.\n\n pub fn execute(&mut self, inst: u64) -> Result<(), Exception> {\n\n let opcode = inst & 0x0000007f;\n\n let rd = ((inst & 0x00000f80) >> 7) as usize;\n\n let rs1 = ((inst & 0x000f8000) >> 15) as usize;\n\n let rs2 = ((inst & 0x01f00000) >> 20) as usize;\n", "file_path": "step09/src/cpu.rs", "rank": 90, "score": 33.23179024771652 }, { "content": " self.csrs[MIE] =\n\n (self.csrs[MIE] & !self.csrs[MIDELEG]) | (value & self.csrs[MIDELEG]);\n\n }\n\n _ => self.csrs[addr] = value,\n\n }\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, ()> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, ()> {\n\n match self.bus.load(self.pc, 32) {\n", "file_path": "step04/src/cpu.rs", "rank": 91, "score": 32.575272332270806 }, { "content": " println!(\"{}\", output);\n\n }\n\n\n\n /// Load a value from a memory.\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, ()> {\n\n self.bus.load(addr, size)\n\n }\n\n\n\n /// Store a value to a memory.\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()> {\n\n self.bus.store(addr, size, value)\n\n }\n\n\n\n /// Get an instruction from the memory.\n\n pub fn fetch(&mut self) -> Result<u64, ()> {\n\n match self.bus.load(self.pc, 32) {\n\n Ok(inst) => Ok(inst),\n\n Err(_e) => Err(()),\n\n }\n\n }\n", "file_path": "step02/src/cpu.rs", "rank": 92, "score": 32.38509771067909 }, { "content": " _ => Err(()),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n\n memory.splice(..binary.len(), binary.iter().cloned());\n", "file_path": "step04/src/memory.rs", "rank": 93, "score": 32.211373280605514 }, { "content": " _ => Err(()),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n\n memory.splice(..binary.len(), binary.iter().cloned());\n", "file_path": "step03/src/memory.rs", "rank": 94, "score": 32.211373280605514 }, { "content": " _ => Err(()),\n\n }\n\n }\n\n\n\n fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), ()> {\n\n match size {\n\n 8 => Ok(self.store8(addr, value)),\n\n 16 => Ok(self.store16(addr, value)),\n\n 32 => Ok(self.store32(addr, value)),\n\n 64 => Ok(self.store64(addr, value)),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl Memory {\n\n /// Create a new `Memory` object with default memory size.\n\n pub fn new(binary: Vec<u8>) -> Memory {\n\n let mut memory = vec![0; MEMORY_SIZE as usize];\n\n 
memory.splice(..binary.len(), binary.iter().cloned());\n", "file_path": "step02/src/memory.rs", "rank": 95, "score": 32.21137328060552 }, { "content": " uart: Uart::new(),\n\n virtio: Virtio::new(disk_image),\n\n memory: Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.load(addr, size);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.load(addr, size);\n\n }\n\n if UART_BASE <= addr && addr < UART_BASE + UART_SIZE {\n\n return self.uart.load(addr, size);\n\n }\n\n if VIRTIO_BASE <= addr && addr < VIRTIO_BASE + VIRTIO_SIZE {\n\n return self.virtio.load(addr, size);\n\n }\n\n if MEMORY_BASE <= addr {\n", "file_path": "step09/src/bus.rs", "rank": 96, "score": 32.1063318657451 }, { "content": " uart: Uart::new(),\n\n virtio: Virtio::new(disk_image),\n\n memory: Memory::new(binary),\n\n }\n\n }\n\n\n\n pub fn load(&mut self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.load(addr, size);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.load(addr, size);\n\n }\n\n if UART_BASE <= addr && addr < UART_BASE + UART_SIZE {\n\n return self.uart.load(addr, size);\n\n }\n\n if VIRTIO_BASE <= addr && addr < VIRTIO_BASE + VIRTIO_SIZE {\n\n return self.virtio.load(addr, size);\n\n }\n\n if MEMORY_BASE <= addr {\n", "file_path": "step10/src/bus.rs", "rank": 97, "score": 32.10633186574509 }, { "content": " }\n\n\n\n pub fn load(&self, addr: u64, size: u64) -> Result<u64, Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.load(addr, size);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.load(addr, size);\n\n }\n\n if MEMORY_BASE <= addr {\n\n return self.memory.load(addr, size);\n\n }\n\n Err(Exception::LoadAccessFault)\n\n }\n\n\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n if CLINT_BASE <= addr && addr < CLINT_BASE + CLINT_SIZE {\n\n return self.clint.store(addr, size, value);\n\n }\n\n if PLIC_BASE <= addr && addr < PLIC_BASE + PLIC_SIZE {\n\n return self.plic.store(addr, size, value);\n\n }\n\n if MEMORY_BASE <= addr {\n\n return self.memory.store(addr, size, value);\n\n }\n\n Err(Exception::StoreAMOAccessFault)\n\n }\n\n}\n", "file_path": "step06/src/bus.rs", "rank": 98, "score": 31.93868862060419 }, { "content": " return self.memory.load(addr, size);\n\n }\n\n Err(Exception::LoadAccessFault)\n\n }\n\n\n\n pub fn store(&mut self, addr: u64, size: u64, value: u64) -> Result<(), Exception> {\n\n if MEMORY_BASE <= addr {\n\n return self.memory.store(addr, size, value);\n\n }\n\n Err(Exception::StoreAMOAccessFault)\n\n }\n\n}\n", "file_path": "step05/src/bus.rs", "rank": 99, "score": 31.439669198842715 } ]
Rust
descriptor/src/ranges.rs
Moxinilian/rendy
c521b6d97d8154042bcc2215c7d7906cc70d7282
use std::{ cmp::Ordering, ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}, }; pub use gfx_hal::pso::{DescriptorRangeDesc, DescriptorSetLayoutBinding, DescriptorType}; const DESCPTOR_TYPES_COUNT: usize = 11; const DESCRIPTOR_TYPES: [DescriptorType; DESCPTOR_TYPES_COUNT] = [ DescriptorType::Sampler, DescriptorType::CombinedImageSampler, DescriptorType::SampledImage, DescriptorType::StorageImage, DescriptorType::UniformTexelBuffer, DescriptorType::StorageTexelBuffer, DescriptorType::UniformBuffer, DescriptorType::StorageBuffer, DescriptorType::UniformBufferDynamic, DescriptorType::StorageBufferDynamic, DescriptorType::InputAttachment, ]; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct DescriptorRanges { counts: [u32; DESCPTOR_TYPES_COUNT], } impl DescriptorRanges { pub fn zero() -> Self { DescriptorRanges { counts: [0; DESCPTOR_TYPES_COUNT], } } pub fn add_binding(&mut self, binding: DescriptorSetLayoutBinding) { self.counts[binding.ty as usize] += binding.count as u32; } pub fn iter(&self) -> DescriptorRangesIter<'_> { DescriptorRangesIter { counts: &self.counts, index: 0, } } pub fn counts(&self) -> &[u32] { &self.counts } pub fn counts_mut(&mut self) -> &mut [u32] { &mut self.counts } pub fn from_bindings(bindings: &[DescriptorSetLayoutBinding]) -> Self { let mut descs = Self::zero(); for binding in bindings { descs.counts[binding.ty as usize] += binding.count as u32; } descs } pub fn from_binding_iter<I>(bindings: I) -> Self where I: Iterator<Item = DescriptorSetLayoutBinding> { let mut descs = Self::zero(); for binding in bindings { descs.counts[binding.ty as usize] += binding.count as u32; } descs } } impl PartialOrd for DescriptorRanges { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { let mut ord = self.counts[0].partial_cmp(&other.counts[0])?; for i in 1..DESCPTOR_TYPES_COUNT { match (ord, self.counts[i].partial_cmp(&other.counts[i])?) { (Ordering::Less, Ordering::Greater) | (Ordering::Greater, Ordering::Less) => { return None; } (Ordering::Equal, new) => ord = new, _ => (), } } Some(ord) } } impl Add for DescriptorRanges { type Output = Self; fn add(mut self, rhs: Self) -> Self { self += rhs; self } } impl AddAssign for DescriptorRanges { fn add_assign(&mut self, rhs: Self) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] += rhs.counts[i]; } } } impl Sub for DescriptorRanges { type Output = Self; fn sub(mut self, rhs: Self) -> Self { self -= rhs; self } } impl SubAssign for DescriptorRanges { fn sub_assign(&mut self, rhs: Self) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] -= rhs.counts[i]; } } } impl Mul<u32> for DescriptorRanges { type Output = Self; fn mul(mut self, rhs: u32) -> Self { self *= rhs; self } } impl MulAssign<u32> for DescriptorRanges { fn mul_assign(&mut self, rhs: u32) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] *= rhs; } } } impl<'a> IntoIterator for &'a DescriptorRanges { type Item = DescriptorRangeDesc; type IntoIter = DescriptorRangesIter<'a>; fn into_iter(self) -> DescriptorRangesIter<'a> { self.iter() } } pub struct DescriptorRangesIter<'a> { counts: &'a [u32; DESCPTOR_TYPES_COUNT], index: u8, } impl<'a> Iterator for DescriptorRangesIter<'a> { type Item = DescriptorRangeDesc; fn next(&mut self) -> Option<DescriptorRangeDesc> { loop { let index = self.index as usize; if index >= DESCPTOR_TYPES_COUNT { return None; } else { self.index += 1; if self.counts[index] > 0 { return Some(DescriptorRangeDesc { count: self.counts[index] as usize, ty: DESCRIPTOR_TYPES[index], }); } } } } }
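A minimal usage sketch of the DescriptorRanges helper defined in the file_code field above, added only as a reading aid and not part of the dataset row. It assumes the gfx-hal field layout of DescriptorSetLayoutBinding (binding/ty/count/stage_flags/immutable_samplers) targeted by this commit, and ShaderStageFlags::VERTEX is an illustrative choice; DescriptorRanges, DescriptorRangeDesc, DescriptorSetLayoutBinding and DescriptorType are re-exported by the module itself.

    use gfx_hal::pso::ShaderStageFlags;

    // Sum descriptor counts per type from layout bindings, then collect the
    // non-empty pool ranges (one DescriptorRangeDesc per used descriptor type).
    fn pool_ranges() -> Vec<DescriptorRangeDesc> {
        let bindings = [DescriptorSetLayoutBinding {
            binding: 0,
            ty: DescriptorType::UniformBuffer,
            count: 1,
            stage_flags: ShaderStageFlags::VERTEX, // assumed stage, for illustration only
            immutable_samplers: false,
        }];
        DescriptorRanges::from_bindings(&bindings).iter().collect()
    }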
use std::{ cmp::Ordering, ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}, }; pub use gfx_hal::pso::{DescriptorRangeDesc, DescriptorSetLayoutBinding, DescriptorType}; const DESCPTOR_TYPES_COUNT: usize = 11; const DESCRIPTOR_TYPES: [DescriptorType; DESCPTOR_TYPES_COUNT] = [ DescriptorType::Sampler, DescriptorType::CombinedImageSampler, DescriptorType::SampledImage, DescriptorType::StorageImage, DescriptorType::UniformTexelBuffer, DescriptorType::StorageTexelBuffer, DescriptorType::UniformBuffer, DescriptorType::StorageBuffer, DescriptorType::UniformBufferDynamic, DescriptorType::StorageBufferDynamic, DescriptorType::InputAttachment, ]; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct DescriptorRanges { counts: [u32; DESCPTOR_TYPES_COUNT], } impl DescriptorRanges { pub fn zero() -> Self { DescriptorRanges { counts: [0; DESCPTOR_TYPES_COUNT], } } pub fn add_binding(&mut self, binding: DescriptorSetLayoutBinding) { self.counts[binding.ty as usize] += binding.count as u32; } pub fn iter(&self) -> DescriptorRangesIter<'_> { DescriptorRangesIter { counts: &self.counts, index: 0, } } pub fn counts(&self) -> &[u32] { &self.counts } pub fn counts_mut(&mut self) -> &mut [u32] { &mut self.counts } pub fn from_bindings(bindings: &[DescriptorSetLayoutBinding]) -> Self { let mut descs = Self::zero(); for binding in bindings { descs.counts[binding.ty as usize] += binding.count as u32; } descs } pub fn from_binding_iter<I>(bindings: I) -> Self where I: Iterator<Item = DescriptorSetLayoutBinding> { let mut descs = Self::zero(); for binding in bindings { descs.counts[binding.ty as usize] += binding.count as u32; } descs } } impl PartialOrd for DescriptorRanges { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { let mut ord = self.counts[0].partial_cmp(&other.counts[0])?; for i in 1..DESCPTOR_TYPES_COUNT { match (ord, self.counts[i].partial_cmp(&other.counts[i])?) { (Ordering::Less, Ordering::Greater) | (Ordering::Greater, Ordering::Less) => { return None; } (Ordering::Equal, new) => ord = new, _ => (), } } Some(ord) } } impl Add for DescriptorRanges { type Output = Self; fn add(mut self, rhs: Self) -> Self { self += rhs; self } } impl AddAssign for DescriptorRanges { fn add_assign(&mut self, rhs: Self) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] += rhs.counts[i]; } } } impl Sub for DescriptorRanges { type Output = Self; fn sub(mut self, rhs: Self) -> Self { self -= rhs; self } } impl SubAssign for DescriptorRanges { fn sub_assign(&mut self, rhs: Self) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] -= rhs.counts[i]; } } } impl Mul<u32> for DescriptorRanges { type Output = Self; fn mul(mut self, rhs: u32) -> Self { self *= rhs; self } } impl MulAssign<u32> for DescriptorRanges { fn mul_assign(&mut self, rhs: u32) { for i in 0..DESCPTOR_TYPES_COUNT { self.counts[i] *= rhs; } } } impl<'a> IntoIterator for &'a DescriptorRanges { type Item = DescriptorRangeDesc; type IntoIter = DescriptorRangesIter<'a>; fn into_iter(self) -> DescriptorRangesIter<'a> { self.iter() } } pub struct DescriptorRangesIter<'a> { counts: &'a [u32; DESCPTOR_TYPES_COUNT], index: u8, } impl<'a> Iterator for DescriptorRangesIter<'a> { type Item = DescriptorRangeDesc; fn next(&mut self) -> Option<DescriptorRangeDesc> { loop { let index = self.index as usize; if index >= DESCPTOR_TYPES_COUNT { return None; } else { self.index += 1; if self.counts[index] > 0 { return
; } } } } }
Some(DescriptorRangeDesc { count: self.counts[index] as usize, ty: DESCRIPTOR_TYPES[index], })
call_expression
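For readability, the completion target of this row spliced back together: the prefix field ends inside DescriptorRangesIter::next, the middle field (a call_expression, per the strategy field above) supplies the Some(DescriptorRangeDesc { .. }) expression, and the suffix field closes the remaining braces. This is only the flattened one-line fields re-wrapped onto multiple lines; all identifiers, including the original DESCPTOR_TYPES_COUNT spelling, are verbatim from the row.

    impl<'a> Iterator for DescriptorRangesIter<'a> {
        type Item = DescriptorRangeDesc;

        fn next(&mut self) -> Option<DescriptorRangeDesc> {
            loop {
                let index = self.index as usize;
                if index >= DESCPTOR_TYPES_COUNT {
                    return None;
                } else {
                    self.index += 1;
                    if self.counts[index] > 0 {
                        // The middle span: emit the range descriptor for this
                        // non-empty descriptor type.
                        return Some(DescriptorRangeDesc {
                            count: self.counts[index] as usize,
                            ty: DESCRIPTOR_TYPES[index],
                        });
                    }
                }
            }
        }
    }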
[ { "content": "/// Trait for vertex attributes to implement\n\npub trait AsAttribute: Debug + PartialEq + PartialOrd + Copy + Send + Sync + 'static {\n\n /// Name of the attribute\n\n const NAME: &'static str;\n\n /// Attribute format.\n\n const FORMAT: Format;\n\n}\n\n\n\n/// A unique identifier for vertex attribute of given name, format and array index.\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct AttrUuid(u16);\n\n\n\nlazy_static::lazy_static! {\n\n static ref UUID_MAP: parking_lot::RwLock<fnv::FnvHashMap<(Cow<'static, str>, u8, Format), AttrUuid>> =\n\n Default::default();\n\n}\n\n\n", "file_path": "util/src/types/vertex.rs", "rank": 0, "score": 264876.4016146197 }, { "content": "/// Trait implemented by all valid vertex formats.\n\npub trait AsVertex: Debug + PartialEq + PartialOrd + Copy + Sized + Send + Sync + 'static {\n\n /// List of all attributes formats with name and offset.\n\n fn vertex() -> VertexFormat;\n\n}\n\n\n\nimpl<T> AsVertex for T\n\nwhere\n\n T: AsAttribute,\n\n{\n\n fn vertex() -> VertexFormat {\n\n VertexFormat::new(Some((T::FORMAT, T::NAME)))\n\n }\n\n}\n\n\n\n/// Vertex format with position and RGBA8 color attributes.\n\n#[repr(C)]\n\n#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub struct PosColor {\n\n /// Position of the vertex in 3D space.\n", "file_path": "util/src/types/vertex.rs", "rank": 1, "score": 256411.80980042164 }, { "content": "/// Calculate the number of mip levels for a 2D image with given dimensions\n\npub fn mip_levels_from_dims(width: u32, height: u32) -> u8 {\n\n ((32 - width.max(height).leading_zeros()).max(1) as u8).min(gfx_hal::image::MAX_LEVEL)\n\n}\n\n\n\n/// Generics-free texture builder.\n\n#[derive(Clone, Derivative)]\n\n#[derivative(Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\n/// Struct for staging data in preparation of building a `Texture`\n\npub struct TextureBuilder<'a> {\n\n kind: image::Kind,\n\n view_kind: image::ViewKind,\n\n format: Format,\n\n #[derivative(Debug = \"ignore\")]\n\n data: std::borrow::Cow<'a, [u8]>,\n\n data_width: u32,\n\n data_height: u32,\n\n sampler_info: gfx_hal::image::SamplerInfo,\n\n swizzle: Swizzle,\n\n mip_levels: MipLevels,\n", "file_path": "texture/src/texture.rs", "rank": 2, "score": 244031.49301182342 }, { "content": "/// Trait to abstract of specific access flags.\n\npub trait AccessFlags: Copy + Debug + BitOr<Output = Self> + BitOrAssign + 'static {\n\n /// Get flags value with no flags set.\n\n fn empty() -> Self;\n\n\n\n /// Check if this access must be exclusive.\n\n ///\n\n /// Basically this checks if all flags are known read flags.\n\n fn exclusive(&self) -> bool;\n\n}\n\n\n\nimpl AccessFlags for gfx_hal::buffer::Access {\n\n #[inline]\n\n fn empty() -> Self {\n\n Self::empty()\n\n }\n\n\n\n #[inline]\n\n fn exclusive(&self) -> bool {\n\n self.intersects(\n\n Self::SHADER_WRITE | Self::TRANSFER_WRITE | Self::HOST_WRITE | Self::MEMORY_WRITE,\n", "file_path": "chain/src/resource.rs", "rank": 3, "score": 239480.66162868554 }, { "content": "/// Trait to abstract of specific usage flags.\n\npub trait UsageFlags: Copy + Debug + BitOr<Output = Self> + BitOrAssign + 'static {}\n\n\n\nimpl UsageFlags for gfx_hal::buffer::Usage {}\n\nimpl UsageFlags for gfx_hal::image::Usage {}\n\n\n", "file_path": "chain/src/resource.rs", "rank": 4, "score": 239480.6616286856 }, { "content": "/// Cast vec of some arbitrary type 
into vec of bytes.\n\n/// Can lead to UB if allocator changes. Use with caution.\n\n/// TODO: Replace with something safer.\n\npub fn cast_vec<T: Copy>(mut vec: Vec<T>) -> Vec<u8> {\n\n let len = std::mem::size_of::<T>() * vec.len();\n\n let cap = std::mem::size_of::<T>() * vec.capacity();\n\n let ptr = vec.as_mut_ptr();\n\n std::mem::forget(vec);\n\n unsafe { Vec::from_raw_parts(ptr as _, len, cap) }\n\n}\n\n\n", "file_path": "util/src/casts.rs", "rank": 5, "score": 233854.20633805537 }, { "content": "/// Retreive a unique identifier for vertex attribute of given name, format and array index.\n\n///\n\n/// Non-array attributes should always use index 0.\n\n/// Matrices and arrays must be specified as a series of attributes with the same name and consecutive indices.\n\npub fn attribute_uuid(name: &str, index: u8, format: Format) -> AttrUuid {\n\n let read_map = UUID_MAP.read();\n\n if let Some(val) = read_map.get(&(Cow::Borrowed(name), index, format)) {\n\n return *val;\n\n }\n\n drop(read_map);\n\n\n\n let mut write_map = UUID_MAP.write();\n\n // First check again if value was not written by previous owner of the lock.\n\n if let Some(val) = write_map.get(&(Cow::Borrowed(name), index, format)) {\n\n return *val;\n\n }\n\n\n\n // uuid 0 is reserved for unused attribute indices\n\n let val = AttrUuid(write_map.len() as u16 + 1);\n\n write_map.insert((Cow::Owned(name.to_owned()), index, format), val);\n\n val\n\n}\n\n\n\n/// Type for position attribute of vertex.\n", "file_path": "util/src/types/vertex.rs", "rank": 6, "score": 233365.71346518685 }, { "content": "struct LookupBuilder<I: Hash + Eq + Copy> {\n\n forward: fnv::FnvHashMap<I, usize>,\n\n backward: Vec<I>,\n\n}\n\nimpl<I: Hash + Eq + Copy> LookupBuilder<I> {\n\n fn new() -> LookupBuilder<I> {\n\n LookupBuilder {\n\n forward: fnv::FnvHashMap::default(),\n\n backward: Vec::new(),\n\n }\n\n }\n\n\n\n fn forward(&mut self, id: I) -> usize {\n\n if let Some(&id_num) = self.forward.get(&id) {\n\n id_num\n\n } else {\n\n let id_num = self.backward.len();\n\n self.backward.push(id);\n\n self.forward.insert(id, id_num);\n\n id_num\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 7, "score": 226389.55817929108 }, { "content": "/// Cast `cow` of some arbitrary type into `cow` of bytes.\n\n/// Can lead to UB if allocator changes. 
Use with caution.\n\n/// TODO: Replace with something safer.\n\npub fn cast_cow<T: Copy>(cow: Cow<'_, [T]>) -> Cow<'_, [u8]> {\n\n match cow {\n\n Cow::Borrowed(slice) => Cow::Borrowed(cast_slice(slice)),\n\n Cow::Owned(vec) => Cow::Owned(cast_vec(vec)),\n\n }\n\n}\n\n\n", "file_path": "util/src/casts.rs", "rank": 8, "score": 212440.2066574345 }, { "content": "/// Abstract capability specifier.\n\npub trait Capability: Copy + std::fmt::Debug + 'static {\n\n /// Try to create capability instance from queue_type.\n\n /// Instance will be created if all required queue_type set.\n\n fn from_queue_type(queue_type: QueueType) -> Option<Self>;\n\n\n\n /// Convert into `QueueType`\n\n fn into_queue_type(self) -> QueueType;\n\n}\n\n\n\nimpl Capability for QueueType {\n\n fn from_queue_type(queue_type: QueueType) -> Option<Self> {\n\n Some(queue_type)\n\n }\n\n\n\n fn into_queue_type(self) -> QueueType {\n\n self\n\n }\n\n}\n\n\n\nimpl Capability for Transfer {\n", "file_path": "command/src/capability.rs", "rank": 9, "score": 208087.76399878052 }, { "content": "/// Type-level usage flags.\n\n/// It defines if buffer can be resubmitted without reset.\n\n/// Or even resubmitted while being executed.\n\npub trait Usage: Copy + Default + std::fmt::Debug + 'static {\n\n /// Flags required to begin command buffer.\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags;\n\n}\n\n\n\nimpl Usage for OneShot {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::ONE_TIME_SUBMIT\n\n }\n\n}\n\n\n\nimpl Usage for MultiShot {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::empty()\n\n }\n\n}\n\n\n\nimpl Usage for MultiShot<SimultaneousUse> {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::SIMULTANEOUS_USE\n\n }\n\n}\n\n\n\nimpl Usage for NoSimultaneousUse {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::empty()\n\n }\n\n}\n\n\n", "file_path": "command/src/buffer/usage.rs", "rank": 10, "score": 196678.41728714987 }, { "content": "/// Type-level buffer level flag.\n\n/// It defines whether buffer can be submitted to the command queues\n\n/// or executed as part of the primary buffers.\n\npub trait Level: Copy + Default + std::fmt::Debug + 'static {\n\n /// Get raw level value for command buffer allocation.\n\n fn raw_level(&self) -> gfx_hal::command::RawLevel;\n\n}\n\n\n\nimpl Level for PrimaryLevel {\n\n fn raw_level(&self) -> gfx_hal::command::RawLevel {\n\n gfx_hal::command::RawLevel::Primary\n\n }\n\n}\n\n\n\nimpl Level for SecondaryLevel {\n\n fn raw_level(&self) -> gfx_hal::command::RawLevel {\n\n gfx_hal::command::RawLevel::Secondary\n\n }\n\n}\n", "file_path": "command/src/buffer/level.rs", "rank": 11, "score": 196678.27636510605 }, { "content": "/// Specify flags required for command pool creation to allow individual buffer reset.\n\npub trait Reset: Copy + Default + std::fmt::Debug + 'static {\n\n /// Get flags for reset parameter.\n\n fn flags(&self) -> gfx_hal::pool::CommandPoolCreateFlags;\n\n}\n\n\n\nimpl Reset for IndividualReset {\n\n fn flags(&self) -> gfx_hal::pool::CommandPoolCreateFlags {\n\n gfx_hal::pool::CommandPoolCreateFlags::RESET_INDIVIDUAL\n\n }\n\n}\n\n\n\nimpl Reset for NoIndividualReset {\n\n fn flags(&self) -> gfx_hal::pool::CommandPoolCreateFlags {\n\n gfx_hal::pool::CommandPoolCreateFlags::empty()\n\n }\n\n}\n\n\n", "file_path": "command/src/buffer/reset.rs", 
"rank": 12, "score": 196673.78782065873 }, { "content": "/// AsPixel trait for extracting the underlying data representation information from a Rust data type\n\n/// # Example\n\n/// ```rust,no-run\n\n/// struct Rgba([u8; 4]);\n\n/// ```\n\npub trait AsPixel: Copy + std::fmt::Debug + Default + Send + Sync + 'static {\n\n /// Name of the pixel type.\n\n const NAME: &'static str;\n\n\n\n /// Size of the pixel.\n\n const SIZE: u32;\n\n\n\n /// Pixel format.\n\n const FORMAT: gfx_hal::format::Format;\n\n}\n\n\n\nmacro_rules! impl_pixel {\n\n ($($alias:ident = $channels:ident $size:ident $type:ident;)*) => {\n\n $(\n\n /// Pixel type alias.\n\n pub type $alias = Pixel<$channels, $size, $type>;\n\n\n\n impl AsPixel for $alias {\n\n const NAME: &'static str = stringify!($alias);\n\n const SIZE: u32 = num_channels!($channels) * <$size as ChannelSize>::SIZE;\n", "file_path": "texture/src/pixel.rs", "rank": 13, "score": 187091.98860042845 }, { "content": "#[allow(unused)]\n\ntype NoWaits<B> = std::iter::Empty<(\n\n &'static <B as gfx_hal::Backend>::Semaphore,\n\n gfx_hal::pso::PipelineStage,\n\n)>;\n", "file_path": "command/src/family/submission.rs", "rank": 14, "score": 186173.38463780028 }, { "content": "/// Memory usage trait.\n\npub trait MemoryUsage: std::fmt::Debug {\n\n /// Get set of properties required for the usage.\n\n fn properties_required(&self) -> gfx_hal::memory::Properties;\n\n\n\n /// Get comparable fitness value for memory properties.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This function will panic if properties set doesn't contain required properties.\n\n fn memory_fitness(&self, properties: gfx_hal::memory::Properties) -> u32;\n\n\n\n /// Get comparable fitness value for memory allocator.\n\n fn allocator_fitness(&self, kind: Kind) -> u32;\n\n}\n\n\n\nimpl<T> MemoryUsage for T\n\nwhere\n\n T: std::ops::Deref + std::fmt::Debug,\n\n T::Target: MemoryUsage,\n\n{\n", "file_path": "memory/src/usage.rs", "rank": 15, "score": 185048.27412001727 }, { "content": "fn index_stride(index_type: gfx_hal::IndexType) -> usize {\n\n match index_type {\n\n gfx_hal::IndexType::U16 => size_of::<u16>(),\n\n gfx_hal::IndexType::U32 => size_of::<u32>(),\n\n }\n\n}\n\n\n\nimpl<'a> MeshBuilder<'a> {\n\n /// Create empty builder.\n\n pub fn new() -> Self {\n\n MeshBuilder {\n\n vertices: smallvec::SmallVec::new(),\n\n indices: None,\n\n prim: gfx_hal::Primitive::TriangleList,\n\n }\n\n }\n\n\n\n /// Convert builder into fully owned type. 
This forces internal vertex and index buffers\n\n /// to be cloned, which allows borrowed source buffers to be released.\n\n pub fn into_owned(self) -> MeshBuilder<'static> {\n", "file_path": "mesh/src/mesh.rs", "rank": 16, "score": 184043.16280914433 }, { "content": "/// Trait implemented for type-level render pass relation flags.\n\n/// `RenderPassContinue` and `OutsideRenderPass`.\n\npub trait RenderPassRelation<L>: Copy + Default + std::fmt::Debug + 'static {\n\n /// Flags required to begin command buffer.\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags;\n\n}\n\n\n\nimpl RenderPassRelation<SecondaryLevel> for RenderPassContinue {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::RENDER_PASS_CONTINUE\n\n }\n\n}\n\n\n\nimpl<L> RenderPassRelation<L> for OutsideRenderPass {\n\n fn flags(&self) -> gfx_hal::command::CommandBufferFlags {\n\n gfx_hal::command::CommandBufferFlags::empty()\n\n }\n\n}\n", "file_path": "command/src/buffer/usage.rs", "rank": 17, "score": 183189.02971598716 }, { "content": "/// Descriptor for render group\n\npub trait RenderGroupDesc<B: Backend, T: ?Sized>: std::fmt::Debug {\n\n /// Make render group builder.\n\n fn builder(self) -> DescBuilder<B, T, Self>\n\n where\n\n Self: Sized,\n\n {\n\n DescBuilder {\n\n desc: self,\n\n buffers: Vec::new(),\n\n images: Vec::new(),\n\n dependencies: Vec::new(),\n\n marker: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Get buffers used by the group. Empty by default.\n\n fn buffers(&self) -> Vec<BufferAccess> {\n\n Vec::new()\n\n }\n\n\n", "file_path": "graph/src/node/render/group/mod.rs", "rank": 18, "score": 177155.7074051562 }, { "content": "/// Cast slice of some arbitrary type into slice of bytes.\n\npub fn cast_slice<T>(slice: &[T]) -> &[u8] {\n\n let len = std::mem::size_of::<T>() * slice.len();\n\n let ptr = slice.as_ptr();\n\n unsafe { std::slice::from_raw_parts(ptr as _, len) }\n\n}\n\n\n", "file_path": "util/src/casts.rs", "rank": 19, "score": 175867.4137690505 }, { "content": "/// Description of the node.\n\n/// Implementation of the builder type provide framegraph with static information about node\n\n/// that is used for building the node.\n\npub trait NodeDesc<B: Backend, T: ?Sized>: std::fmt::Debug + Sized + 'static {\n\n /// Node this builder builds.\n\n type Node: Node<B, T>;\n\n\n\n /// Make node builder.\n\n fn builder(self) -> DescBuilder<B, T, Self> {\n\n DescBuilder {\n\n desc: self,\n\n buffers: Vec::new(),\n\n images: Vec::new(),\n\n dependencies: Vec::new(),\n\n marker: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Get set or buffer resources the node uses.\n\n fn buffers(&self) -> Vec<BufferAccess> {\n\n Vec::new()\n\n }\n\n\n", "file_path": "graph/src/node/mod.rs", "rank": 20, "score": 174680.72337986546 }, { "content": "/// Descriptor for simple graphics pipeline implementation.\n\npub trait SimpleGraphicsPipelineDesc<B: Backend, T: ?Sized>: std::fmt::Debug {\n\n /// Simple graphics pipeline implementation\n\n type Pipeline: SimpleGraphicsPipeline<B, T>;\n\n\n\n /// Make simple render group builder.\n\n fn builder(self) -> DescBuilder<B, T, SimpleRenderGroupDesc<Self>>\n\n where\n\n Self: Sized,\n\n {\n\n SimpleRenderGroupDesc { inner: self }.builder()\n\n }\n\n\n\n /// Get set or buffer resources the node uses.\n\n fn buffers(&self) -> Vec<BufferAccess> {\n\n Vec::new()\n\n }\n\n\n\n /// Get set or image resources the node uses.\n\n fn images(&self) -> Vec<ImageAccess> {\n\n Vec::new()\n", "file_path": 
"graph/src/node/render/group/simple.rs", "rank": 21, "score": 174359.30249549152 }, { "content": "/// Logically compares two descriptor layout bindings to determine their relational equality.\n\npub fn compare_bindings(\n\n lhv: &gfx_hal::pso::DescriptorSetLayoutBinding,\n\n rhv: &gfx_hal::pso::DescriptorSetLayoutBinding,\n\n) -> BindingEquality {\n\n if lhv.binding == rhv.binding\n\n && lhv.count == rhv.count\n\n && lhv.immutable_samplers == rhv.immutable_samplers\n\n && lhv.ty == rhv.ty\n\n {\n\n return BindingEquality::Equal;\n\n } else {\n\n if lhv.binding == rhv.binding {\n\n return BindingEquality::SameBindingNonEqual;\n\n }\n\n }\n\n\n\n return BindingEquality::NotEqual;\n\n}\n\n\n\n/// This enum provides logical comparison results for sets. Because shaders can share bindings,\n\n/// we cannot do a strict equality check for exclusion - we must see if shaders match, or if they are the same bindings\n\n/// but mismatched descriptions.\n", "file_path": "shader/src/reflect/mod.rs", "rank": 22, "score": 173809.71608693583 }, { "content": "#[allow(unused)]\n\ntype NoSubmits<B> = std::iter::Empty<Submit<B, NoSimultaneousUse, PrimaryLevel, OutsideRenderPass>>;\n\n\n\n/// Command queue submission.\n\n#[derive(Debug)]\n\npub struct Submission<B, W = NoWaits<B>, C = NoSubmits<B>, S = NoSignals<B>> {\n\n /// Iterator over semaphores with stage flag to wait on.\n\n pub waits: W,\n\n\n\n /// Iterator over submittables.\n\n pub submits: C,\n\n\n\n /// Iterator over semaphores to signal.\n\n pub signals: S,\n\n\n\n /// Marker type for submission backend.\n\n pub marker: std::marker::PhantomData<fn() -> B>,\n\n}\n\n\n\nimpl<B> Submission<B>\n\nwhere\n", "file_path": "command/src/family/submission.rs", "rank": 23, "score": 172434.39678776462 }, { "content": "/// Find required synchronization for all submissions in `Chains`.\n\npub fn sync<F, S, W>(chains: &Chains, mut new_semaphore: F) -> Schedule<SyncData<S, W>>\n\nwhere\n\n F: FnMut() -> (S, W),\n\n{\n\n let ref schedule = chains.schedule;\n\n let ref buffers = chains.buffers;\n\n let ref images = chains.images;\n\n\n\n let mut sync = SyncTemp(fnv::FnvHashMap::default());\n\n for (&id, chain) in buffers {\n\n sync_chain(id, chain, schedule, &mut sync);\n\n }\n\n for (&id, chain) in images {\n\n sync_chain(id, chain, schedule, &mut sync);\n\n }\n\n if schedule.queue_count() > 1 {\n\n optimize(schedule, &mut sync);\n\n }\n\n\n\n let mut result = Schedule::new();\n", "file_path": "chain/src/sync.rs", "rank": 24, "score": 158488.2464830378 }, { "content": "/// Check if slice is sorted using ordered key and key extractor\n\nfn is_slice_sorted_by_key<'a, T, K: Ord>(slice: &'a [T], f: impl Fn(&'a T) -> K) -> bool {\n\n if let Some((first, slice)) = slice.split_first() {\n\n let mut cmp = f(first);\n\n for item in slice {\n\n let item = f(item);\n\n if cmp > item {\n\n return false;\n\n }\n\n cmp = item;\n\n }\n\n }\n\n true\n\n}\n\n\n\nimpl<'a, A> From<Vec<A>> for MeshBuilder<'a>\n\nwhere\n\n A: AsVertex + 'a,\n\n{\n\n fn from(vertices: Vec<A>) -> Self {\n\n MeshBuilder::new().with_vertices(vertices)\n", "file_path": "mesh/src/mesh.rs", "rank": 25, "score": 157241.5384744396 }, { "content": "/// Dynamic node builder that emits `DynNode`.\n\npub trait NodeBuilder<B: Backend, T: ?Sized>: std::fmt::Debug {\n\n /// Pick family for this node to be executed onto.\n\n fn family(&self, factory: &mut Factory<B>, families: &[Family<B>]) -> Option<FamilyId>;\n\n\n\n /// Get buffer accessed by the node.\n\n fn buffers(&self) -> Vec<(BufferId, BufferAccess)>;\n\n\n\n 
/// Get images accessed by the node.\n\n fn images(&self) -> Vec<(ImageId, ImageAccess)>;\n\n\n\n /// Indices of nodes this one dependes on.\n\n fn dependencies(&self) -> Vec<NodeId>;\n\n\n\n /// Build node.\n\n fn build<'a>(\n\n self: Box<Self>,\n\n ctx: &GraphContext<B>,\n\n factory: &mut Factory<B>,\n\n family: &mut Family<B>,\n\n queue: usize,\n", "file_path": "graph/src/node/mod.rs", "rank": 26, "score": 155917.74154184808 }, { "content": "/// Builder fror render group.\n\npub trait RenderGroupBuilder<B: Backend, T: ?Sized>: std::fmt::Debug {\n\n /// Make subpass from render group.\n\n fn into_subpass(self) -> SubpassBuilder<B, T>\n\n where\n\n Self: Sized + 'static,\n\n {\n\n SubpassBuilder::new().with_group(self)\n\n }\n\n\n\n /// Number of color output images.\n\n fn colors(&self) -> usize;\n\n\n\n /// Is depth image used.\n\n fn depth(&self) -> bool;\n\n\n\n /// Get buffers used by the group\n\n fn buffers(&self) -> Vec<(BufferId, BufferAccess)>;\n\n\n\n /// Get images used by the group\n\n fn images(&self) -> Vec<(ImageId, ImageAccess)>;\n", "file_path": "graph/src/node/render/group/mod.rs", "rank": 27, "score": 148936.18876677073 }, { "content": "#[allow(unused)]\n\ntype NoSignals<B> = std::iter::Empty<&'static <B as gfx_hal::Backend>::Semaphore>;\n", "file_path": "command/src/family/submission.rs", "rank": 28, "score": 148874.81456953293 }, { "content": "/// Trait-object safe `Node`.\n\npub trait DynNode<B: Backend, T: ?Sized>: std::fmt::Debug + Sync + Send {\n\n /// Record commands required by node.\n\n /// Recorded buffers go into `submits`.\n\n unsafe fn run<'a>(\n\n &mut self,\n\n ctx: &GraphContext<B>,\n\n factory: &Factory<B>,\n\n queue: &mut Queue<B>,\n\n aux: &T,\n\n frames: &Frames<B>,\n\n waits: &[(&'a B::Semaphore, gfx_hal::pso::PipelineStage)],\n\n signals: &[&'a B::Semaphore],\n\n fence: Option<&mut Fence<B>>,\n\n );\n\n\n\n /// Dispose of the node.\n\n ///\n\n /// # Safety\n\n ///\n\n /// Must be called after waiting for device idle.\n", "file_path": "graph/src/node/mod.rs", "rank": 29, "score": 145715.28809988915 }, { "content": "/// One or more graphics pipelines to be called in subpass.\n\npub trait RenderGroup<B: Backend, T: ?Sized>: std::fmt::Debug + Send + Sync {\n\n /// Prepare resources and data for rendering.\n\n fn prepare(\n\n &mut self,\n\n factory: &Factory<B>,\n\n queue: QueueId,\n\n index: usize,\n\n subpass: gfx_hal::pass::Subpass<'_, B>,\n\n aux: &T,\n\n ) -> PrepareResult;\n\n\n\n /// Record commands.\n\n fn draw_inline(\n\n &mut self,\n\n encoder: RenderPassEncoder<'_, B>,\n\n index: usize,\n\n subpass: gfx_hal::pass::Subpass<'_, B>,\n\n aux: &T,\n\n );\n\n\n\n /// Free all resources and destroy group instance.\n\n fn dispose(self: Box<Self>, factory: &mut Factory<B>, aux: &T);\n\n}\n\n\n", "file_path": "graph/src/node/render/group/mod.rs", "rank": 30, "score": 141413.4070264092 }, { "content": "/// Load mesh data from obj.\n\npub fn load_from_obj(\n\n bytes: &[u8],\n\n) -> Result<Vec<(MeshBuilder<'static>, Option<String>)>, failure::Error> {\n\n let string = std::str::from_utf8(bytes)?;\n\n let set = obj::parse(string).map_err(|e| {\n\n failure::format_err!(\n\n \"Error during parsing obj-file at line '{}': {}\",\n\n e.line_number,\n\n e.message\n\n )\n\n })?;\n\n load_from_data(set)\n\n}\n\n\n", "file_path": "mesh/src/format/obj.rs", "rank": 31, "score": 135914.31321578793 }, { "content": "#[allow(unused_variables)]\n\npub fn init<B>(\n\n config: Config<impl DevicesConfigure, impl HeapsConfigure, impl QueuesConfigure>,\n\n) -> 
Result<(Factory<B>, Families<B>), failure::Error>\n\nwhere\n\n B: Backend,\n\n{\n\n log::debug!(\"Creating factory\");\n\n rendy_backend_match!(B as backend => {\n\n profile_scope!(concat!(\"init_factory\"));\n\n let instance = backend::Instance::create(\"Rendy\", 1);\n\n Ok(identical_cast(init_with_instance(instance, config)?))\n\n });\n\n}\n\n\n", "file_path": "factory/src/factory.rs", "rank": 32, "score": 134720.78352098085 }, { "content": "/// Initialize `Factory` and Queue `Families` associated with Device\n\n/// using existing `Instance`.\n\npub fn init_with_instance<B>(\n\n instance: impl gfx_hal::Instance<Backend = B>,\n\n config: Config<impl DevicesConfigure, impl HeapsConfigure, impl QueuesConfigure>,\n\n) -> Result<(Factory<B>, Families<B>), failure::Error>\n\nwhere\n\n B: Backend,\n\n{\n\n rendy_with_slow_safety_checks!(\n\n log::warn!(\"Slow safety checks are enabled! Disable them in production by enabling the 'no-slow-safety-checks' feature!\")\n\n );\n\n let mut adapters = instance.enumerate_adapters();\n\n\n\n if adapters.is_empty() {\n\n failure::bail!(\"No physical devices found\");\n\n }\n\n\n\n log::debug!(\n\n \"Physical devices:\\n{:#?}\",\n\n adapters\n\n .iter()\n", "file_path": "factory/src/factory.rs", "rank": 33, "score": 132067.3761895332 }, { "content": "/// Attempts to load a Texture from an image.\n\npub fn load_from_image<R>(\n\n mut reader: R,\n\n config: ImageTextureConfig,\n\n) -> Result<TextureBuilder<'static>, failure::Error>\n\nwhere\n\n R: std::io::BufRead + std::io::Seek,\n\n{\n\n use gfx_hal::format::{Component, Swizzle};\n\n use image::{DynamicImage, GenericImageView};\n\n\n\n let image_format = config.format.map_or_else(\n\n || {\n\n let r = reader.by_ref();\n\n // Longest size of image crate supported magic bytes\n\n let mut format_magic_bytes = [0u8; 10];\n\n r.read_exact(&mut format_magic_bytes)?;\n\n r.seek(std::io::SeekFrom::Current(-10))?;\n\n image::guess_format(&format_magic_bytes)\n\n },\n\n |f| Ok(f),\n", "file_path": "texture/src/format/image.rs", "rank": 34, "score": 129579.94498612928 }, { "content": "/// Convert graph barriers into gfx barriers.\n\npub fn gfx_release_barriers<'a, B: Backend>(\n\n ctx: &'a GraphContext<B>,\n\n buffers: impl IntoIterator<Item = &'a NodeBuffer>,\n\n images: impl IntoIterator<Item = &'a NodeImage>,\n\n) -> (\n\n std::ops::Range<gfx_hal::pso::PipelineStage>,\n\n Vec<gfx_hal::memory::Barrier<'a, B>>,\n\n) {\n\n let mut bstart = gfx_hal::pso::PipelineStage::empty();\n\n let mut bend = gfx_hal::pso::PipelineStage::empty();\n\n\n\n let mut istart = gfx_hal::pso::PipelineStage::empty();\n\n let mut iend = gfx_hal::pso::PipelineStage::empty();\n\n\n\n let barriers: Vec<gfx_hal::memory::Barrier<'_, B>> = buffers\n\n .into_iter()\n\n .filter_map(|buffer| {\n\n buffer.release.as_ref().map(|release| {\n\n bstart |= release.stages.start;\n\n bend |= release.stages.end;\n", "file_path": "graph/src/node/mod.rs", "rank": 35, "score": 118673.97627534963 }, { "content": "/// Chech if slice o f ordered values is sorted.\n\nfn is_slice_sorted<T: Ord>(slice: &[T]) -> bool {\n\n is_slice_sorted_by_key(slice, |i| i)\n\n}\n\n\n", "file_path": "mesh/src/mesh.rs", "rank": 36, "score": 117413.429933376 }, { "content": "/// Convert graph barriers into gfx barriers.\n\npub fn gfx_acquire_barriers<'a, 'b, B: Backend>(\n\n ctx: &'a GraphContext<B>,\n\n buffers: impl IntoIterator<Item = &'b NodeBuffer>,\n\n images: impl IntoIterator<Item = &'b NodeImage>,\n\n) -> (\n\n std::ops::Range<gfx_hal::pso::PipelineStage>,\n\n 
Vec<gfx_hal::memory::Barrier<'a, B>>,\n\n) {\n\n let mut bstart = gfx_hal::pso::PipelineStage::empty();\n\n let mut bend = gfx_hal::pso::PipelineStage::empty();\n\n\n\n let mut istart = gfx_hal::pso::PipelineStage::empty();\n\n let mut iend = gfx_hal::pso::PipelineStage::empty();\n\n\n\n let barriers: Vec<gfx_hal::memory::Barrier<'_, B>> = buffers\n\n .into_iter()\n\n .filter_map(|buffer| {\n\n buffer.acquire.as_ref().map(|acquire| {\n\n bstart |= acquire.stages.start;\n\n bend |= acquire.stages.end;\n", "file_path": "graph/src/node/mod.rs", "rank": 37, "score": 115985.12453492222 }, { "content": "fn align_by(align: usize, value: usize) -> usize {\n\n ((value + align - 1) / align) * align\n\n}\n\n\n\n/// Single mesh is a collection of buffer ranges that provides available attributes.\n\n/// Usually exactly one mesh is used per draw call.\n\n#[derive(Debug)]\n\npub struct Mesh<B: gfx_hal::Backend> {\n\n vertex_buffer: Escape<Buffer<B>>,\n\n vertex_layouts: Vec<VertexBufferLayout>,\n\n index_buffer: Option<IndexBuffer<B>>,\n\n prim: gfx_hal::Primitive,\n\n len: u32,\n\n}\n\n\n\nimpl<B> Mesh<B>\n\nwhere\n\n B: gfx_hal::Backend,\n\n{\n\n /// Build new mesh with `MeshBuilder`\n", "file_path": "mesh/src/mesh.rs", "rank": 38, "score": 111781.73153625792 }, { "content": "#[derive(Debug, Default)]\n\nstruct GravBounceDesc;\n\n\n\nimpl<B, T> NodeDesc<B, T> for GravBounceDesc\n\nwhere\n\n B: hal::Backend,\n\n T: ?Sized,\n\n{\n\n type Node = GravBounce<B>;\n\n\n\n fn buffers(&self) -> Vec<BufferAccess> {\n\n vec![BufferAccess {\n\n access: hal::buffer::Access::SHADER_READ | hal::buffer::Access::SHADER_WRITE,\n\n stages: hal::pso::PipelineStage::COMPUTE_SHADER,\n\n usage: hal::buffer::Usage::STORAGE | hal::buffer::Usage::TRANSFER_DST,\n\n }]\n\n }\n\n\n\n fn build<'a>(\n\n self,\n\n ctx: &GraphContext<B>,\n", "file_path": "rendy/examples/quads/main.rs", "rank": 39, "score": 109052.74898449902 }, { "content": "/// Represent types that can be interpreted as list of vertex attributes.\n\npub trait AsAttributes {\n\n /// The iterator type for retreived attributes\n\n type Iter: Iterator<Item = Attribute>;\n\n /// Retreive a list of vertex attributes with offsets relative to beginning of that list\n\n fn attributes(self) -> Self::Iter;\n\n}\n\n\n\nimpl AsAttributes for Vec<Attribute> {\n\n type Iter = std::vec::IntoIter<Attribute>;\n\n fn attributes(self) -> Self::Iter {\n\n self.into_iter()\n\n }\n\n}\n\n\n\nimpl AsAttributes for VertexFormat {\n\n type Iter = std::vec::IntoIter<Attribute>;\n\n fn attributes(self) -> Self::Iter {\n\n self.attributes.into_iter()\n\n }\n\n}\n", "file_path": "util/src/types/vertex.rs", "rank": 40, "score": 108842.48712193337 }, { "content": "fn optimize<S>(schedule: &Schedule<S>, sync: &mut SyncTemp) {\n\n for queue in schedule.iter().flat_map(|family| family.iter()) {\n\n let mut found = fnv::FnvHashMap::default();\n\n for submission in queue.iter() {\n\n optimize_submission(submission.id(), &mut found, sync);\n\n }\n\n }\n\n}\n", "file_path": "chain/src/sync.rs", "rank": 41, "score": 108657.92843783982 }, { "content": "/// Function copied from range_contains RFC rust implementation in nightly\n\nfn range_contains<U, R>(range: &R, item: &U) -> bool\n\nwhere\n\n U: ?Sized + PartialOrd<U>,\n\n R: RangeBounds<U>,\n\n{\n\n (match range.start_bound() {\n\n Bound::Included(ref start) => *start <= item,\n\n Bound::Excluded(ref start) => *start < item,\n\n Bound::Unbounded => true,\n\n }) && (match range.end_bound() {\n\n Bound::Included(ref end) => item <= *end,\n\n 
Bound::Excluded(ref end) => item < *end,\n\n Bound::Unbounded => true,\n\n })\n\n}\n", "file_path": "shader/src/reflect/mod.rs", "rank": 42, "score": 106918.8936618085 }, { "content": "#[derive(Debug, Default)]\n\nstruct SpriteGraphicsPipelineDesc;\n\n\n", "file_path": "rendy/examples/sprite/main.rs", "rank": 43, "score": 106559.16805680966 }, { "content": "#[derive(Debug, Default)]\n\nstruct QuadsRenderPipelineDesc;\n\n\n", "file_path": "rendy/examples/quads/main.rs", "rank": 44, "score": 106559.16805680966 }, { "content": "#[derive(Debug, Default)]\n\nstruct MeshRenderPipelineDesc;\n\n\n", "file_path": "rendy/examples/meshes/main.rs", "rank": 45, "score": 106559.16805680966 }, { "content": "#[derive(Debug, Default)]\n\nstruct TriangleRenderPipelineDesc;\n\n\n", "file_path": "rendy/examples/triangle/main.rs", "rank": 46, "score": 106559.16805680966 }, { "content": "#[derive(Debug, Default)]\n\nstruct TriangleRenderPipelineDesc;\n\n\n", "file_path": "rendy/examples/source_shaders/main.rs", "rank": 47, "score": 104224.9084714688 }, { "content": "/// Casts identical types.\n\n/// Useful in generic environment where caller knows that two types are the same\n\n/// but Rust is not convinced.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if types are actually different.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # extern crate rendy_util;\n\n/// # use rendy_util::identical_cast;\n\n/// # use std::any::TypeId;\n\n/// # fn foo<T: 'static>() {\n\n/// if TypeId::of::<T>() == TypeId::of::<u32>() {\n\n/// let value: T = identical_cast(42u32);\n\n/// }\n\n/// # }\n\n///\n\n/// ```\n\npub fn identical_cast<T: 'static, U: 'static>(value: T) -> U {\n\n assert_eq!(TypeId::of::<T>(), TypeId::of::<U>());\n\n unsafe {\n\n // We know types are the same.\n\n let mut value = std::mem::ManuallyDrop::new(value);\n\n let ptr: *mut T = &mut *value;\n\n std::ptr::read(ptr as *mut U)\n\n }\n\n}\n", "file_path": "util/src/casts.rs", "rank": 48, "score": 103628.22196445594 }, { "content": "/// Calculate automatic `Chains` for nodes.\n\n/// This function tries to find the most appropriate schedule for nodes execution.\n\npub fn collect<Q>(nodes: Vec<Node>, max_queues: Q) -> Chains\n\nwhere\n\n Q: Fn(gfx_hal::queue::QueueFamilyId) -> usize,\n\n{\n\n // Resolve nodes into a form faster to work with.\n\n let (nodes, mut unscheduled_nodes) = resolve_nodes(nodes, max_queues);\n\n let mut ready_nodes = Vec::new();\n\n\n\n // Chains.\n\n let mut images: Vec<ChainData<Image>> = fill(nodes.images.len());\n\n let mut buffers: Vec<ChainData<Buffer>> = fill(nodes.buffers.len());\n\n\n\n // Schedule\n\n let mut schedule = Vec::with_capacity(nodes.queues.len());\n\n for i in 0..nodes.queues.len() {\n\n schedule.push(QueueData {\n\n queue: Queue::new(nodes.queues[i]),\n\n wait_factor: 0,\n\n });\n\n }\n", "file_path": "chain/src/collect.rs", "rank": 49, "score": 103602.2942894169 }, { "content": "fn add_to_chain<R, S>(\n\n id: Id,\n\n family: gfx_hal::queue::QueueFamilyId,\n\n chain_data: &mut ChainData<R>,\n\n sid: SubmissionId,\n\n submission: &mut Submission<S>,\n\n state: State<R>,\n\n set_link: impl FnOnce(&mut Submission<S>, Id, usize),\n\n) where\n\n R: Resource,\n\n{\n\n let node = LinkNode { sid, state };\n\n\n\n chain_data.current_family = Some(family);\n\n chain_data.current_link_wait_factor = max(\n\n submission.wait_factor() + 1,\n\n chain_data.current_link_wait_factor,\n\n );\n\n\n\n let ref mut chain = chain_data.chain;\n", "file_path": "chain/src/collect.rs", "rank": 50, "score": 103289.89386206659 }, { "content": 
"#[cfg(feature = \"no-slow-safety-checks\")]\n\nfn new_instance_id() -> InstanceId {\n\n InstanceId {}\n\n}\n\n\n", "file_path": "util/src/wrap.rs", "rank": 51, "score": 102669.70826530422 }, { "content": "fn push_vertex_desc(\n\n elements: &[gfx_hal::pso::Element<gfx_hal::format::Format>],\n\n stride: gfx_hal::pso::ElemStride,\n\n rate: gfx_hal::pso::VertexInputRate,\n\n vertex_buffers: &mut Vec<gfx_hal::pso::VertexBufferDesc>,\n\n attributes: &mut Vec<gfx_hal::pso::AttributeDesc>,\n\n) {\n\n let index = vertex_buffers.len() as gfx_hal::pso::BufferIndex;\n\n\n\n vertex_buffers.push(gfx_hal::pso::VertexBufferDesc {\n\n binding: index,\n\n stride,\n\n rate,\n\n });\n\n\n\n let mut location = attributes.last().map_or(0, |a| a.location + 1);\n\n for &element in elements {\n\n attributes.push(gfx_hal::pso::AttributeDesc {\n\n location,\n\n binding: index,\n\n element,\n\n });\n\n location += 1;\n\n }\n\n}\n", "file_path": "graph/src/node/render/group/simple.rs", "rank": 52, "score": 102480.8145217411 }, { "content": "fn max_chunks_per_size() -> usize {\n\n let value = (std::mem::size_of::<usize>() * 8).pow(4);\n\n debug_assert!(fits_u32(value));\n\n value\n\n}\n", "file_path": "memory/src/allocator/dynamic.rs", "rank": 53, "score": 100339.71154720121 }, { "content": "/// Trait for memory region suitable for host writes.\n\npub trait Write<T: Copy> {\n\n /// Get mutable slice of `T` bound to mapped range.\n\n ///\n\n /// # Safety\n\n ///\n\n /// * Returned slice should not be read.\n\n unsafe fn slice(&mut self) -> &mut [T];\n\n\n\n /// Write data into mapped memory sub-region.\n\n ///\n\n /// # Panic\n\n ///\n\n /// Panics if `data.len()` is greater than this sub-region len.\n\n fn write(&mut self, data: &[T]) {\n\n unsafe {\n\n let slice = self.slice();\n\n assert!(data.len() <= slice.len());\n\n copy_nonoverlapping(data.as_ptr(), slice.as_mut_ptr(), data.len());\n\n }\n\n }\n", "file_path": "memory/src/mapping/write.rs", "rank": 54, "score": 100273.76464432149 }, { "content": "#[cfg(feature = \"no-slow-safety-checks\")]\n\nfn new_device_id(instance: InstanceId) -> DeviceId {\n\n DeviceId { instance }\n\n}\n\n\n\n/// Id of the hal instance.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub struct InstanceId {\n\n /// Unique id.\n\n #[cfg(not(feature = \"no-slow-safety-checks\"))]\n\n pub id: u32,\n\n}\n\n\n\nimpl InstanceId {\n\n /// Create new instance id.\n\n pub fn new() -> Self {\n\n new_instance_id()\n\n }\n\n}\n\n\n\n/// Raw instance wrapper with id.\n", "file_path": "util/src/wrap.rs", "rank": 55, "score": 92550.08383893355 }, { "content": "fn fill<T: Default>(num: usize) -> Vec<T> {\n\n let mut vec = Vec::with_capacity(num);\n\n for _ in 0..num {\n\n vec.push(T::default());\n\n }\n\n vec\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 56, "score": 90627.97670858382 }, { "content": "fn sync_chain<R, S>(id: Id, chain: &Chain<R>, schedule: &Schedule<S>, sync: &mut SyncTemp)\n\nwhere\n\n R: Resource,\n\n{\n\n let uid = id.into();\n\n\n\n let pairs = chain\n\n .links()\n\n .windows(2)\n\n .map(|pair| (&pair[0], &pair[1]))\n\n .chain(\n\n chain\n\n .links()\n\n .first()\n\n .and_then(|first| chain.links().last().map(move |last| (last, first))),\n\n );\n\n\n\n for (prev_link, link) in pairs {\n\n log::trace!(\"Sync {:#?}:{:#?}\", prev_link.access(), link.access());\n\n if prev_link.family() == link.family() {\n", "file_path": "chain/src/sync.rs", "rank": 57, "score": 87819.95639828179 }, { "content": "struct SyncTemp(fnv::FnvHashMap<SubmissionId, SyncData<Semaphore, 
Semaphore>>);\n\nimpl SyncTemp {\n\n fn get_sync(&mut self, sid: SubmissionId) -> &mut SyncData<Semaphore, Semaphore> {\n\n self.0.entry(sid).or_insert_with(|| SyncData::new())\n\n }\n\n}\n\n\n", "file_path": "chain/src/sync.rs", "rank": 58, "score": 82696.44080356683 }, { "content": "fn match_kind(kind: Kind, view_kind: ViewKind, view_caps: ViewCapabilities) -> bool {\n\n match kind {\n\n Kind::D1(..) => match view_kind {\n\n ViewKind::D1 | ViewKind::D1Array => true,\n\n _ => false,\n\n },\n\n Kind::D2(..) => match view_kind {\n\n ViewKind::D2 | ViewKind::D2Array => true,\n\n ViewKind::Cube => {\n\n if view_caps.contains(ViewCapabilities::KIND_CUBE) {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n _ => false,\n\n },\n\n Kind::D3(..) => {\n\n if view_caps == ViewCapabilities::KIND_2D_ARRAY {\n\n if view_kind == ViewKind::D2 {\n", "file_path": "resource/src/image.rs", "rank": 59, "score": 78854.39654856178 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\nstruct Semaphore {\n\n id: Id,\n\n points: Range<SubmissionId>,\n\n}\n\n\n\nimpl Semaphore {\n\n fn new(id: Id, points: Range<SubmissionId>) -> Self {\n\n Semaphore { id, points }\n\n }\n\n}\n\n\n\n/// Semaphore signal info.\n\n/// There must be paired wait.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct Signal<S>(S);\n\n\n\nimpl<S> Signal<S> {\n\n /// Create signaling for specified point.\n\n /// At this point `Wait` must be created as well.\n\n /// `id` and `point` combination must be unique.\n", "file_path": "chain/src/sync.rs", "rank": 60, "score": 75514.72545184026 }, { "content": "fn resolve_nodes<Q>(nodes: Vec<Node>, max_queues: Q) -> (ResolvedNodeSet, Vec<usize>)\n\nwhere\n\n Q: Fn(gfx_hal::queue::QueueFamilyId) -> usize,\n\n{\n\n let node_count = nodes.len();\n\n\n\n let mut unscheduled_nodes = fill(nodes.len());\n\n let mut reified_nodes: Vec<ResolvedNode> = fill(nodes.len());\n\n let mut node_ids = LookupBuilder::new();\n\n let mut queues = LookupBuilder::new();\n\n let mut buffers = LookupBuilder::new();\n\n let mut images = LookupBuilder::new();\n\n\n\n let mut family_full = fnv::FnvHashMap::default();\n\n for node in nodes {\n\n let family = node.family;\n\n if !family_full.contains_key(&family) {\n\n let count = max_queues(family);\n\n assert!(count > 0, \"Cannot create a family with 0 max queues.\");\n\n for i in 0..count {\n", "file_path": "chain/src/collect.rs", "rank": 61, "score": 75514.12919358836 }, { "content": "#[derive(PartialEq, PartialOrd, Eq, Ord)]\n\nstruct Fitness {\n\n transfers: usize,\n\n wait_factor: usize,\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 62, "score": 75510.75273099831 }, { "content": "#[derive(Clone, Copy, Debug)]\n\n#[repr(C, align(16))]\n\nstruct Light {\n\n pos: nalgebra::Vector3<f32>,\n\n pad: f32,\n\n intencity: f32,\n\n}\n\n\n", "file_path": "rendy/examples/meshes/main.rs", "rank": 63, "score": 73970.0688158061 }, { "content": "#[derive(Debug)]\n\nstruct Camera {\n\n view: nalgebra::Projective3<f32>,\n\n proj: nalgebra::Perspective3<f32>,\n\n}\n\n\n", "file_path": "rendy/examples/meshes/main.rs", "rank": 64, "score": 73965.40854538152 }, { "content": "struct ResolvedNode {\n\n id: usize,\n\n family: gfx_hal::queue::QueueFamilyId,\n\n queues: Range<usize>,\n\n rev_deps: Vec<usize>,\n\n buffers: Vec<(usize, State<Buffer>)>,\n\n images: Vec<(usize, State<Image>)>,\n\n}\n\n\n\nimpl Default for ResolvedNode {\n\n fn default() -> Self {\n\n ResolvedNode {\n\n id: 0,\n\n family: gfx_hal::queue::QueueFamilyId(0),\n\n queues: 0..0,\n\n rev_deps: 
Vec::new(),\n\n buffers: Vec::new(),\n\n images: Vec::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 65, "score": 73960.28564862139 }, { "content": "struct QueueData {\n\n queue: Queue<Unsynchronized>,\n\n wait_factor: usize,\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 66, "score": 73960.28564862139 }, { "content": "fn fitness(\n\n node: &ResolvedNode,\n\n images: &mut Vec<ChainData<Image>>,\n\n buffers: &mut Vec<ChainData<Buffer>>,\n\n schedule: &mut Vec<QueueData>,\n\n) -> (Fitness, usize) {\n\n let mut transfers = 0;\n\n let mut wait_factor_from_chains = 0;\n\n\n\n // Collect minimal waits required and resource transfers count.\n\n for &(id, _) in &node.buffers {\n\n let chain = &buffers[id];\n\n if chain\n\n .current_family\n\n .map_or(false, |family| family != node.family)\n\n {\n\n transfers += 1;\n\n }\n\n wait_factor_from_chains = max(wait_factor_from_chains, chain.last_link_wait_factor);\n\n }\n", "file_path": "chain/src/collect.rs", "rank": 67, "score": 73731.57040879273 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\nstruct PosVel {\n\n pos: [f32; 2],\n\n vel: [f32; 2],\n\n}\n\n\n\nlazy_static::lazy_static! {\n\n static ref RENDER_VERTEX: SpirvShader = SourceShaderInfo::new(\n\n include_str!(\"render.vert\"),\n\n concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/examples/quads/render.vert\").into(),\n\n ShaderKind::Vertex,\n\n SourceLanguage::GLSL,\n\n \"main\",\n\n ).precompile().unwrap();\n\n\n\n static ref RENDER_FRAGMENT: SpirvShader = SourceShaderInfo::new(\n\n include_str!(\"render.frag\"),\n\n concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/examples/quads/render.frag\").into(),\n\n ShaderKind::Fragment,\n\n SourceLanguage::GLSL,\n\n \"main\",\n", "file_path": "rendy/examples/quads/main.rs", "rank": 68, "score": 72535.28141386082 }, { "content": "#[derive(Clone, Copy)]\n\n#[repr(C, align(16))]\n\nstruct UniformArgs {\n\n proj: nalgebra::Matrix4<f32>,\n\n view: nalgebra::Matrix4<f32>,\n\n lights_count: i32,\n\n pad: [i32; 3],\n\n lights: [Light; MAX_LIGHTS],\n\n}\n\n\n", "file_path": "rendy/examples/meshes/main.rs", "rank": 69, "score": 72535.22428154098 }, { "content": "struct ResolvedNodeSet {\n\n nodes: Vec<ResolvedNode>,\n\n queues: Vec<QueueId>,\n\n buffers: Vec<Id>,\n\n images: Vec<Id>,\n\n}\n\n\n", "file_path": "chain/src/collect.rs", "rank": 70, "score": 72530.22213123558 }, { "content": "#[cfg(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\"))]\n\nfn run(\n\n event_loop: &mut EventsLoop,\n\n factory: &mut Factory<Backend>,\n\n families: &mut Families<Backend>,\n\n mut graph: Graph<Backend, ()>,\n\n) -> Result<(), failure::Error> {\n\n let started = std::time::Instant::now();\n\n\n\n let mut frames = 0u64..;\n\n let mut elapsed = started.elapsed();\n\n\n\n for _ in &mut frames {\n\n factory.maintain(families);\n\n event_loop.poll_events(|_| ());\n\n graph.run(factory, families, &());\n\n\n\n elapsed = started.elapsed();\n\n if elapsed >= std::time::Duration::new(5, 0) {\n\n break;\n\n }\n", "file_path": "rendy/examples/triangle/main.rs", "rank": 71, "score": 72199.29949737784 }, { "content": "fn subresource_to_range(\n\n sub: &gfx_hal::image::SubresourceLayers,\n\n) -> gfx_hal::image::SubresourceRange {\n\n gfx_hal::image::SubresourceRange {\n\n aspects: sub.aspects,\n\n levels: sub.level..sub.level + 1,\n\n layers: sub.layers.clone(),\n\n }\n\n}\n\n\n\n/// A region to be blitted including the source and destination images and states,\n\n#[derive(Debug, Clone)]\n\npub struct BlitRegion {\n\n /// Region to blit 
from\n\n pub src: BlitImageState,\n\n /// Region to blit to\n\n pub dst: BlitImageState,\n\n}\n\n\n\nimpl BlitRegion {\n", "file_path": "factory/src/blitter.rs", "rank": 72, "score": 72199.29949737784 }, { "content": "#[cfg(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\"))]\n\nfn run(\n\n event_loop: &mut EventsLoop,\n\n factory: &mut Factory<Backend>,\n\n families: &mut Families<Backend>,\n\n window: &Window,\n\n) -> Result<(), failure::Error> {\n\n let mut graph = build_graph(factory, families, window.clone());\n\n\n\n let started = std::time::Instant::now();\n\n\n\n let mut last_window_size = window.get_inner_size();\n\n let mut need_rebuild = false;\n\n\n\n let mut frames = 0u64..;\n\n let mut elapsed = started.elapsed();\n\n\n\n for _ in &mut frames {\n\n factory.maintain(families);\n\n event_loop.poll_events(|_| ());\n\n let new_window_size = window.get_inner_size();\n", "file_path": "rendy/examples/quads/main.rs", "rank": 73, "score": 72199.29949737784 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/triangle/main.rs", "rank": 74, "score": 72199.29949737784 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/meshes/main.rs", "rank": 75, "score": 72199.29949737784 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/sprite/main.rs", "rank": 76, "score": 72199.29949737784 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/init/main.rs", "rank": 77, "score": 72199.29949737784 }, { "content": "fn common_layout(\n\n acc: Option<gfx_hal::image::Layout>,\n\n layout: gfx_hal::image::Layout,\n\n) -> gfx_hal::image::Layout {\n\n match (acc, layout) {\n\n (None, layout) => layout,\n\n (Some(left), right) if left == right => left,\n\n (\n\n Some(gfx_hal::image::Layout::DepthStencilReadOnlyOptimal),\n\n gfx_hal::image::Layout::DepthStencilAttachmentOptimal,\n\n ) => gfx_hal::image::Layout::DepthStencilAttachmentOptimal,\n\n (\n\n Some(gfx_hal::image::Layout::DepthStencilAttachmentOptimal),\n\n gfx_hal::image::Layout::DepthStencilReadOnlyOptimal,\n\n ) => gfx_hal::image::Layout::DepthStencilAttachmentOptimal,\n\n (Some(_), _) => gfx_hal::image::Layout::General,\n\n }\n\n}\n", "file_path": "chain/src/resource.rs", "rank": 78, "score": 72199.29949737784 }, { "content": "fn optimize_submission(\n\n sid: SubmissionId,\n\n found: &mut fnv::FnvHashMap<QueueId, usize>,\n\n sync: &mut SyncTemp,\n\n) {\n\n let mut to_remove = Vec::new();\n\n if let Some(sync_data) = sync.0.get_mut(&sid) {\n\n sync_data\n\n .wait\n\n .sort_unstable_by_key(|wait| (wait.stage(), wait.semaphore().points.end.index()));\n\n sync_data.wait.retain(|wait| {\n\n let start = wait.semaphore().points.start;\n\n if let Some(synched_to) = found.get_mut(&start.queue()) {\n\n if *synched_to >= start.index() {\n\n to_remove.push(wait.semaphore().clone());\n\n return false;\n\n } else {\n\n *synched_to = start.index();\n\n return true;\n\n }\n", "file_path": "chain/src/sync.rs", "rank": 79, "score": 
72199.29949737784 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/quads/main.rs", "rank": 80, "score": 72199.29949737784 }, { "content": "#[cfg(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\"))]\n\nfn run(\n\n event_loop: &mut EventsLoop,\n\n factory: &mut Factory<Backend>,\n\n families: &mut Families<Backend>,\n\n mut graph: Graph<Backend, ()>,\n\n) -> Result<(), failure::Error> {\n\n let started = std::time::Instant::now();\n\n\n\n std::thread::spawn(move || {\n\n while started.elapsed() < std::time::Duration::new(30, 0) {\n\n std::thread::sleep(std::time::Duration::new(1, 0));\n\n }\n\n\n\n std::process::abort();\n\n });\n\n\n\n let mut frames = 0u64..;\n\n let mut elapsed = started.elapsed();\n\n\n\n for _ in &mut frames {\n", "file_path": "rendy/examples/sprite/main.rs", "rank": 81, "score": 72199.29949737784 }, { "content": "#[derive(Clone, Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\nstruct RawIndices<'a> {\n\n #[cfg_attr(feature = \"serde\", serde(with = \"serde_bytes\", borrow))]\n\n indices: Cow<'a, [u8]>,\n\n index_type: gfx_hal::IndexType,\n\n}\n\n\n", "file_path": "mesh/src/mesh.rs", "rank": 82, "score": 71695.37562673271 }, { "content": "#[derive(Clone, Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\nstruct RawVertices<'a> {\n\n #[cfg_attr(feature = \"serde\", serde(with = \"serde_bytes\", borrow))]\n\n vertices: Cow<'a, [u8]>,\n\n format: VertexFormat,\n\n}\n\n\n", "file_path": "mesh/src/mesh.rs", "rank": 83, "score": 71695.37562673271 }, { "content": "/// raw hal type for vertex attribute\n\ntype AttributeElem = gfx_hal::pso::Element<Format>;\n\n\n\n/// Vertex attribute type.\n\n#[derive(Clone, Debug, Derivative)]\n\n#[derivative(PartialEq, Eq, Hash)]\n\npub struct Attribute {\n\n /// globally unique identifier for attribute's semantic\n\n uuid: AttrUuid,\n\n /// hal type with offset and format\n\n element: AttributeElem,\n\n /// Attribute array index. 
Matrix attributes are treated like array of vectors.\n\n #[derivative(PartialEq = \"ignore\")]\n\n #[derivative(Hash = \"ignore\")]\n\n index: u8,\n\n /// Attribute name as used in the shader\n\n #[derivative(PartialEq = \"ignore\")]\n\n #[derivative(Hash = \"ignore\")]\n\n name: Cow<'static, str>,\n\n}\n\n\n", "file_path": "util/src/types/vertex.rs", "rank": 84, "score": 71494.95823204632 }, { "content": "/// Interface to create shader modules from shaders.\n\n/// Implemented for static shaders via [`compile_to_spirv!`] macro.\n\n///\n\npub trait Shader {\n\n /// Get spirv bytecode.\n\n fn spirv(&self) -> Result<std::borrow::Cow<'_, [u8]>, failure::Error>;\n\n\n\n /// Get the entry point of the shader.\n\n fn entry(&self) -> &str;\n\n\n\n /// Get the gfx_hal representation of this shaders kind/stage.\n\n fn stage(&self) -> ShaderStageFlags;\n\n\n\n /// Create shader module.\n\n ///\n\n /// Spir-V bytecode must adhere valid usage on this Vulkan spec page:\n\n /// https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/VkShaderModuleCreateInfo.html\n\n unsafe fn module<B>(\n\n &self,\n\n factory: &rendy_factory::Factory<B>,\n\n ) -> Result<B::ShaderModule, failure::Error>\n\n where\n\n B: Backend,\n", "file_path": "shader/src/lib.rs", "rank": 85, "score": 71387.79529109996 }, { "content": "#[cfg(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\"))]\n\nfn run(\n\n event_loop: &mut EventsLoop,\n\n factory: &mut Factory<Backend>,\n\n families: &mut Families<Backend>,\n\n mut graph: Graph<Backend, ()>,\n\n) -> Result<(), failure::Error> {\n\n let started = std::time::Instant::now();\n\n\n\n let mut frames = 0u64..;\n\n let mut elapsed = started.elapsed();\n\n\n\n for _ in &mut frames {\n\n factory.maintain(families);\n\n event_loop.poll_events(|_| ());\n\n graph.run(factory, families, &mut ());\n\n\n\n elapsed = started.elapsed();\n\n if elapsed >= std::time::Duration::new(5, 0) {\n\n break;\n\n }\n", "file_path": "rendy/examples/source_shaders/main.rs", "rank": 86, "score": 70775.30658472994 }, { "content": "fn compare_set(\n\n lhv: &[gfx_hal::pso::DescriptorSetLayoutBinding],\n\n rhv: &[gfx_hal::pso::DescriptorSetLayoutBinding],\n\n) -> SetEquality {\n\n // Bindings may not be in order, so we need to make a copy and index them by binding.\n\n let mut lhv_bindings = HashMap::new();\n\n lhv.iter().for_each(|b| {\n\n lhv_bindings.insert(b.binding, b);\n\n });\n\n\n\n let mut rhv_bindings = HashMap::new();\n\n rhv.iter().for_each(|b| {\n\n rhv_bindings.insert(b.binding, b);\n\n });\n\n\n\n let predicate = if lhv.len() == rhv.len() {\n\n SetEquality::Equal\n\n } else if lhv.len() > rhv.len() {\n\n SetEquality::SupersetOf\n\n } else {\n", "file_path": "shader/src/reflect/mod.rs", "rank": 87, "score": 70775.30658472994 }, { "content": "fn load_from_data(\n\n obj_set: obj::ObjSet,\n\n) -> Result<Vec<(MeshBuilder<'static>, Option<String>)>, failure::Error> {\n\n // Takes a list of objects that contain geometries that contain shapes that contain\n\n // vertex/texture/normal indices into the main list of vertices, and converts to\n\n // MeshBuilders with Position, Normal, TexCoord.\n\n trace!(\"Loading mesh\");\n\n let mut objects = vec![];\n\n\n\n for object in obj_set.objects {\n\n for geometry in &object.geometry {\n\n let mut builder = MeshBuilder::new();\n\n\n\n let mut indices = Vec::new();\n\n\n\n geometry.shapes.iter().for_each(|shape| {\n\n if let obj::Primitive::Triangle(v1, v2, v3) = shape.primitive {\n\n indices.push(v1);\n\n indices.push(v2);\n\n 
indices.push(v3);\n", "file_path": "mesh/src/format/obj.rs", "rank": 88, "score": 70775.30658472994 }, { "content": "#[cfg(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\"))]\n\nfn build_graph(\n\n factory: &mut Factory<Backend>,\n\n families: &mut Families<Backend>,\n\n window: &Window,\n\n) -> Graph<Backend, ()> {\n\n let surface = factory.create_surface(window);\n\n\n\n let mut graph_builder = GraphBuilder::<Backend, ()>::new();\n\n\n\n let posvel = graph_builder.create_buffer(QUADS as u64 * std::mem::size_of::<[f32; 4]>() as u64);\n\n\n\n let size = window\n\n .get_inner_size()\n\n .unwrap()\n\n .to_physical(window.get_hidpi_factor());\n\n let window_kind = hal::image::Kind::D2(size.width as u32, size.height as u32, 1, 1);\n\n\n\n let color = graph_builder.create_image(\n\n window_kind,\n\n 1,\n", "file_path": "rendy/examples/quads/main.rs", "rank": 89, "score": 70775.30658472994 }, { "content": "/// Helper function to find buffer with compatible format.\n\nfn find_compatible_buffer(\n\n vertex_layouts: &[VertexBufferLayout],\n\n format: &VertexFormat,\n\n) -> Option<usize> {\n\n debug_assert!(is_slice_sorted(&*format.attributes));\n\n for (i, layout) in vertex_layouts.iter().enumerate() {\n\n debug_assert!(is_slice_sorted(&*layout.format.attributes));\n\n if is_compatible(&layout.format, format) {\n\n return Some(i);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "mesh/src/mesh.rs", "rank": 90, "score": 70775.30658472994 }, { "content": "#[cfg(not(any(feature = \"dx12\", feature = \"metal\", feature = \"vulkan\")))]\n\nfn main() {\n\n panic!(\"Specify feature: { dx12, metal, vulkan }\");\n\n}\n", "file_path": "rendy/examples/source_shaders/main.rs", "rank": 91, "score": 70775.30658472994 }, { "content": "use super::queue::QueueId;\n\nuse crate::Id;\n\n\n\n/// Submission id.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub struct SubmissionId {\n\n /// Queue id of the submission.\n\n pub queue: QueueId,\n\n\n\n /// Index of the queue.\n\n pub index: usize,\n\n}\n\n\n\nimpl SubmissionId {\n\n /// Create new id from queue id and index.\n\n pub fn new(queue: QueueId, index: usize) -> Self {\n\n SubmissionId { queue, index }\n\n }\n\n\n\n /// Get family id.\n", "file_path": "chain/src/schedule/submission.rs", "rank": 97, "score": 43.61942218682145 }, { "content": "use super::submission::{Submission, SubmissionId};\n\n\n\n/// Queue id.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub struct QueueId {\n\n /// Family id of the queue.\n\n pub family: gfx_hal::queue::QueueFamilyId,\n\n\n\n /// Index of the queue.\n\n pub index: usize,\n\n}\n\n\n\nimpl QueueId {\n\n /// Create queue id from family id and index.\n\n pub fn new(family: gfx_hal::queue::QueueFamilyId, index: usize) -> Self {\n\n QueueId {\n\n family: family,\n\n index,\n\n }\n\n }\n", "file_path": "chain/src/schedule/queue.rs", "rank": 99, "score": 40.383088114974484 } ]
Rust
src/rust/storage/seg/src/ttl_buckets/ttl_bucket.rs
wandaitzuchen/pelikan
7421cdedbd5cf5c9814d244ae02eef5ec05904bf
use super::{SEGMENT_CLEAR, SEGMENT_EXPIRE};
use crate::*;

use core::num::NonZeroU32;

pub struct TtlBucket {
    head: Option<NonZeroU32>,
    tail: Option<NonZeroU32>,
    ttl: i32,
    nseg: i32,
    next_to_merge: Option<NonZeroU32>,
    _pad: [u8; 44],
}

impl TtlBucket {
    pub(super) fn new(ttl: i32) -> Self {
        Self {
            head: None,
            tail: None,
            ttl,
            nseg: 0,
            next_to_merge: None,
            _pad: [0; 44],
        }
    }

    pub fn head(&self) -> Option<NonZeroU32> {
        self.head
    }

    pub fn set_head(&mut self, id: Option<NonZeroU32>) {
        self.head = id;
    }

    pub fn next_to_merge(&self) -> Option<NonZeroU32> {
        self.next_to_merge
    }

    pub fn set_next_to_merge(&mut self, next: Option<NonZeroU32>) {
        self.next_to_merge = next;
    }

    pub(super) fn expire(&mut self, hashtable: &mut HashTable, segments: &mut Segments) -> usize {
        if self.head.is_none() {
            return 0;
        }

        let mut expired = 0;

        loop {
            let seg_id = self.head;
            if let Some(seg_id) = seg_id {
                let flush_at = segments.flush_at();
                let mut segment = segments.get_mut(seg_id).unwrap();
                if segment.create_at() + segment.ttl() <= Instant::recent()
                    || segment.create_at() < flush_at
                {
                    if let Some(next) = segment.next_seg() {
                        self.head = Some(next);
                    } else {
                        self.head = None;
                        self.tail = None;
                    }
                    let _ = segment.clear(hashtable, true);
                    segments.push_free(seg_id);
                    SEGMENT_EXPIRE.increment();
                    expired += 1;
                } else {
                    return expired;
                }
            } else {
                return expired;
            }
        }
    }

    pub(super) fn clear(&mut self, hashtable: &mut HashTable, segments: &mut Segments) -> usize {
        if self.head.is_none() {
            return 0;
        }

        let mut cleared = 0;

        loop {
            let seg_id = self.head;
            if let Some(seg_id) = seg_id {
                let mut segment = segments.get_mut(seg_id).unwrap();
                if let Some(next) = segment.next_seg() {
                    self.head = Some(next);
                } else {
                    self.head = None;
                    self.tail = None;
                }
                let _ = segment.clear(hashtable, true);
                segments.push_free(seg_id);
                SEGMENT_CLEAR.increment();
                cleared += 1;
            } else {
                return cleared;
            }
        }
    }

    fn try_expand(&mut self, segments: &mut Segments) -> Result<(), TtlBucketsError> {
        if let Some(id) = segments.pop_free() {
            {
                if let Some(tail_id) = self.tail {
                    let mut tail = segments.get_mut(tail_id).unwrap();
                    tail.set_next_seg(Some(id));
                }
            }
            let mut segment = segments.get_mut(id).unwrap();
            segment.set_prev_seg(self.tail);
            segment.set_next_seg(None);
            segment.set_ttl(Duration::from_secs(self.ttl as u32));
            if self.head.is_none() {
                debug_assert!(self.tail.is_none());
                self.head = Some(id);
            }
            self.tail = Some(id);
            self.nseg += 1;
            debug_assert!(!segment.evictable(), "segment should not be evictable");
            segment.set_evictable(true);
            segment.set_accessible(true);
            Ok(())
        } else {
            Err(TtlBucketsError::NoFreeSegments)
        }
    }

    pub(crate) fn reserve(
        &mut self,
        size: usize,
        segments: &mut Segments,
    ) -> Result<ReservedItem, TtlBucketsError> {
        trace!("reserving: {} bytes for ttl: {}", size, self.ttl);

        let seg_size = segments.segment_size() as usize;

        if size > seg_size {
            debug!("item is oversized");
            return Err(TtlBucketsError::ItemOversized { size });
        }

        loop {
            if let Some(id) = self.tail {
                if let Ok(mut segment) = segments.get_mut(id) {
                    if !segment.accessible() {
                        continue;
                    }
                    let offset = segment.write_offset() as usize;
                    trace!("offset: {}", offset);
                    if offset + size <= seg_size {
                        let size = size as i32;
                        let item = segment.alloc_item(size);
                        return Ok(ReservedItem::new(item, segment.id(), offset));
                    }
                }
            }
            self.try_expand(segments)?;
        }
    }
}
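For orientation, the bucket machinery above is normally exercised through the crate's public `Seg` handle rather than called directly: each insert reserves space in the tail segment of the bucket covering its TTL, and `try_expand` pulls a fresh segment from the free pool when that segment is full. A minimal sketch mirroring the builder/insert/get calls in the expiration test and benchmark snippets quoted among the context items below; the crate name in the import, the re-exported `Duration`, and the concrete sizes are illustrative assumptions rather than values taken from this record:

// Hypothetical usage sketch; API surface assumed from the quoted tests and benchmarks.
use seg::{Duration, Seg};

fn main() {
    // Small cache: 64 segments of 2 KiB each (sizes chosen only for illustration).
    let mut cache = Seg::builder()
        .segment_size(2 * 1024)
        .heap_size(64 * 2 * 1024)
        .hash_power(16)
        .build();

    // Different TTLs land in different TtlBuckets; reserve() allocates from the
    // bucket's tail segment and falls back to try_expand() when it is full.
    assert!(cache
        .insert(b"latte", b"whole milk", None, Duration::from_secs(5))
        .is_ok());
    assert!(cache
        .insert(b"espresso", b"ristretto", None, Duration::from_secs(15))
        .is_ok());

    // Reads go through the hash table; expired segments are reclaimed by expire().
    let _ = cache.get(b"latte");
}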
use super::{SEGMENT_CLEAR, SEGMENT_EXPIRE}; use crate::*; use core::num::NonZeroU32; pub struct TtlBucket { head: Option<NonZeroU32>, tail: Option<NonZeroU32>, ttl: i32, nseg: i32, next_to_merge: Option<NonZeroU32>, _pad: [u8; 44], } impl TtlBucket { pub(super) fn new(ttl: i32) -> Self { Self { head: None, tail: None, ttl, nseg: 0, next_to_merge: None, _pad: [0; 44], } } pub fn head(&self) -> Option<NonZeroU32> { self.head } pub fn set_head(&mut self, id: Option<NonZeroU32>) { self.head = id; } pub fn next_to_merge(&self) -> Option<NonZeroU32> { self.next_to_merge } pub fn set_next_to_merge(&mut self, next: Option<NonZeroU32>) { self.next_to_merge = next; } pub(super) fn expire(&mut self, hashtable: &mut HashTable, segments: &mut Segments) -> usize { if self.head.is_none() { return 0; } let mut expired = 0; loop { let seg_id = self.head; if let Some(seg_id) = seg_id { let flush_at = segments.flush_at(); let mut segment = segments.get_mut(seg_id).unwrap(); if segment.create_at() + segment.ttl() <= Instant::recent() || segment.create_at() < flush_at { if let Some(next) = segment.next_seg() { self.head = Some(next); } else {
ired += 1; } else { return expired; } } else { return expired; } } } pub(super) fn clear(&mut self, hashtable: &mut HashTable, segments: &mut Segments) -> usize { if self.head.is_none() { return 0; } let mut cleared = 0; loop { let seg_id = self.head; if let Some(seg_id) = seg_id { let mut segment = segments.get_mut(seg_id).unwrap(); if let Some(next) = segment.next_seg() { self.head = Some(next); } else { self.head = None; self.tail = None; } let _ = segment.clear(hashtable, true); segments.push_free(seg_id); SEGMENT_CLEAR.increment(); cleared += 1; } else { return cleared; } } } fn try_expand(&mut self, segments: &mut Segments) -> Result<(), TtlBucketsError> { if let Some(id) = segments.pop_free() { { if let Some(tail_id) = self.tail { let mut tail = segments.get_mut(tail_id).unwrap(); tail.set_next_seg(Some(id)); } } let mut segment = segments.get_mut(id).unwrap(); segment.set_prev_seg(self.tail); segment.set_next_seg(None); segment.set_ttl(Duration::from_secs(self.ttl as u32)); if self.head.is_none() { debug_assert!(self.tail.is_none()); self.head = Some(id); } self.tail = Some(id); self.nseg += 1; debug_assert!(!segment.evictable(), "segment should not be evictable"); segment.set_evictable(true); segment.set_accessible(true); Ok(()) } else { Err(TtlBucketsError::NoFreeSegments) } } pub(crate) fn reserve( &mut self, size: usize, segments: &mut Segments, ) -> Result<ReservedItem, TtlBucketsError> { trace!("reserving: {} bytes for ttl: {}", size, self.ttl); let seg_size = segments.segment_size() as usize; if size > seg_size { debug!("item is oversized"); return Err(TtlBucketsError::ItemOversized { size }); } loop { if let Some(id) = self.tail { if let Ok(mut segment) = segments.get_mut(id) { if !segment.accessible() { continue; } let offset = segment.write_offset() as usize; trace!("offset: {}", offset); if offset + size <= seg_size { let size = size as i32; let item = segment.alloc_item(size); return Ok(ReservedItem::new(item, segment.id(), offset)); } } } self.try_expand(segments)?; } } }
self.head = None; self.tail = None; } let _ = segment.clear(hashtable, true); segments.push_free(seg_id); SEGMENT_EXPIRE.increment(); exp
random
[ { "content": "#[inline]\n\nfn copy_slice(dst: &mut [u8], src: &[u8]) -> usize {\n\n let n = cmp::min(dst.len(), src.len());\n\n dst[0..n].copy_from_slice(&src[0..n]);\n\n n\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use tempfile::NamedTempFile;\n\n\n\n fn kvs() -> Vec<(String, String)> {\n\n vec![\n\n (\"abc\", \"def\"),\n\n (\"pink\", \"red\"),\n\n (\"apple\", \"grape\"),\n\n (\"q\", \"burp\"),\n\n ]\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/mod.rs", "rank": 0, "score": 219008.64793613274 }, { "content": "/// Trim starting and ending spaces off of a byte slice\n\nfn trim_bytes(mut slice: &[u8]) -> &[u8] {\n\n while is_space(slice.first().copied()) {\n\n slice = slice.split_first().unwrap().1;\n\n }\n\n\n\n while is_space(slice.last().copied()) {\n\n slice = slice.split_last().unwrap().1\n\n }\n\n\n\n slice\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 1, "score": 198625.81972015614 }, { "content": "fn segment_size() -> i32 {\n\n SEGMENT_SIZE\n\n}\n\n\n", "file_path": "src/rust/config/src/seg.rs", "rank": 2, "score": 195359.39650797506 }, { "content": "pub fn readybuf(size: usize) -> BytesMut {\n\n let mut b = BytesMut::with_capacity(size);\n\n b.resize(size, 0);\n\n b\n\n}\n\n\n\nimpl SliceFactory {\n\n pub fn load(path: &str) -> Result<SliceFactory> {\n\n let mut f = File::open(path)?;\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer)?;\n\n Ok(SliceFactory::HeapStorage(HeapWrap(Bytes::from(buffer))))\n\n }\n\n\n\n pub fn make_map(path: &str) -> Result<SliceFactory> {\n\n let f = File::open(path)?;\n\n let mmap: Mmap = unsafe { MmapOptions::new().map(&f)? };\n\n\n\n let mut buf = [0u8; BUF_LEN];\n\n let mut count = 0;\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/storage.rs", "rank": 3, "score": 193967.9182434666 }, { "content": "// A very fast PRNG which is appropriate for testing\n\npub fn rng() -> impl RngCore {\n\n rand_xoshiro::Xoshiro256PlusPlus::seed_from_u64(0)\n\n}\n\n\n", "file_path": "src/rust/storage/seg/benches/benchmark.rs", "rank": 4, "score": 189389.95385749516 }, { "content": "fn put(buf: &mut Vec<u8>, flags: u32, path: &[u8], mut body: &[u8]) -> Result<(), Error> {\n\n let mut builder = request::put(buf, Uri::new(path))?;\n\n \n\n builder.header(\"Content-Length\", body.len())?;\n\n builder.header(\"Flags\", flags)?;\n\n builder.body(&mut body)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 5, "score": 186537.7713278846 }, { "content": "pub fn benches(c: &mut Criterion) {\n\n c.bench_function(\"build_req_long\", build_req_long);\n\n c.bench_function(\"build_req_long_unsafe\", build_req_long_unsafe);\n\n c.bench_function(\"build_req_short\", build_req_short);\n\n c.bench_function(\"build_req_short_unsafe\", build_req_short_unsafe);\n\n}\n\n\n\ncriterion_group!(group, benches);\n\ncriterion_main!(group);\n", "file_path": "src/rust-util/httpencode/benches/compose.rs", "rank": 6, "score": 185583.18721763755 }, { "content": "/// Load a single option from a byte string.\n\npub fn option_set<'a>(option: &mut option, value: &'a [u8]) -> Result<(), ParseError<'a>> {\n\n let value = parse_value(value, 0, option.type_)?;\n\n\n\n unsafe { set_option_value(option, value) }\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 7, "score": 179760.63147956354 }, { "content": "pub fn opt_to_null_mut<T>(o: Option<*mut T>) -> *mut T {\n\n match o {\n\n Some(p) 
=> p,\n\n None => ptr::null_mut(),\n\n }\n\n}\n", "file_path": "deps/ccommon/rust/ccommon-rs/src/ptrs.rs", "rank": 8, "score": 178405.1432352485 }, { "content": "fn get(buf: &mut Vec<u8>, path: &[u8]) -> Result<(), Error> {\n\n let mut builder = request::get(buf, Uri::new(path))?;\n\n\n\n builder.header(\"Content-Length\", \"0\")?;\n\n builder.finish()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 9, "score": 175860.5761091654 }, { "content": "fn delete(buf: &mut Vec<u8>, path: &[u8]) -> Result<(), Error> {\n\n let mut builder = request::delete(buf, Uri::new(path))?;\n\n\n\n builder.header(\"CoNtEnT-LeNgTh\", \"0\")?;\n\n builder.finish()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 10, "score": 175860.5761091654 }, { "content": "pub fn lift_to_option<T>(p: *mut T) -> Option<*mut T> {\n\n if p.is_null() {\n\n None\n\n } else {\n\n Some(p)\n\n }\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-rs/src/ptrs.rs", "rank": 11, "score": 173102.08633288948 }, { "content": "pub fn head<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Head, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 12, "score": 172010.56767705825 }, { "content": "pub fn size_of(value: &Value) -> usize {\n\n match value {\n\n Value::Bytes(v) => v.len(),\n\n Value::U64(_) => core::mem::size_of::<u64>(),\n\n }\n\n}\n", "file_path": "src/rust/storage/seg/src/item/mod.rs", "rank": 13, "score": 160612.89421086863 }, { "content": "pub fn null_check<T>(p: *mut T) -> result::Result<*mut T, NullPointerError> {\n\n lift_to_option(p).ok_or_else(|| NullPointerError)\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-rs/src/ptrs.rs", "rank": 14, "score": 157324.77838966792 }, { "content": "fn parse_digits(buf: &[u8]) -> Result<usize> {\n\n let s = str::from_utf8(&buf)?;\n\n s.parse::<usize>().map_err(|e| e.into())\n\n}\n\n\n\nconst ARROW_BYTES: &[u8; 2] = b\"->\";\n\n\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/input.rs", "rank": 15, "score": 154440.9352335621 }, { "content": "pub fn load_bytes_at_path(path: &str) -> Result<Box<[u8]>> {\n\n let mut f = File::open(path)?;\n\n let mut buffer = Vec::with_capacity(f.metadata()?.len() as usize);\n\n f.read_to_end(&mut buffer)?;\n\n Ok(buffer.into_boxed_slice())\n\n}\n\n\n\nimpl<'a> Reader<'a> {\n\n pub fn new<T: AsRef<[u8]>>(r: &'a T) -> Reader<'a> {\n\n Reader(r.as_ref())\n\n }\n\n\n\n // TODO: perform basic sanity checks of data (gee i wish there was a checksum\n\n // somwhere in this data format HINT HINT)\n\n //\n\n // * does the secondary index fall safely in the correct index range?\n\n // * do data pointers point at the data segment?\n\n\n\n #[inline]\n\n fn bucket_at(&self, idx: usize) -> Result<Bucket> {\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/mod.rs", "rank": 16, "score": 153787.65156644836 }, { "content": "pub fn init() {\n\n PID.set(std::process::id().into());\n\n}\n", "file_path": "src/rust/metrics/src/lib.rs", "rank": 17, "score": 151529.31409976818 }, { "content": "// helper functions for default values\n\nfn hash_power() -> u8 {\n\n HASH_POWER\n\n}\n\n\n", "file_path": "src/rust/config/src/seg.rs", "rank": 18, "score": 148795.84047733556 }, { "content": "pub fn tests() {\n\n debug!(\"beginning tests\");\n\n println!();\n\n\n\n test(\n\n \"cas not found (key: 0)\",\n\n &[(\"cas 0 0 0 1 1\\r\\n0\\r\\n\", Some(\"NOT_FOUND\\r\\n\"))],\n\n );\n\n test(\"get empty (key: 0)\", 
&[(\"get 0\\r\\n\", Some(\"END\\r\\n\"))]);\n\n test(\"gets empty (key: 0)\", &[(\"gets 0\\r\\n\", Some(\"END\\r\\n\"))]);\n\n test(\n\n \"cas not found (key: 0)\",\n\n &[(\"cas 0 0 0 1 0\\r\\n0\\r\\n\", Some(\"NOT_FOUND\\r\\n\"))],\n\n );\n\n test(\n\n \"set value (key: 0)\",\n\n &[(\"set 0 0 0 1\\r\\n1\\r\\n\", Some(\"STORED\\r\\n\"))],\n\n );\n\n test(\n\n \"get value (key: 0)\",\n", "file_path": "src/rust/server/segcache/tests/common.rs", "rank": 19, "score": 148720.660028336 }, { "content": "pub fn main() {\n\n let bindir = match env::args_os().skip(1).next() {\n\n Some(dir) => PathBuf::from(dir),\n\n None => {\n\n eprintln!(\"Usage: test-pingserver-rs <CMAKE_BINARY_DIR>\");\n\n panic!();\n\n }\n\n };\n\n\n\n let mut server = Command::new(bindir.join(\"_bin/pelikan_twemcache_http\"))\n\n .spawn()\n\n .expect(\"Failed to start server process\");\n\n\n\n // Give the server time to start up\n\n std::thread::sleep(Duration::from_millis(1000));\n\n\n\n let res = catch_unwind(|| run_tests());\n\n\n\n // Give the server some time to crash if any of the test\n\n // would cause that to happen.\n\n std::thread::sleep(Duration::from_millis(100));\n\n\n\n server.kill().expect(\"Server died unexpectedly\");\n\n\n\n match res {\n\n Err(e) => resume_unwind(e),\n\n _ => (),\n\n }\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 20, "score": 148720.660028336 }, { "content": "pub fn klog_format(\n\n w: &mut dyn std::io::Write,\n\n now: DateTime,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n writeln!(\n\n w,\n\n \"{} {}\",\n\n now.to_rfc3339_opts(SecondsFormat::Millis, false),\n\n record.args()\n\n )\n\n}\n", "file_path": "src/rust/logger/src/format.rs", "rank": 21, "score": 148720.660028336 }, { "content": "pub fn default_format(\n\n w: &mut dyn std::io::Write,\n\n now: DateTime,\n\n record: &Record,\n\n) -> Result<(), std::io::Error> {\n\n writeln!(\n\n w,\n\n \"{} {} [{}] {}\",\n\n now.to_rfc3339_opts(SecondsFormat::Millis, false),\n\n record.level(),\n\n record.module_path().unwrap_or(\"<unnamed>\"),\n\n record.args()\n\n )\n\n}\n\n\n", "file_path": "src/rust/logger/src/format.rs", "rank": 22, "score": 148720.660028336 }, { "content": "fn compare_insensitive(a: &[u8], b: &[u8]) -> bool {\n\n if a.len() != b.len() {\n\n return false;\n\n }\n\n\n\n a.iter().copied().map(|c| c.to_ascii_lowercase())\n\n .zip(b.iter().copied().map(|c| c.to_ascii_lowercase()))\n\n .all(|(a, b)| a == b)\n\n}\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 23, "score": 142568.77200953406 }, { "content": " delta_time_i ttl; /* the min ttl of this bucket */\n", "file_path": "src/storage/seg/ttlbucket.h", "rank": 24, "score": 140661.67771209544 }, { "content": "fn compare_ascii_lowercase(a: &[u8], b: &[u8]) -> bool {\n\n if a.len() != b.len() {\n\n return false;\n\n }\n\n\n\n a.iter()\n\n .copied()\n\n .map(|x| x.to_ascii_lowercase())\n\n .zip(b.iter().copied().map(|x| x.to_ascii_lowercase()))\n\n .all(|(a, b)| a == b)\n\n}\n\n\n", "file_path": "src/server/twemcache-http/src/http.rs", "rank": 25, "score": 140441.0313131805 }, { "content": "/// Set the global panic handler to one which logs the\n\n/// panic at `LOG_CRIT` level before calling the original\n\n/// panic hook.\n\npub fn set_panic_handler() {\n\n use ccommon_sys::LOG_CRIT;\n\n\n\n use std::env;\n\n use std::io::Cursor;\n\n\n\n // After logging the message we want to call whatever existing\n\n // panic hook was in place. 
In most cases this should be the\n\n // default hook.\n\n let old_hook = panic::take_hook();\n\n\n\n panic::set_hook(Box::new(move |info: &PanicInfo| {\n\n let mut buffer = [0u8; LOG_MAX_LEN as usize + 1];\n\n let ptr = ccommon_dlog().load(Ordering::Relaxed);\n\n\n\n let msg = if let Some(s) = info.payload().downcast_ref::<&str>() {\n\n *s\n\n } else if let Some(s) = info.payload().downcast_ref::<String>() {\n\n &s[..]\n\n } else {\n", "file_path": "deps/ccommon/rust/ccommon-rs/src/log/shim.rs", "rank": 26, "score": 138962.47900680924 }, { "content": "fn ready_buf(size: usize) -> BytesMut {\n\n let mut b = BytesMut::with_capacity(size);\n\n b.reserve(size);\n\n b\n\n}\n\n\n\npub struct Writer<'a, F>\n\nwhere\n\n F: Write + Seek + 'a,\n\n{\n\n file: &'a mut F,\n\n index: Vec<Vec<IndexEntry>>,\n\n}\n\n\n\nimpl<'a, F> Writer<'a, F>\n\nwhere\n\n F: Write + Seek + 'a,\n\n{\n\n pub fn new(file: &'a mut F) -> Result<Writer<'a, F>> {\n\n file.seek(SeekFrom::Start(0))?;\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/mod.rs", "rank": 27, "score": 138821.5199208277 }, { "content": "fn reverse_in_place<T>(mut slice: &mut [T]) {\n\n while slice.len() > 1 {\n\n let (first, rest) = slice.split_first_mut().unwrap();\n\n let (last, rest) = rest.split_last_mut().unwrap();\n\n slice = rest;\n\n\n\n std::mem::swap(first, last);\n\n }\n\n}\n\n\n\nmacro_rules! impl_unsigned {\n\n ( $( $ty:ident ),* ) => {\n\n $(\n\n impl HeaderValue for $ty {\n\n fn put<B: BufMut>(&self, buf: &mut B) -> Result<(), OutOfBufferError> {\n\n let mut digits = ArrayVec::<[u8; base10_digits::<$ty>(false)]>::new();\n\n let mut value = *self;\n\n\n\n if value == 0 {\n\n digits.push(b'0');\n", "file_path": "src/rust-util/httpencode/src/traits.rs", "rank": 28, "score": 138405.05606643154 }, { "content": "// Write out a string and percent-escape any invalid characters within.\n\nfn write_percent_escaped<B, F>(buf: &mut B, path: &[u8], is_valid: F) -> Result<()>\n\nwhere\n\n B: BufMut,\n\n F: Fn(u8) -> bool,\n\n{\n\n fn next_invalid<F: Fn(u8) -> bool>(bytes: &[u8], is_valid: &F) -> Option<(usize, u8)> {\n\n for i in 0..bytes.len() {\n\n let b = unsafe { *bytes.get_unchecked(i) };\n\n\n\n if !is_valid(b) {\n\n return Some((i, b));\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n fn hex_encode(byte: u8) -> u8 {\n\n let byte = byte & 0xF;\n\n match byte {\n", "file_path": "src/rust-util/httpencode/src/util.rs", "rank": 29, "score": 136824.2387126423 }, { "content": "/// Logs GET or GETS\n\nfn klog_get(command: &MemcacheCommand, key: &[u8], response_len: usize) {\n\n if response_len == 0 {\n\n klog!(\"\\\"{} {}\\\" 0 {}\", command, string_key(Ok(key)), response_len);\n\n } else {\n\n klog!(\"\\\"{} {}\\\" 4 {}\", command, string_key(Ok(key)), response_len);\n\n }\n\n}\n\n\n", "file_path": "src/rust/protocol/src/memcache/wire/response/mod.rs", "rank": 30, "score": 136758.8596230761 }, { "content": " proc_time_i next_expiration_sec;\n", "file_path": "src/storage/seg/ttlbucket.h", "rank": 31, "score": 135763.611083229 }, { "content": "fn setup(opts: &mut Options, metrics: &mut Metrics) {\n\n use crate::memcached::sys::process_setup;\n\n use ccommon_sys::*;\n\n use pelikan_sys::{\n\n hotkey::hotkey_setup, protocol::memcache::*, storage::slab::*, time::time_setup,\n\n util::procinfo_setup,\n\n };\n\n use std::ptr::null_mut;\n\n\n\n unsafe {\n\n log_setup(&mut metrics.log);\n\n if debug_setup(&mut opts.debug) < 0 {\n\n panic!(\"debug log setup failed\");\n\n }\n\n\n\n // Setup top-level application options\n\n if opts.server.daemonize.value() {\n\n 
pelikan_sys::util::daemonize();\n\n }\n\n\n", "file_path": "src/server/twemcache-http/src/main.rs", "rank": 32, "score": 134658.94085273566 }, { "content": "fn set(c: &mut Criterion) {\n\n let parser = MemcacheRequestParser::new(MAX_VALUE_SIZE, TimeType::Memcache);\n\n\n\n let mut group = c.benchmark_group(\"set\");\n\n group.measurement_time(Duration::from_secs(DURATION));\n\n group.throughput(Throughput::Elements(1));\n\n\n\n let mut buffer = Vec::new();\n\n buffer.extend_from_slice(b\"set 0 0 0 1\\r\\n1\\r\\n\");\n\n buffer.resize_with(BUFFER_SIZE, Default::default);\n\n\n\n group.bench_function(\"1b/1b\", |b| {\n\n b.iter(|| {\n\n let _ = parser.parse(&buffer);\n\n })\n\n });\n\n\n\n let mut buffer = Vec::new();\n\n buffer.extend_from_slice(b\"set 0 0 0 10\\r\\n0123456789\\r\\n\");\n\n buffer.resize_with(BUFFER_SIZE, Default::default);\n", "file_path": "src/rust/protocol/benches/memcache.rs", "rank": 33, "score": 133642.68979818147 }, { "content": "fn get(c: &mut Criterion) {\n\n let parser = MemcacheRequestParser::new(MAX_VALUE_SIZE, TimeType::Memcache);\n\n\n\n let mut group = c.benchmark_group(\"get\");\n\n group.measurement_time(Duration::from_secs(DURATION));\n\n group.throughput(Throughput::Elements(1));\n\n\n\n let mut buffer = Vec::new();\n\n buffer.extend_from_slice(b\"get 0\\r\\n\");\n\n buffer.resize_with(BUFFER_SIZE, Default::default);\n\n\n\n group.bench_function(\"1b\", |b| {\n\n b.iter(|| {\n\n let _ = parser.parse(&buffer);\n\n })\n\n });\n\n}\n\n\n", "file_path": "src/rust/protocol/benches/memcache.rs", "rank": 34, "score": 133642.68979818147 }, { "content": "fn validate_key(key: &[u8]) {\n\n if key.is_empty() {\n\n panic!(\"key is zero-length\");\n\n }\n\n if key.len() > MAX_KEY_LEN {\n\n panic!(\"key is too long\");\n\n }\n\n if key.windows(1).any(|w| w == b\" \") {\n\n panic!(\"key contains SPACE: {:?}\", key);\n\n }\n\n if key.windows(2).any(|w| w == b\"\\r\\n\") {\n\n panic!(\"key contains CRLF: {:?}\", key);\n\n }\n\n}\n", "file_path": "src/rust/protocol/fuzz/fuzz_targets/memcache.rs", "rank": 35, "score": 132513.55084204755 }, { "content": "#[test]\n\nfn expiration() {\n\n let segments = 64;\n\n let segment_size = 2 * 1024;\n\n let heap_size = segments * segment_size as usize;\n\n\n\n let mut cache = Seg::builder()\n\n .segment_size(segment_size)\n\n .heap_size(heap_size)\n\n .hash_power(16)\n\n .build();\n\n\n\n assert_eq!(cache.items(), 0);\n\n assert_eq!(cache.segments.free(), segments);\n\n\n\n assert!(cache\n\n .insert(b\"latte\", b\"\", None, Duration::from_secs(5))\n\n .is_ok());\n\n assert!(cache\n\n .insert(b\"espresso\", b\"\", None, Duration::from_secs(15))\n\n .is_ok());\n", "file_path": "src/rust/storage/seg/src/tests.rs", "rank": 36, "score": 129887.44349830484 }, { "content": "fn order_ascii_lowercase(a: &[u8], b: &[u8]) -> std::cmp::Ordering {\n\n use std::cmp::Ordering;\n\n\n\n match a.len().cmp(&b.len()) {\n\n Ordering::Equal => (),\n\n x => return x,\n\n }\n\n\n\n for (a, b) in a.iter().zip(b.iter()) {\n\n let a = a.to_ascii_lowercase();\n\n let b = b.to_ascii_lowercase();\n\n\n\n match a.cmp(&b) {\n\n Ordering::Equal => (),\n\n x => return x,\n\n }\n\n }\n\n\n\n Ordering::Equal\n\n}\n", "file_path": "src/server/twemcache-http/src/http.rs", "rank": 37, "score": 129569.31851628289 }, { "content": "struct EscapedByteString<'b>(&'b [u8]);\n\n\n\nimpl fmt::Display for EscapedByteString<'_> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n let iter = self\n\n .0\n\n .iter()\n\n .cloned()\n\n .flat_map(|x| 
std::ascii::escape_default(x))\n\n .map(|x| x as char);\n\n\n\n fmt.write_str(\"\\\"\")?;\n\n for c in iter {\n\n fmt.write_fmt(format_args!(\"{}\", c))?;\n\n }\n\n fmt.write_str(\"\\\"\")?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 38, "score": 128952.88048717414 }, { "content": "fn get_benchmark(c: &mut Criterion) {\n\n // use the default config\n\n let config = SegcacheConfig::default();\n\n\n\n // launch the server\n\n let server = Segcache::new(config);\n\n\n\n // wait for server to startup. duration is chosen to be longer than we'd\n\n // expect startup to take in a slow ci environment.\n\n std::thread::sleep(Duration::from_secs(10));\n\n\n\n // connect and initialize an empty buffer\n\n let mut stream = TcpStream::connect(\"127.0.0.1:12321\").expect(\"failed to connect\");\n\n let mut buffer = vec![0; 1024 * 1024];\n\n\n\n // define a benchmarking group\n\n let mut group = c.benchmark_group(\"request\");\n\n group.throughput(Throughput::Elements(1));\n\n\n\n // benchmark for a few key lengths\n", "file_path": "src/rust/server/segcache/benches/benchmark.rs", "rank": 39, "score": 128490.9364147259 }, { "content": "fn set_benchmark(c: &mut Criterion) {\n\n let ttl = Duration::ZERO;\n\n let mut group = c.benchmark_group(\"set\");\n\n group.measurement_time(Duration::from_secs(30));\n\n group.throughput(Throughput::Elements(1));\n\n\n\n for key_size in [1, 255].iter() {\n\n for value_size in [1, 64, 1024, 16384].iter() {\n\n let (keys, values) = key_values(*key_size, 1_000_000, *value_size, 10_000);\n\n\n\n // launch the server\n\n let mut cache = Seg::builder()\n\n .hash_power(16)\n\n .heap_size(64 * MB)\n\n .segment_size(MB as i32)\n\n .build();\n\n\n\n let mut key = 0;\n\n let mut value = 0;\n\n\n", "file_path": "src/rust/storage/seg/benches/benchmark.rs", "rank": 40, "score": 128490.9364147259 }, { "content": "fn get_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"get\");\n\n group.measurement_time(Duration::from_secs(30));\n\n group.throughput(Throughput::Elements(1));\n\n\n\n for key_size in [1, 255].iter() {\n\n let (keys, _values) = key_values(*key_size, 1_000_000, 0, 0);\n\n\n\n // launch the server\n\n let mut cache = Seg::builder()\n\n .hash_power(16)\n\n .heap_size(64 * MB)\n\n .segment_size(MB as i32)\n\n .build();\n\n\n\n let mut key = 0;\n\n\n\n group.bench_function(&format!(\"{}b/0b\", key_size), |b| {\n\n b.iter(|| {\n\n cache.get(&keys[key]);\n\n key += 1;\n\n if key >= keys.len() {\n\n key = 0;\n\n }\n\n })\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/rust/storage/seg/benches/benchmark.rs", "rank": 41, "score": 128490.9364147259 }, { "content": "fn ping_benchmark(c: &mut Criterion) {\n\n let config = PingserverConfig::default();\n\n\n\n // launch the server\n\n let server = Pingserver::new(config);\n\n\n\n // wait for server to startup. 
duration is chosen to be longer than we'd\n\n // expect startup to take in a slow ci environment.\n\n std::thread::sleep(Duration::from_secs(10));\n\n\n\n // connect and initialize an empty buffer\n\n let mut stream = TcpStream::connect(\"127.0.0.1:12321\").expect(\"failed to connect\");\n\n let mut buffer = vec![0; 1024 * 1024];\n\n\n\n // define a benchmarking group\n\n let mut group = c.benchmark_group(\"request\");\n\n group.throughput(Throughput::Elements(1));\n\n\n\n let msg = b\"ping\\r\\n\";\n\n let bench_name = \"ping\";\n", "file_path": "src/rust/server/pingserver/benches/benchmark.rs", "rank": 42, "score": 128490.9364147259 }, { "content": "/// Given an AdminHandler and a DataProcessor start up a server\n\npub fn core_run_tcp<W, H>(\n\n admin_opts: &AdminOptions,\n\n server_opts: &ListenerOptions,\n\n metrics: &'static CoreMetrics,\n\n admin_handler: H,\n\n worker: W,\n\n) -> IOResult<()>\n\nwhere\n\n W: Worker + Unpin + 'static,\n\n H: AdminHandler + Send + 'static,\n\n{\n\n use tokio::sync::mpsc::channel;\n\n\n\n let admin_addr = admin_opts.addr().expect(\"Invalid socket address\");\n\n let server_addr = server_opts.addr().expect(\"Invalid socket address\");\n\n\n\n let dlog_intvl = admin_opts.dlog_intvl();\n\n\n\n let (send, recv) = channel(1024);\n\n\n", "file_path": "src/rustcore/src/lib.rs", "rank": 43, "score": 128428.29884310022 }, { "content": "pub fn delete<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Delete, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 44, "score": 126265.67839864697 }, { "content": "pub fn options<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Options, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 45, "score": 126265.67839864697 }, { "content": "pub fn patch<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Patch, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 46, "score": 126265.67839864697 }, { "content": "pub fn put<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Put, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 47, "score": 126265.67839864697 }, { "content": "pub fn trace<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Trace, Version::Http11, path)\n\n}\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 48, "score": 126265.67839864697 }, { "content": "pub fn post<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Post, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 49, "score": 126265.67839864697 }, { "content": "pub fn get<B: BufMut>(buf: B, path: Uri) -> Result<HttpBuilder<B>> {\n\n HttpBuilder::request(buf, Method::Get, Version::Http11, path)\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/request.rs", "rank": 50, "score": 126265.67839864697 }, { "content": "fn build_req_long(b: &mut Bencher) {\n\n let mut buf = Vec::new();\n\n buf.reserve(1 << 14);\n\n\n\n b.iter(|| -> Result<_, _> {\n\n buf.clear();\n\n\n\n let mut req = HttpBuilder::request(\n\n &mut buf,\n\n Method::Get,\n\n Version::Http11,\n\n 
Uri::new(b\"/wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg\")\n\n )?;\n\n\n\n req.header(\"Host\", \"www.kittyhell.com\")?;\n\n req.header(\"User-Agent\", \"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\")?;\n\n req.header(\"Accept\", \"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\")?;\n\n req.header(\"Accept-Language\", \"ja,en-us;q=0.7,en;q=0.3\")?;\n\n req.header(\"Accept-Encoding\", \"gzip,deflate\")?;\n\n req.header(\"Accept-Charset\", \"Shift_JIS,utf-8;q=0.7,*;q=0.7\")?;\n\n req.header(\"Keep-Alive\", \"115\")?;\n\n req.header(\"Connection\", \"keep-alive\")?;\n\n req.header(\"Cookie\", \"wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral|padding=under256\")?;\n\n\n\n req.finish().map(|_| ())\n\n });\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/benches/compose.rs", "rank": 51, "score": 126123.65286996013 }, { "content": "fn build_req_short(b: &mut Bencher) {\n\n let mut buf = Vec::new();\n\n buf.reserve(1 << 14);\n\n\n\n b.iter(|| -> Result<_, _> {\n\n buf.clear();\n\n\n\n let mut req = HttpBuilder::request(&mut buf, Method::Get, Version::Http11, Uri::new(b\"/\"))?;\n\n\n\n req.header(\"Host\", \"example.com\")?;\n\n req.header(\"Cookie\", \"session=60; user_id=1\")?;\n\n\n\n req.finish().map(|_| ())\n\n });\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/benches/compose.rs", "rank": 52, "score": 126123.65286996013 }, { "content": "/// An `EventLoop` describes the functions which must be implemented for a basic\n\n/// event loop and provides some default implementations and helper functions.\n\npub trait EventLoop {\n\n // the following functions must be implemented\n\n\n\n /// Provides access to the `Poll` structure which allows polling for new\n\n /// readiness events and managing registration for event sources.\n\n fn poll(&mut self) -> &mut Poll;\n\n\n\n /// Handle new data received for the `Session` with the provided `Token`.\n\n /// This will include parsing the incoming data and composing a response.\n\n fn handle_data(&mut self, token: Token) -> Result<(), std::io::Error>;\n\n\n\n /// Handle a read event for the `Session` with the `Token`.\n\n fn do_read(&mut self, token: Token) -> Result<(), ()> {\n\n if let Ok(session) = self.poll().get_mut_session(token) {\n\n // read from session to buffer\n\n match session.fill_buf().map(|b| b.len()) {\n\n Ok(0) => {\n\n trace!(\"hangup for session: {:?}\", session);\n\n let _ = self.poll().close_session(token);\n\n Err(())\n", "file_path": "src/rust/core/server/src/threads/traits/event_loop.rs", "rank": 53, "score": 125805.56643444154 }, { "content": "fn escape(bytes: &[u8]) -> EscapedByteString {\n\n EscapedByteString(bytes)\n\n}\n\n\n", "file_path": "test/server/twemcache-http/src/lib.rs", "rank": 54, "score": 123881.24869189886 }, { "content": "fn build_req_long_unsafe(b: &mut Bencher) {\n\n let mut buf = Vec::new();\n\n buf.reserve(1 << 14);\n\n\n\n b.iter(|| -> Result<_, _> {\n\n unsafe {\n\n buf.clear();\n\n\n\n let mut req = HttpBuilder::request(\n\n &mut buf,\n\n Method::Get,\n\n Version::Http11,\n\n Uri::escaped_unchecked(b\"/wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg\")\n\n )?;\n\n\n\n req.header_unchecked(\"Host\", \"www.kittyhell.com\")?;\n\n req.header_unchecked(\"User-Agent\", \"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 
10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\")?;\n\n req.header_unchecked(\"Accept\", \"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\")?;\n\n req.header_unchecked(\"Accept-Language\", \"ja,en-us;q=0.7,en;q=0.3\")?;\n\n req.header_unchecked(\"Accept-Encoding\", \"gzip,deflate\")?;\n\n req.header_unchecked(\"Accept-Charset\", \"Shift_JIS,utf-8;q=0.7,*;q=0.7\")?;\n\n req.header_unchecked(\"Keep-Alive\", \"115\")?;\n\n req.header_unchecked(\"Connection\", \"keep-alive\")?;\n\n req.header_unchecked(\"Cookie\", \"wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral|padding=under256\")?;\n\n\n\n req.finish().map(|_| ())\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/benches/compose.rs", "rank": 55, "score": 123880.81116875951 }, { "content": "fn build_req_short_unsafe(b: &mut Bencher) {\n\n let mut buf = Vec::new();\n\n buf.reserve(1 << 14);\n\n\n\n b.iter(|| -> Result<_, _> {\n\n unsafe {\n\n buf.clear();\n\n\n\n let mut req = HttpBuilder::request(\n\n &mut buf,\n\n Method::Get,\n\n Version::Http11,\n\n Uri::escaped_unchecked(b\"/\"),\n\n )?;\n\n\n\n req.header_unchecked(\"Host\", \"example.com\")?;\n\n req.header_unchecked(\"Cookie\", \"session=60; user_id=1\")?;\n\n\n\n req.finish().map(|_| ())\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/benches/compose.rs", "rank": 56, "score": 123880.81116875951 }, { "content": "/// Creats a new queue pair that can hold up to capacity items in each\n\n/// direction. The optional `Waker`s allow the sender to inform the receiver\n\n/// that a receive operation should return at least one message.\n\npub fn queue_pair_with_capacity<A, B>(\n\n capacity: usize,\n\n waker_a: Option<Arc<Waker>>,\n\n waker_b: Option<Arc<Waker>>,\n\n) -> (QueuePair<A, B>, QueuePair<B, A>) {\n\n let (to_a, from_b) = rtrb::RingBuffer::new(capacity).split();\n\n let (to_b, from_a) = rtrb::RingBuffer::new(capacity).split();\n\n\n\n let queue_a = QueuePair::<A, B> {\n\n send: to_b,\n\n recv: from_b,\n\n waker: waker_b,\n\n };\n\n\n\n let queue_b = QueuePair::<B, A> {\n\n send: to_a,\n\n recv: from_a,\n\n waker: waker_a,\n\n };\n\n\n", "file_path": "src/rust/queues/src/queue_pair/mod.rs", "rank": 57, "score": 123823.76848196925 }, { "content": "fn interval() -> usize {\n\n INTERVAL\n\n}\n\n\n", "file_path": "src/rust/config/src/klog.rs", "rank": 58, "score": 123501.32679461688 }, { "content": "fn sample() -> usize {\n\n SAMPLE\n\n}\n\n\n", "file_path": "src/rust/config/src/klog.rs", "rank": 59, "score": 123501.32679461688 }, { "content": "fn poolsize() -> usize {\n\n BUF_POOLSIZE\n\n}\n\n\n\n// struct definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Buf {\n\n #[serde(default = \"size\")]\n\n size: usize,\n\n #[serde(default = \"poolsize\")]\n\n poolsize: usize,\n\n}\n\n\n\n// implementation\n\nimpl Buf {\n\n pub fn size(&self) -> usize {\n\n self.size\n\n }\n\n\n\n pub fn poolsize(&self) -> usize {\n", "file_path": "src/rust/config/src/buf.rs", "rank": 60, "score": 123501.32679461688 }, { "content": "fn nevent() -> usize {\n\n WORKER_NEVENT\n\n}\n\n\n", "file_path": "src/rust/config/src/worker.rs", "rank": 61, "score": 123501.32679461688 }, { "content": "fn nevent() -> usize {\n\n ADMIN_NEVENT\n\n}\n\n\n", "file_path": "src/rust/config/src/admin.rs", "rank": 62, "score": 123501.32679461688 }, { "content": "fn 
nevent() -> usize {\n\n SERVER_NEVENT\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Server {\n\n #[serde(default = \"host\")]\n\n host: String,\n\n #[serde(default = \"port\")]\n\n port: String,\n\n #[serde(default = \"timeout\")]\n\n timeout: usize,\n\n #[serde(default = \"nevent\")]\n\n nevent: usize,\n\n}\n\n\n\n// implementation\n\nimpl Server {\n\n /// Host address to listen on\n", "file_path": "src/rust/config/src/server.rs", "rank": 63, "score": 123501.32679461688 }, { "content": "// helper functions\n\nfn size() -> usize {\n\n BUF_DEFAULT_SIZE\n\n}\n\n\n", "file_path": "src/rust/config/src/buf.rs", "rank": 64, "score": 123501.32679461688 }, { "content": "// helper functions\n\nfn backlog() -> usize {\n\n TCP_BACKLOG\n\n}\n\n\n", "file_path": "src/rust/config/src/tcp.rs", "rank": 65, "score": 123501.32679461688 }, { "content": "fn poolsize() -> usize {\n\n TCP_POOLSIZE\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Tcp {\n\n #[serde(default = \"backlog\")]\n\n backlog: usize,\n\n #[serde(default = \"poolsize\")]\n\n poolsize: usize,\n\n}\n\n\n\n// implementation\n\nimpl Tcp {\n\n pub fn backlog(&self) -> usize {\n\n self.backlog\n\n }\n\n\n\n pub fn poolsize(&self) -> usize {\n", "file_path": "src/rust/config/src/tcp.rs", "rank": 66, "score": 123501.32679461688 }, { "content": "fn timeout() -> usize {\n\n ADMIN_TIMEOUT\n\n}\n\n\n", "file_path": "src/rust/config/src/admin.rs", "rank": 67, "score": 123501.32679461688 }, { "content": "fn timeout() -> usize {\n\n SERVER_TIMEOUT\n\n}\n\n\n", "file_path": "src/rust/config/src/server.rs", "rank": 68, "score": 123501.32679461688 }, { "content": "fn threads() -> usize {\n\n WORKER_THREADS\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Worker {\n\n #[serde(default = \"timeout\")]\n\n timeout: usize,\n\n #[serde(default = \"nevent\")]\n\n nevent: usize,\n\n #[serde(default = \"threads\")]\n\n threads: usize,\n\n}\n\n\n\n// implementation\n\nimpl Worker {\n\n pub fn timeout(&self) -> usize {\n\n self.timeout\n\n }\n", "file_path": "src/rust/config/src/worker.rs", "rank": 69, "score": 123501.32679461688 }, { "content": "// helper functions\n\nfn timeout() -> usize {\n\n WORKER_TIMEOUT\n\n}\n\n\n", "file_path": "src/rust/config/src/worker.rs", "rank": 70, "score": 123501.32679461688 }, { "content": "fn print_directives(lib: impl AsRef<str>, subdir: impl AsRef<str>) {\n\n let lib: &str = lib.as_ref();\n\n let subdir: &str = subdir.as_ref();\n\n let bindir = get_cmake_binary_dir().unwrap_or(format!(\"{}/build\", env::var(\"OUT_DIR\").unwrap()));\n\n\n\n println!(\"cargo:rustc-link-search={}/{}/\", bindir, subdir);\n\n println!(\"cargo:rustc-link-lib={}\", lib);\n\n}\n\n\n", "file_path": "src/rust-util/pelikan-sys/build.rs", "rank": 71, "score": 123297.46496197395 }, { "content": "/// Parse a key-value pair separated by `:` then strip the whitespace\n\n/// off of both the key and the value.\n\nfn parse_kv<'a>(input: &'a [u8], line: u32) -> Result<(&'a [u8], &'a [u8]), ParseError<'a>> {\n\n let mut first = true;\n\n\n\n let mut split = input.split(|&x| {\n\n if x == b':' && first {\n\n first = false;\n\n return true;\n\n }\n\n\n\n false\n\n });\n\n\n\n let key = match split.next() {\n\n Some(x) => x,\n\n // There will always be at least one subslice\n\n None => unreachable!(),\n\n };\n\n let value = match split.next() {\n\n Some(x) => x,\n\n None => return Err(ParseError::missing_colon(Span::new(input, line))),\n", "file_path": 
"deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 72, "score": 123135.18378034768 }, { "content": "#[test]\n\nfn bucket_index() {\n\n let ttl_buckets = TtlBuckets::new();\n\n\n\n // Zero TTL and max duration both go into the same TtlBucket\n\n assert_eq!(ttl_buckets.get_bucket_index(Duration::from_secs(0)), 1023);\n\n assert_eq!(\n\n ttl_buckets.get_bucket_index(Duration::from_secs(u32::MAX)),\n\n 1023\n\n );\n\n\n\n // first bucket is only 7s wide because 0 is no ttl\n\n assert_eq!(ttl_buckets.get_bucket_index(Duration::from_secs(1)), 0);\n\n assert_eq!(ttl_buckets.get_bucket_index(Duration::from_secs(7)), 0);\n\n\n\n // buckets from 8s - 2048s (0..34 minutes) are all 8s wide\n\n for bucket in 1..256 {\n\n let start = Duration::from_secs(8 * bucket);\n\n let end = Duration::from_secs(8 * bucket + 7);\n\n assert_eq!(\n\n ttl_buckets.get_bucket_index(start) as u32,\n", "file_path": "src/rust/storage/seg/src/ttl_buckets/tests.rs", "rank": 73, "score": 121871.4607781822 }, { "content": "/// Validate a key name.\n\n///\n\n/// A valid key can only contain characters in `[a-zA-Z0-9_]`.\n\nfn is_valid_name(key: &[u8]) -> bool {\n\n key.iter()\n\n .copied()\n\n .all(|x: u8| x.is_ascii_alphanumeric() || x == b'_')\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 74, "score": 121753.19326651416 }, { "content": "fn escape_string(bytes: &[u8]) -> String {\n\n bytes\n\n .iter()\n\n .flat_map(|x| std::ascii::escape_default(*x))\n\n .map(|c| c as char)\n\n .collect()\n\n}\n\n\n\nimpl fmt::Debug for Value<'_> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Value::Bool(v) => fmt.debug_tuple(\"Bool\").field(v).finish(),\n\n Value::Float(v) => fmt.debug_tuple(\"Float\").field(v).finish(),\n\n Value::UInt(v) => fmt.debug_tuple(\"UInt\").field(v).finish(),\n\n Value::Str(s) => {\n\n let escaped = &escape_string(s);\n\n fmt.debug_tuple(\"Str\").field(escaped).finish()\n\n }\n\n }\n\n }\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 75, "score": 121753.19326651416 }, { "content": "struct KVSizes(usize, usize);\n\n\n\nconst PLUS: u8 = 0x2b; // ASCII '+'\n\nconst COMMA: u8 = 0x2c; // ASCII ','\n\nconst COLON: u8 = 0x3a; // ASCII ':'\n\nconst NL: u8 = 0x0a; // ASCII '\\n'\n\n\n", "file_path": "src/storage/cdb/cdb_rs/src/cdb/input.rs", "rank": 76, "score": 121445.82509719816 }, { "content": "fn escaped(bytes: &[u8]) -> Cow<str> {\n\n if bytes.iter().copied().all(|c| c.is_ascii()) {\n\n Cow::Borrowed(std::str::from_utf8(bytes).unwrap())\n\n } else {\n\n let string = bytes\n\n .iter()\n\n .copied()\n\n .flat_map(std::ascii::escape_default)\n\n .map(|c| c as char)\n\n .collect();\n\n\n\n Cow::Owned(string)\n\n }\n\n}\n\n\n", "file_path": "src/rust-util/httpencode/src/tests.rs", "rank": 77, "score": 121259.04813092665 }, { "content": "fn use_tls() -> bool {\n\n ADMIN_USE_TLS\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Admin {\n\n #[serde(default = \"host\")]\n\n host: String,\n\n #[serde(default = \"port\")]\n\n port: String,\n\n #[serde(default = \"timeout\")]\n\n timeout: usize,\n\n #[serde(default = \"nevent\")]\n\n nevent: usize,\n\n #[serde(default = \"tw_tick\")]\n\n tw_tick: usize,\n\n #[serde(default = \"tw_cap\")]\n\n tw_cap: usize,\n\n #[serde(default = \"tw_ntick\")]\n", "file_path": "src/rust/config/src/admin.rs", "rank": 78, "score": 120736.19815501932 }, { "content": "fn heap_size() -> usize {\n\n HEAP_SIZE\n\n}\n\n\n", "file_path": 
"src/rust/config/src/seg.rs", "rank": 79, "score": 120691.9207839444 }, { "content": "fn nbuf() -> usize {\n\n STATS_LOG_NBUF\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct StatsLog {\n\n #[serde(default = \"file\")]\n\n file: Option<String>,\n\n #[serde(default = \"nbuf\")]\n\n nbuf: usize,\n\n}\n\n\n\n// implementation\n\nimpl StatsLog {\n\n pub fn log_file(&self) -> Option<String> {\n\n self.file.clone()\n\n }\n\n\n\n pub fn log_nbuf(&self) -> usize {\n\n self.nbuf\n\n }\n\n}\n\n\n", "file_path": "src/rust/config/src/stats_log.rs", "rank": 80, "score": 120691.9207839444 }, { "content": "fn queue_depth() -> usize {\n\n QUEUE_DEPTH\n\n}\n\n\n", "file_path": "src/rust/config/src/klog.rs", "rank": 81, "score": 120691.9207839444 }, { "content": "// helper functions\n\nfn nelem_delta() -> usize {\n\n NELEM_DELTA\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Array {\n\n #[serde(default = \"nelem_delta\")]\n\n nelem_delta: usize,\n\n}\n\n\n\n// implementation\n\nimpl Array {\n\n pub fn nelem_delta(&self) -> usize {\n\n self.nelem_delta\n\n }\n\n}\n\n\n", "file_path": "src/rust/config/src/array.rs", "rank": 82, "score": 120691.9207839444 }, { "content": "fn compact_target() -> usize {\n\n COMPACT_TARGET\n\n}\n\n\n", "file_path": "src/rust/config/src/seg.rs", "rank": 83, "score": 120691.9207839444 }, { "content": "fn tw_ntick() -> usize {\n\n ADMIN_TW_NTICK\n\n}\n\n\n", "file_path": "src/rust/config/src/admin.rs", "rank": 84, "score": 120691.9207839444 }, { "content": "fn tw_tick() -> usize {\n\n ADMIN_TW_TICK\n\n}\n\n\n", "file_path": "src/rust/config/src/admin.rs", "rank": 85, "score": 120691.9207839444 }, { "content": "fn dlog_interval() -> usize {\n\n DLOG_INTERVAL\n\n}\n\n\n\n// struct definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct PingserverConfig {\n\n // top-level\n\n #[serde(default = \"daemonize\")]\n\n daemonize: bool,\n\n #[serde(default = \"pid_filename\")]\n\n pid_filename: Option<String>,\n\n #[serde(default = \"dlog_interval\")]\n\n dlog_interval: usize,\n\n\n\n // application modules\n\n #[serde(default)]\n\n admin: Admin,\n\n #[serde(default)]\n\n server: Server,\n", "file_path": "src/rust/config/src/pingserver.rs", "rank": 86, "score": 120691.9207839444 }, { "content": "fn tw_cap() -> usize {\n\n ADMIN_TW_CAP\n\n}\n\n\n", "file_path": "src/rust/config/src/admin.rs", "rank": 87, "score": 120691.9207839444 }, { "content": "// helper functions\n\nfn max_power() -> usize {\n\n DBUF_DEFAULT_MAX\n\n}\n\n\n\n// struct definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Dbuf {\n\n #[serde(default = \"max_power\")]\n\n max_power: usize,\n\n}\n\n\n\n// implementation\n\nimpl Dbuf {\n\n pub fn max_power(&self) -> usize {\n\n self.max_power\n\n }\n\n}\n\n\n", "file_path": "src/rust/config/src/dbuf.rs", "rank": 88, "score": 120691.9207839444 }, { "content": "fn dlog_interval() -> usize {\n\n DLOG_INTERVAL\n\n}\n\n\n\n// struct definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct SegcacheConfig {\n\n // top-level\n\n #[serde(default = \"daemonize\")]\n\n daemonize: bool,\n\n #[serde(default = \"pid_filename\")]\n\n pid_filename: Option<String>,\n\n #[serde(default = \"dlog_interval\")]\n\n dlog_interval: usize,\n\n\n\n // application modules\n\n #[serde(default)]\n\n admin: Admin,\n\n #[serde(default)]\n\n server: Server,\n", "file_path": "src/rust/config/src/segcache.rs", "rank": 89, "score": 120691.9207839444 }, { "content": "fn merge_target() -> usize 
{\n\n MERGE_TARGET\n\n}\n\n\n", "file_path": "src/rust/config/src/seg.rs", "rank": 90, "score": 120691.9207839444 }, { "content": "fn merge_max() -> usize {\n\n MERGE_MAX\n\n}\n\n\n", "file_path": "src/rust/config/src/seg.rs", "rank": 91, "score": 120691.9207839444 }, { "content": "/// Load options from a type which implements `BufRead`.\n\n///\n\n/// In most cases you'll want to use [`OptionExt::load`][0]\n\n/// instead.\n\n///\n\n/// [0]: crate::option::OptionExt::load\n\npub fn option_load<R: BufRead>(\n\n options: &mut [option],\n\n source: &mut R,\n\n) -> Result<(), ParseError<'static>> {\n\n let mut linebuf = Vec::new();\n\n\n\n let mut lineno = 0;\n\n\n\n while source\n\n .read_until(b'\\n', &mut linebuf)\n\n .map_err(ParseError::ioerror)?\n\n != 0\n\n {\n\n // Strip off any comments before doing parsing\n\n let line = linebuf.split(|&x| x == b'#').next().unwrap();\n\n\n\n if line.iter().copied().all(|x| x.is_ascii_whitespace()) {\n\n linebuf.clear();\n\n continue;\n\n }\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 92, "score": 119679.65903465006 }, { "content": "/// Initialize the logger. It will automatically use the\n\n/// ccommon debug logger once that is set up.\n\npub fn init() -> Result<(), SetLoggerError> {\n\n use log::LevelFilter;\n\n\n\n // TODO: dynamically set this based on dlog?\n\n log::set_max_level(LevelFilter::Trace);\n\n\n\n log::set_logger(&LOGGER)\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-rs/src/log/shim.rs", "rank": 93, "score": 119676.48728591808 }, { "content": "fn is_space(c: Option<u8>) -> bool {\n\n c.map(|x| x.is_ascii_whitespace()).unwrap_or(false)\n\n}\n\n\n", "file_path": "deps/ccommon/rust/ccommon-backend/src/option/parse.rs", "rank": 94, "score": 119016.10682881856 }, { "content": "#[proc_macro]\n\npub fn to_lowercase(input: TokenStream) -> TokenStream {\n\n let ident = syn::parse_macro_input!(input as Ident);\n\n let name = ident.to_string().to_ascii_lowercase();\n\n let literal = syn::LitStr::new(&name, ident.span());\n\n let tokens = quote! 
{ #literal };\n\n\n\n tokens.into()\n\n}\n", "file_path": "src/rust/macros/src/lib.rs", "rank": 95, "score": 118957.89903571687 }, { "content": "fn nbuf() -> usize {\n\n STATS_LOG_NBUF\n\n}\n\n\n\n// definitions\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub struct StatsLogConfig {\n\n\t#[cfg_attr(feature = \"serde\", serde(default = \"file\"))]\n\n file: Option<String>,\n\n #[cfg_attr(feature = \"serde\", serde(default = \"nbuf\"))]\n\n nbuf: usize,\n\n}\n\n\n\n// implementation\n\nimpl StatsLogConfig {\n\n pub fn log_file(&self) -> Option<String> {\n\n self.file.clone()\n\n }\n\n\n", "file_path": "deps/ccommon/rust/ccommon-stats/src/lib.rs", "rank": 96, "score": 118042.839606728 }, { "content": "fn single_message_size() -> usize {\n\n SINGLE_MESSAGE_SIZE\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// struct definitions\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Klog {\n\n #[serde(default = \"backup\")]\n\n backup: Option<String>,\n\n #[serde(default = \"file\")]\n\n file: Option<String>,\n\n #[serde(default = \"interval\")]\n\n interval: usize,\n\n #[serde(default = \"max_size\")]\n\n max_size: u64,\n\n #[serde(default = \"queue_depth\")]\n\n queue_depth: usize,\n", "file_path": "src/rust/config/src/klog.rs", "rank": 97, "score": 118042.839606728 }, { "content": "// helper functions\n\nfn buf_sock_poolsize() -> usize {\n\n BUFSOCK_POOLSIZE\n\n}\n\n\n\n// definitions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Sockio {\n\n #[serde(default = \"buf_sock_poolsize\")]\n\n buf_sock_poolsize: usize,\n\n}\n\n\n\n// implementation\n\nimpl Sockio {\n\n pub fn buf_sock_poolsize(&self) -> usize {\n\n self.buf_sock_poolsize\n\n }\n\n}\n\n\n\n// trait implementations\n\nimpl Default for Sockio {\n\n fn default() -> Self {\n\n Self {\n\n buf_sock_poolsize: buf_sock_poolsize(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rust/config/src/sockio.rs", "rank": 98, "score": 118042.839606728 }, { "content": "fn log_queue_depth() -> usize {\n\n LOG_QUEUE_DEPTH\n\n}\n\n\n", "file_path": "src/rust/config/src/debug.rs", "rank": 99, "score": 118042.839606728 } ]
Rust
oxide-7/src/video/mod.rs
coopersimon/oxide-7
538a2946fc32eef922749dcfe86490c1f09ca12e
mod ram; mod render; use std::sync::{ Arc, Mutex }; use bitflags::bitflags; use crate::{ common::Interrupt, constants::{ timing, screen }, }; use ram::VideoMem; pub use render::RenderTarget; type VRamRef = Arc<Mutex<VideoMem>>; bitflags! { #[derive(Default)] struct IntEnable: u8 { const ENABLE_NMI = bit!(7); const ENABLE_IRQ_Y = bit!(5); const ENABLE_IRQ_X = bit!(4); const AUTO_JOYPAD = bit!(0); } } impl IntEnable { fn all_irq() -> IntEnable { IntEnable::ENABLE_IRQ_X | IntEnable::ENABLE_IRQ_Y } } bitflags! { #[derive(Default)] struct PPUStatus: u8 { const V_BLANK = bit!(7); const H_BLANK = bit!(6); } } #[derive(PartialEq)] pub enum PPUSignal { None, Int(Interrupt), HBlank, Delay, FrameStart, } #[derive(Debug, PartialEq)] enum PPUState { HBlankLeft, DrawingBeforePause, DrawingAfterPause, HBlankRight, VBlank } #[derive(Clone, Copy)] pub enum BG { _1, _2, _3, _4 } impl BG { fn all() -> &'static [BG; 4] { const BGS: [BG; 4] = [BG::_1, BG::_2, BG::_3, BG::_4]; &BGS } } pub struct PPU { state: PPUState, mem: VRamRef, cycle_count: usize, scanline: usize, int_enable: IntEnable, status: PPUStatus, nmi_flag: u8, irq_flag: u8, h_timer: u16, h_cycle: usize, v_timer: u16, h_irq_latch: bool, renderer: render::RenderThread, } impl PPU { pub fn new() -> Self { let mem = Arc::new(Mutex::new(VideoMem::new())); PPU { state: PPUState::VBlank, mem: mem.clone(), cycle_count: 0, scanline: 0, int_enable: IntEnable::default(), status: PPUStatus::default(), nmi_flag: 0, irq_flag: 0, h_timer: 0, h_cycle: 0, v_timer: 0, h_irq_latch: false, renderer: render::RenderThread::new(mem), } } pub fn start_frame(&mut self, frame: RenderTarget) { self.renderer.start_frame(frame); } pub fn read_mem(&mut self, addr: u8) -> u8 { self.mem.lock().unwrap().read(addr) } pub fn write_mem(&mut self, addr: u8, data: u8) { self.mem.lock().unwrap().write(addr, data); } pub fn get_status(&mut self) -> u8 { self.status.bits() } pub fn latch_hv(&mut self) -> u8 { self.mem.lock().unwrap().set_latched_hv( (self.cycle_count / timing::DOT_TIME) as u16, self.scanline as u16 ); 0 } pub fn clock(&mut self, cycles: usize) -> PPUSignal { use PPUState::*; self.cycle_count += cycles; let transition = match self.state { VBlank if self.scanline == 0 => Some(PPUTransition::ExitVBlank), DrawingBeforePause if self.cycle_count >= timing::PAUSE_START => Some(PPUTransition::CPUPause), DrawingAfterPause if self.cycle_count >= timing::H_BLANK_TIME => Some(PPUTransition::EnterHBlank), HBlankRight if self.cycle_count >= timing::SCANLINE => Some(PPUTransition::NextLine), HBlankLeft if self.scanline > screen::V_RES => Some(PPUTransition::EnterVBlank), HBlankLeft if (self.cycle_count >= timing::SCANLINE_OFFSET) && (self.scanline <= screen::V_RES) => Some(PPUTransition::ExitHBlank), VBlank if self.cycle_count >= timing::SCANLINE => Some(PPUTransition::NextLine), _ => None }; let signal = if let Some(transition) = transition { self.transition_state(transition) } else { PPUSignal::None }; if signal == PPUSignal::None { if self.check_x_irq() { self.h_irq_latch = true; self.trigger_irq() } else { PPUSignal::None } } else { signal } } pub fn set_int_enable(&mut self, data: u8) { self.int_enable = IntEnable::from_bits_truncate(data); } pub fn set_h_timer_lo(&mut self, data: u8) { self.h_timer = set_lo!(self.h_timer, data); self.h_cycle = (self.h_timer as usize) * timing::DOT_TIME; self.h_irq_latch = false; } pub fn set_h_timer_hi(&mut self, data: u8) { self.h_timer = set_hi!(self.h_timer, data); self.h_cycle = (self.h_timer as usize) * timing::DOT_TIME; self.h_irq_latch = 
false; } pub fn set_v_timer_lo(&mut self, data: u8) { self.v_timer = set_lo!(self.v_timer, data); } pub fn set_v_timer_hi(&mut self, data: u8) { self.v_timer = set_hi!(self.v_timer, data); } pub fn get_nmi_flag(&mut self) -> u8 { std::mem::replace(&mut self.nmi_flag, 0) } pub fn get_irq_flag(&mut self) -> u8 { std::mem::replace(&mut self.irq_flag, 0) } } enum PPUTransition { ExitVBlank, CPUPause, EnterHBlank, NextLine, ExitHBlank, EnterVBlank, } impl PPU { fn transition_state(&mut self, transition: PPUTransition) -> PPUSignal { use PPUTransition::*; match transition { ExitVBlank => { self.nmi_flag = 0; self.irq_flag = 0; self.toggle_vblank(false); self.toggle_hblank(false); self.state = PPUState::DrawingBeforePause; PPUSignal::FrameStart }, CPUPause => { self.state = PPUState::DrawingAfterPause; PPUSignal::Delay }, EnterHBlank => { self.toggle_hblank(true); self.state = PPUState::HBlankRight; PPUSignal::HBlank }, NextLine => { self.cycle_count -= timing::SCANLINE; self.h_irq_latch = false; self.scanline += 1; if self.scanline >= screen::NUM_SCANLINES { self.scanline -= screen::NUM_SCANLINES; } if self.state == PPUState::HBlankRight { self.state = PPUState::HBlankLeft; } if self.check_y_irq() { self.trigger_irq() } else { PPUSignal::None } }, ExitHBlank => { self.toggle_hblank(false); self.renderer.draw_line((self.scanline - 1) as usize); self.state = PPUState::DrawingBeforePause; PPUSignal::None }, EnterVBlank => { self.toggle_vblank(true); self.toggle_hblank(false); { let mut mem = self.mem.lock().unwrap(); mem.oam_reset(); } self.state = PPUState::VBlank; self.trigger_nmi() }, } } fn check_y_irq(&self) -> bool { let enabled = (self.int_enable & IntEnable::all_irq()) == IntEnable::ENABLE_IRQ_Y; enabled && (self.scanline == (self.v_timer as usize)) } fn check_x_irq(&self) -> bool { let irq = self.int_enable & IntEnable::all_irq(); if !self.h_irq_latch { if irq == IntEnable::all_irq() { (self.scanline == (self.v_timer as usize)) && (self.cycle_count >= self.h_cycle) } else if irq == IntEnable::ENABLE_IRQ_X { self.cycle_count >= self.h_cycle } else { false } } else { false } } fn trigger_nmi(&mut self) -> PPUSignal { self.nmi_flag |= bit!(7); if self.int_enable.contains(IntEnable::ENABLE_NMI) { PPUSignal::Int(Interrupt::NMI) } else { PPUSignal::Int(Interrupt::VBLANK) } } fn trigger_irq(&mut self) -> PPUSignal { self.irq_flag |= bit!(7); PPUSignal::Int(Interrupt::IRQ) } fn toggle_vblank(&mut self, vblank: bool) { self.status.set(PPUStatus::V_BLANK, vblank); } fn toggle_hblank(&mut self, hblank: bool) { self.status.set(PPUStatus::H_BLANK, hblank); } }
mod ram; mod render; use std::sync::{ Arc, Mutex }; use bitflags::bitflags; use crate::{ common::Interrupt, constants::{ timing, screen }, }; use ram::VideoMem; pub use render::RenderTarget; type VRamRef = Arc<Mutex<VideoMem>>; bitflags! { #[derive(Default)] struct IntEnable: u8 { const ENABLE_NMI = bit!(7); const ENABLE_IRQ_Y = bit!(5); const ENABLE_IRQ_X = bit!(4); const AUTO_JOYPAD = bit!(0); } } impl IntEnable { fn all_irq() -> IntEnable { IntEnable::ENABLE_IRQ_X | IntEnable::ENABLE_IRQ_Y } } bitflags! { #[derive(Default)] struct PPUStatus: u8 { const V_BLANK = bit!(7); const H_BLANK = bit!(6); } } #[derive(PartialEq)] pub enum PPUSignal { None, Int(Interrupt), HBlank, Delay, FrameStart, } #[derive(Debug, PartialEq)] enum PPUState { HBlankLeft, DrawingBeforePause, DrawingAfterPause, HBlankRight, VBlank } #[derive(Clone, Copy)] pub enum BG { _1, _2, _3, _4 } impl BG { fn all() -> &'static [BG; 4] { const BGS: [BG; 4] = [BG::_1, BG::_2, BG::_3, BG::_4]; &BGS } } pub struct PPU { state: PPUState, mem: VRamRef, cycle_count: usize, scanline: usize, int_enable: IntEnable, status: PPUStatus, nmi_flag: u8, irq_flag: u8, h_timer: u16, h_cycle: usize, v_timer: u16, h_irq_latch: bool, renderer: render::RenderThread, } impl PPU { pub fn new() -> Self { let mem = Arc::new(Mut
= PPUState::VBlank; self.trigger_nmi() }, } } fn check_y_irq(&self) -> bool { let enabled = (self.int_enable & IntEnable::all_irq()) == IntEnable::ENABLE_IRQ_Y; enabled && (self.scanline == (self.v_timer as usize)) } fn check_x_irq(&self) -> bool { let irq = self.int_enable & IntEnable::all_irq(); if !self.h_irq_latch { if irq == IntEnable::all_irq() { (self.scanline == (self.v_timer as usize)) && (self.cycle_count >= self.h_cycle) } else if irq == IntEnable::ENABLE_IRQ_X { self.cycle_count >= self.h_cycle } else { false } } else { false } } fn trigger_nmi(&mut self) -> PPUSignal { self.nmi_flag |= bit!(7); if self.int_enable.contains(IntEnable::ENABLE_NMI) { PPUSignal::Int(Interrupt::NMI) } else { PPUSignal::Int(Interrupt::VBLANK) } } fn trigger_irq(&mut self) -> PPUSignal { self.irq_flag |= bit!(7); PPUSignal::Int(Interrupt::IRQ) } fn toggle_vblank(&mut self, vblank: bool) { self.status.set(PPUStatus::V_BLANK, vblank); } fn toggle_hblank(&mut self, hblank: bool) { self.status.set(PPUStatus::H_BLANK, hblank); } }
ex::new(VideoMem::new())); PPU { state: PPUState::VBlank, mem: mem.clone(), cycle_count: 0, scanline: 0, int_enable: IntEnable::default(), status: PPUStatus::default(), nmi_flag: 0, irq_flag: 0, h_timer: 0, h_cycle: 0, v_timer: 0, h_irq_latch: false, renderer: render::RenderThread::new(mem), } } pub fn start_frame(&mut self, frame: RenderTarget) { self.renderer.start_frame(frame); } pub fn read_mem(&mut self, addr: u8) -> u8 { self.mem.lock().unwrap().read(addr) } pub fn write_mem(&mut self, addr: u8, data: u8) { self.mem.lock().unwrap().write(addr, data); } pub fn get_status(&mut self) -> u8 { self.status.bits() } pub fn latch_hv(&mut self) -> u8 { self.mem.lock().unwrap().set_latched_hv( (self.cycle_count / timing::DOT_TIME) as u16, self.scanline as u16 ); 0 } pub fn clock(&mut self, cycles: usize) -> PPUSignal { use PPUState::*; self.cycle_count += cycles; let transition = match self.state { VBlank if self.scanline == 0 => Some(PPUTransition::ExitVBlank), DrawingBeforePause if self.cycle_count >= timing::PAUSE_START => Some(PPUTransition::CPUPause), DrawingAfterPause if self.cycle_count >= timing::H_BLANK_TIME => Some(PPUTransition::EnterHBlank), HBlankRight if self.cycle_count >= timing::SCANLINE => Some(PPUTransition::NextLine), HBlankLeft if self.scanline > screen::V_RES => Some(PPUTransition::EnterVBlank), HBlankLeft if (self.cycle_count >= timing::SCANLINE_OFFSET) && (self.scanline <= screen::V_RES) => Some(PPUTransition::ExitHBlank), VBlank if self.cycle_count >= timing::SCANLINE => Some(PPUTransition::NextLine), _ => None }; let signal = if let Some(transition) = transition { self.transition_state(transition) } else { PPUSignal::None }; if signal == PPUSignal::None { if self.check_x_irq() { self.h_irq_latch = true; self.trigger_irq() } else { PPUSignal::None } } else { signal } } pub fn set_int_enable(&mut self, data: u8) { self.int_enable = IntEnable::from_bits_truncate(data); } pub fn set_h_timer_lo(&mut self, data: u8) { self.h_timer = set_lo!(self.h_timer, data); self.h_cycle = (self.h_timer as usize) * timing::DOT_TIME; self.h_irq_latch = false; } pub fn set_h_timer_hi(&mut self, data: u8) { self.h_timer = set_hi!(self.h_timer, data); self.h_cycle = (self.h_timer as usize) * timing::DOT_TIME; self.h_irq_latch = false; } pub fn set_v_timer_lo(&mut self, data: u8) { self.v_timer = set_lo!(self.v_timer, data); } pub fn set_v_timer_hi(&mut self, data: u8) { self.v_timer = set_hi!(self.v_timer, data); } pub fn get_nmi_flag(&mut self) -> u8 { std::mem::replace(&mut self.nmi_flag, 0) } pub fn get_irq_flag(&mut self) -> u8 { std::mem::replace(&mut self.irq_flag, 0) } } enum PPUTransition { ExitVBlank, CPUPause, EnterHBlank, NextLine, ExitHBlank, EnterVBlank, } impl PPU { fn transition_state(&mut self, transition: PPUTransition) -> PPUSignal { use PPUTransition::*; match transition { ExitVBlank => { self.nmi_flag = 0; self.irq_flag = 0; self.toggle_vblank(false); self.toggle_hblank(false); self.state = PPUState::DrawingBeforePause; PPUSignal::FrameStart }, CPUPause => { self.state = PPUState::DrawingAfterPause; PPUSignal::Delay }, EnterHBlank => { self.toggle_hblank(true); self.state = PPUState::HBlankRight; PPUSignal::HBlank }, NextLine => { self.cycle_count -= timing::SCANLINE; self.h_irq_latch = false; self.scanline += 1; if self.scanline >= screen::NUM_SCANLINES { self.scanline -= screen::NUM_SCANLINES; } if self.state == PPUState::HBlankRight { self.state = PPUState::HBlankLeft; } if self.check_y_irq() { self.trigger_irq() } else { PPUSignal::None } }, ExitHBlank => { 
self.toggle_hblank(false); self.renderer.draw_line((self.scanline - 1) as usize); self.state = PPUState::DrawingBeforePause; PPUSignal::None }, EnterVBlank => { self.toggle_vblank(true); self.toggle_hblank(false); { let mut mem = self.mem.lock().unwrap(); mem.oam_reset(); } self.state
random
[ { "content": "type CartMappingFn = fn(u8, u16) -> CartDevice;\n\n\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 1, "score": 252137.86479187425 }, { "content": "fn map_hirom_bank(in_bank: u8, mapping: u8, lo_rom: bool, bank_addr: u16) -> u8 {\n\n let mapped_bank = map_bank(in_bank, mapping);\n\n if lo_rom {\n\n if bank_addr >= 0x8000 {\n\n (mapped_bank * 2) + 1\n\n } else {\n\n mapped_bank * 2\n\n }\n\n } else {\n\n mapped_bank\n\n }\n\n}", "file_path": "oxide-7/src/expansion/sa1/mem/mod.rs", "rank": 2, "score": 223483.82814469648 }, { "content": "#[inline]\n\nfn get_mode_7_texel(vram: &[u8], x: usize, y: usize) -> u8 {\n\n // Find tile num.\n\n let tile_x = x / 8;\n\n let tile_y = y / 8;\n\n let tile = (tile_y * 128) + tile_x;\n\n let tile_num = vram[tile * 2] as usize;\n\n\n\n // Find pixel in tile.\n\n let tex_x = x % 8;\n\n let tex_y = y % 8;\n\n let tex_num = (tex_y * 8) + tex_x;\n\n \n\n // Lookup pixel in vram.\n\n let tile_offset = tile_num * 128;\n\n vram[tile_offset + (tex_num * 2) + 1]\n\n}", "file_path": "oxide-7/src/video/render/drawing/mod.rs", "rank": 3, "score": 198832.13223468527 }, { "content": "fn clear_line(target: &mut [u8], y: usize) {\n\n use crate::constants::screen::H_RES;\n\n\n\n for d in target.iter_mut().skip(y * H_RES * 8).take(H_RES * 8) {\n\n *d = 0;\n\n }\n\n}", "file_path": "oxide-7/src/video/render/mod.rs", "rank": 4, "score": 185406.74917752366 }, { "content": "fn map_bank(in_bank: u8, mapping: u8) -> u8 {\n\n let lo = lo_nybble!(in_bank);\n\n let hi = (mapping % 8) << 4;\n\n hi | lo\n\n}\n\n\n", "file_path": "oxide-7/src/expansion/sa1/mem/mod.rs", "rank": 5, "score": 172817.41629470972 }, { "content": "pub fn step_size(param: u8) -> Option<usize> {\n\n match param {\n\n 0x00 => None,\n\n 0x01 => Some(2048),\n\n 0x02 => Some(1536),\n\n 0x03 => Some(1280),\n\n 0x04 => Some(1024),\n\n 0x05 => Some(768),\n\n 0x06 => Some(640),\n\n 0x07 => Some(512),\n\n 0x08 => Some(384),\n\n 0x09 => Some(320),\n\n 0x0A => Some(256),\n\n 0x0B => Some(192),\n\n 0x0C => Some(160),\n\n 0x0D => Some(128),\n\n 0x0E => Some(96),\n\n 0x0F => Some(80),\n\n 0x10 => Some(64),\n\n 0x11 => Some(48),\n", "file_path": "oxide-7/src/audio/dsp/envelope/gain.rs", "rank": 7, "score": 164251.98278013268 }, { "content": "#[inline]\n\nfn sign_extend(val: u16) -> u16 {\n\n if test_bit!(val, 12) {\n\n val | 0xE000\n\n } else {\n\n val & 0x1FFF\n\n }\n\n}", "file_path": "oxide-7/src/video/ram/bgregs.rs", "rank": 8, "score": 149375.5359737318 }, { "content": "// Messages to send to the render thread.\n\nenum RendererMessage {\n\n StartFrame(RenderTarget), // Begin frame, and target the provided byte array.\n\n DrawLine(usize),\n\n}\n\n\n\n// Renderer for video that spawns a thread to render on.\n\npub struct RenderThread {\n\n sender: Sender<RendererMessage>,\n\n receiver: Receiver<()>,\n\n}\n\n\n\nimpl RenderThread {\n\n pub fn new(mem: super::VRamRef) -> Self {\n\n let (send_msg, recv_msg) = bounded(224);\n\n let (send_reply, recv_reply) = bounded(224);\n\n\n\n std::thread::spawn(move || {\n\n use RendererMessage::*;\n\n let mut target = None;\n\n let mut renderer = drawing::Renderer::new();\n", "file_path": "oxide-7/src/video/render/mod.rs", "rank": 9, "score": 138092.8393030044 }, { "content": "// Renderer trait.\n\npub trait Renderable {\n\n fn frame_start(&mut self);\n\n fn draw_line(&mut self, y: u16);\n\n fn frame_end(&mut self);\n\n}\n\n\n\n// Mode\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub enum VideoMode {\n\n _0 = 0,\n\n _1 = 1,\n\n _2 = 2,\n\n _3 = 3,\n\n _4 = 4,\n\n _5 
= 5,\n\n _6 = 6,\n\n _7 = 7\n\n}\n\n\n\nimpl From<u8> for VideoMode {\n", "file_path": "oxide-7/src/video/render/mod.rs", "rank": 10, "score": 135721.9189237412 }, { "content": "// Memory Bus to attach to CPU.\n\npub trait MemBus {\n\n fn read(&mut self, addr: u32) -> (u8, usize);\n\n fn write(&mut self, addr: u32, data: u8) -> usize;\n\n fn clock(&mut self, cycles: usize) -> Interrupt;\n\n}\n\n\n\n/// Random access memory.\n\n#[derive(Default)]\n\npub struct RAM {\n\n data: Vec<u8>\n\n}\n\n\n\nimpl RAM {\n\n pub fn new(size: usize) -> Self {\n\n RAM {\n\n data: vec![0; size]\n\n }\n\n }\n\n\n\n pub fn read(&self, addr: u32) -> u8 {\n", "file_path": "oxide-7/src/mem/mod.rs", "rank": 11, "score": 135096.11073126216 }, { "content": "pub trait SPCMem {\n\n fn read(&mut self, addr: u16) -> u8;\n\n fn write(&mut self, addr: u16, data: u8);\n\n fn clock(&mut self, cycles: usize);\n\n}\n\n\n\npub struct SPCBus {\n\n ram: RAM,\n\n\n\n ipl_rom: &'static [u8; 64],\n\n\n\n // Registers\n\n control: SPCControl,\n\n dsp_reg_addr: u8,\n\n dsp: DSP,\n\n\n\n // Port data sent in from CPU.\n\n ports_cpu_to_apu: [u8; 4],\n\n\n\n // Port data sent out from APU.\n", "file_path": "oxide-7/src/audio/mem/mod.rs", "rank": 12, "score": 132114.86987482526 }, { "content": "pub fn create_sram(file_name: &str, size: usize) -> Result<Box<dyn SRAM>, String> {\n\n if size == 0 {\n\n Ok(Box::new(EmptySRAM::new()))\n\n } else {\n\n let sram = SizedSRAM::new(file_name, size)?;\n\n Ok(Box::new(sram))\n\n }\n\n}\n\n\n\n/// Used in cartridges that have SRAM.\n\npub struct SizedSRAM {\n\n save_file: BufWriter<File>,\n\n ram: RAM,\n\n\n\n mask: u32, // Mask when reading/writing\n\n\n\n dirty: bool,\n\n}\n\n\n\nimpl SizedSRAM {\n", "file_path": "oxide-7/src/mem/rom/sram.rs", "rank": 14, "score": 128132.19800082978 }, { "content": "struct CartMapping {\n\n start_bank: u8,\n\n end_bank: u8,\n\n start_addr: u16,\n\n\n\n addr_mapping: CartMappingFn\n\n}\n\n\n\nimpl CartMapping {\n\n fn new(start_bank: u8, end_bank: u8, start_addr: u16, mapping: CartMappingFn) -> Self {\n\n Self {\n\n start_bank: start_bank,\n\n end_bank: end_bank,\n\n start_addr: start_addr,\n\n addr_mapping: mapping,\n\n }\n\n }\n\n}\n\n\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 15, "score": 127178.7721077572 }, { "content": "struct CartBuilder {\n\n mappings: Vec<CartMapping>,\n\n mapping_mode: CartMappingMode,\n\n\n\n rom: Option<ROM>,\n\n ram: Option<Box<dyn SRAM>>,\n\n expansion: Option<Box<dyn Expansion>>,\n\n\n\n fast_rom: bool,\n\n\n\n name: String\n\n}\n\n\n\nimpl CartBuilder {\n\n fn new(mode: CartMappingMode) -> Self {\n\n CartBuilder {\n\n mappings: Vec::new(),\n\n mapping_mode: mode,\n\n\n\n rom: None,\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 16, "score": 127178.7721077572 }, { "content": "enum CartDevice {\n\n ROM(u8, u16),\n\n RAM(u32),\n\n Expansion(u8, u16)\n\n}\n\n\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 17, "score": 127178.7721077572 }, { "content": "enum CartMappingMode {\n\n Lo,\n\n LoLarge,\n\n Hi,\n\n ExHi,\n\n SA,\n\n SuperFX\n\n}\n\n\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 18, "score": 124020.9410867754 }, { "content": "pub fn debug_mode(snes: &mut SNES) {\n\n // Debug mode.\n\n snes.start_frame();\n\n println!(\"Debug mode.\");\n\n println!(\"Enter 'h' for help.\");\n\n let mut breaks = std::collections::BTreeSet::new();\n\n let mut stack_trace = Vec::new();\n\n loop {\n\n let mut input = String::new();\n\n match std::io::stdin().read_line(&mut input) {\n\n Ok(_) => if 
input.starts_with(\"b:\") {\n\n // Add breakpoint\n\n match u32::from_str_radix(&input[2..].trim(), 16) {\n\n Ok(num) => {\n\n println!(\"Inserted breakpoint at ${:06X}\", num);\n\n breaks.insert(num);\n\n },\n\n Err(e) => println!(\"Invalid breakpoint: {}\", e),\n\n }\n\n } else if input.starts_with(\"c:\") {\n", "file_path": "oxide-7-bin/src/debug/mod.rs", "rank": 19, "score": 122105.68643811112 }, { "content": "pub fn create_cart(cart_path: &str, save_path: &str, dsp_path: Option<&str>) -> Box<Cart> {\n\n let rom_file = File::open(cart_path).expect(&format!(\"Couldn't open file {}\", cart_path));\n\n //let rom_size = rom_file.metadata().expect(\"Couldn't get metadata for file.\").len();\n\n\n\n let mut reader = BufReader::new(rom_file);\n\n let mut header = ROMHeader::new();\n\n\n\n let cart = if header.try_lo(&mut reader) {\n\n let sram = create_sram(save_path, header.sram_size()).expect(\"Couldn't make save file.\");\n\n let name = header.rom_name();\n\n\n\n if header.rom_size() > LOROM_LARGE_SIZE {\n\n println!(\"LOROM Large {:X}: {}\", header.rom_mapping(), name);\n\n Cart::new_lorom_large(reader, sram)\n\n } else {\n\n println!(\"LOROM {:X}: {}\", header.rom_mapping(), name);\n\n Cart::new_lorom(reader, sram)\n\n }.named(name)\n\n .fast_rom(header.fast_rom())\n\n\n", "file_path": "oxide-7/src/mem/rom/mod.rs", "rank": 20, "score": 119856.17552274684 }, { "content": "// Convert master cycles into SPC cycles.\n\n// SNES clock: 21_442_080 Hz\n\n// SPC clock: 1_024_000 Hz\n\nfn calc_cycles(master_cycles: usize) -> f64 {\n\n (master_cycles as f64) * SPC_RATIO\n\n}", "file_path": "oxide-7/src/audio/mod.rs", "rank": 21, "score": 110988.30769056921 }, { "content": "// Step the CPU, and add the PC to the stack trace if it calls.\n\nfn step_and_trace(snes: &mut SNES, stack_trace: &mut Vec<u32>, print: bool) {\n\n let instr = snes.get_instr();\n\n match instr[0] {\n\n 0x22 | 0x20 | 0xFC => {\n\n stack_trace.push(make24!(snes.get_state().pb, snes.get_state().pc));\n\n },\n\n 0x6B | 0x60 => {\n\n stack_trace.pop();\n\n },\n\n _ => {}\n\n }\n\n\n\n if print {\n\n let state = snes.get_state();\n\n let pc = make24!(state.pb, state.pc);\n\n println!(\"${:06X}: ${:02X} ({:02X} {:02X} {:02X})\", pc, instr[0], instr[1], instr[2], instr[3]);\n\n }\n\n\n\n if snes.step() {\n\n snes.start_frame();\n\n }\n\n}", "file_path": "oxide-7-bin/src/debug/mod.rs", "rank": 22, "score": 101156.36477162017 }, { "content": "#[inline]\n\nfn sign_extend_4(val: u8) -> i8 {\n\n if test_bit!(val, 3, u8) {\n\n (val | 0xF0) as i8\n\n } else {\n\n val as i8\n\n }\n\n}\n\n\n", "file_path": "oxide-7/src/audio/dsp/brr.rs", "rank": 23, "score": 97908.31309306914 }, { "content": "enum ScreenHeight {\n\n _128,\n\n _160,\n\n _192,\n\n Obj\n\n}\n\n\n\nimpl From<ScreenMode> for ScreenHeight {\n\n fn from(val: ScreenMode) -> Self {\n\n if val.contains(ScreenMode::HT0 | ScreenMode::HT1) {\n\n ScreenHeight::Obj\n\n } else if val.contains(ScreenMode::HT0) {\n\n ScreenHeight::_160\n\n } else if val.contains(ScreenMode::HT1) {\n\n ScreenHeight::_192\n\n } else {\n\n ScreenHeight::_128\n\n }\n\n }\n\n}\n\n\n\n// Represents a row of 8 pixels.\n\n// Each pixel is 2, 4, or 8 bits.\n", "file_path": "oxide-7/src/expansion/superfx/pixelcache.rs", "rank": 24, "score": 96119.79108523217 }, { "content": "#[derive(Clone, Copy)]\n\nenum WindowOp {\n\n OR,\n\n AND,\n\n XOR,\n\n XNOR\n\n}\n\n\n\nimpl From<u8> for WindowOp {\n\n // Should be from a 2-bit value.\n\n fn from(val: u8) -> Self {\n\n const MASK_LOGIC_OR: u8 = 0;\n\n const MASK_LOGIC_AND: u8 = 
[... tail of the retrieval-context list for a preceding record: JSON-escaped code snippets (fields "content", "file_path", "rank", "score") drawn from the oxide-7 SNES emulator sources — video/ram (windowregs.rs, mod.rs), video/render (mod.rs, drawing/mod.rs, drawing/types.rs), mem (mod.rs, bus.rs, rom/mod.rs, rom/sram.rs), audio (mem/mod.rs, dsp/brr.rs), expansion (mod.rs, sa1/mem/arith.rs), and oxide-7-bin/src/debug/mod.rs; the list closes here ...]
lang: Rust
file_path: src/limit.rs
repo_name: phip1611/spectrum-analyzer
commit: 8f8eb2cd91768222a7ede072c2c5353788238256
/* MIT License Copyright (c) 2021 Philipp Schuster Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[derive(Debug, Copy, Clone)] pub enum FrequencyLimit { All, Min(f32), Max(f32), Range(f32, f32), } impl FrequencyLimit { #[inline(always)] pub const fn maybe_min(&self) -> Option<f32> { match self { FrequencyLimit::Min(min) => Some(*min), FrequencyLimit::Range(min, _) => Some(*min), _ => None, } } #[inline(always)] pub const fn maybe_max(&self) -> Option<f32> { match self { FrequencyLimit::Max(max) => Some(*max), FrequencyLimit::Range(_, max) => Some(*max), _ => None, } } #[inline(always)] pub fn min(&self) -> f32 { self.maybe_min().expect("Must contain a value!") } #[inline(always)] pub fn max(&self) -> f32 { self.maybe_max().expect("Must contain a value!") } pub fn verify(&self, max_detectable_frequency: f32) -> Result<(), FrequencyLimitError> { match self { Self::All => Ok(()), Self::Min(x) | Self::Max(x) => { if *x < 0.0 { Err(FrequencyLimitError::ValueBelowMinimum(*x)) } else if *x > max_detectable_frequency { Err(FrequencyLimitError::ValueAboveNyquist(*x)) } else { Ok(()) } } FrequencyLimit::Range(min, max) => { let _ = Self::Min(*min).verify(max_detectable_frequency)?; let _ = Self::Max(*max).verify(max_detectable_frequency)?; if min > max { Err(FrequencyLimitError::InvalidRange(*min, *max)) } else { Ok(()) } } } } } #[derive(Debug)] pub enum FrequencyLimitError { ValueBelowMinimum(f32), ValueAboveNyquist(f32), InvalidRange(f32, f32), } #[cfg(test)] mod tests { use crate::FrequencyLimit; #[test] fn test_panic_min_below_minimum() { let _ = FrequencyLimit::Min(-1.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_min_above_nyquist() { let _ = FrequencyLimit::Min(1.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_max_below_minimum() { let _ = FrequencyLimit::Max(-1.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_max_above_nyquist() { let _ = FrequencyLimit::Max(1.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_range_min() { let _ = FrequencyLimit::Range(-1.0, 0.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_range_max() { let _ = FrequencyLimit::Range(0.0, 1.0).verify(0.0).unwrap_err(); } #[test] fn test_panic_range_order() { let _ = FrequencyLimit::Range(0.0, -1.0).verify(0.0).unwrap_err(); } #[test] fn test_ok() { let _ = FrequencyLimit::Min(50.0).verify(100.0).unwrap(); let _ = FrequencyLimit::Max(50.0).verify(100.0).unwrap(); let _ = FrequencyLimit::Range(50.0, 50.0).verify(100.0).unwrap(); let _ = FrequencyLimit::Range(50.0, 70.0).verify(100.0).unwrap(); } }
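To make the `FrequencyLimit` API above concrete, here is a minimal usage sketch. It assumes the file is consumed through the `spectrum_analyzer` crate (as in the `use spectrum_analyzer::…` doc examples in the retrieved snippets further below), that `samples_fft_to_spectrum(&samples, sampling_rate, limit, None)` is the call shape shown there, and that the maximum detectable frequency passed to `verify` is the Nyquist frequency, i.e. half the sampling rate. The concrete sample values and limit bounds are illustrative only.

```rust
use spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit};

fn main() {
    // Illustrative input: 1024 samples of a low-frequency tone at 44.1 kHz.
    // Real code would pass (ideally windowed) audio samples; the length must
    // be a power of two.
    let samples: Vec<f32> = (0..1024).map(|i| (i as f32 * 0.05).sin()).collect();
    let sampling_rate = 44_100_u32;
    // Only frequencies up to half the sampling rate (Nyquist) are detectable.
    let max_detectable_frequency = sampling_rate as f32 / 2.0;

    // Restrict the analysis to 50 Hz ..= 12 kHz and validate the bounds first.
    let limit = FrequencyLimit::Range(50.0, 12_000.0);
    limit
        .verify(max_detectable_frequency)
        .expect("limit must lie within [0, Nyquist] and min <= max");
    assert_eq!(limit.maybe_min(), Some(50.0));
    assert_eq!(limit.maybe_max(), Some(12_000.0));

    // The validated limit is handed to the FFT helper (same call shape as in
    // the snippets further below); scaling is optional, hence `None`.
    let spectrum = samples_fft_to_spectrum(&samples, sampling_rate, limit, None).unwrap();
    let _ = spectrum;
}
```

Validating the limit up front surfaces `ValueBelowMinimum`, `ValueAboveNyquist`, or `InvalidRange` problems before any FFT work is done.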
function_block-function_prefix_line
[ { "content": "/// Creates a sine (sinus) wave function for a given frequency.\n\n/// Don't forget to scale up the value to the audio resolution.\n\n/// So far, amplitude is in interval `[-1; 1]`. The parameter\n\n/// of the returned function is the point in time in seconds.\n\n///\n\n/// * `frequency` is in Hertz\n\npub fn sine_wave(frequency: f32) -> Box<dyn Fn(f32) -> f32> {\n\n Box::new(move |t| (t * frequency * 2.0 * PI).sin())\n\n}\n\n\n\n/// See [`sine_wave_audio_data_multiple`]\n", "file_path": "src/tests/sine.rs", "rank": 0, "score": 114647.95336310664 }, { "content": "/// Applies a Hamming window (<https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows>)\n\n/// to an array of samples.\n\n///\n\n/// ## Return value\n\n/// New vector with Hann window applied to the values.\n\npub fn hamming_window(samples: &[f32]) -> Vec<f32> {\n\n let mut windowed_samples = Vec::with_capacity(samples.len());\n\n let samples_len_f32 = samples.len() as f32;\n\n for (i, sample) in samples.iter().enumerate() {\n\n let multiplier = 0.54 - (0.46 * (2.0 * PI * i as f32 / cosf(samples_len_f32 - 1.0)));\n\n windowed_samples.push(multiplier * sample)\n\n }\n\n windowed_samples\n\n}\n\n\n", "file_path": "src/windows.rs", "rank": 1, "score": 109039.08952846963 }, { "content": "/// Applies a Hann window (<https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows>)\n\n/// to an array of samples.\n\n///\n\n/// ## Return value\n\n/// New vector with Hann window applied to the values.\n\npub fn hann_window(samples: &[f32]) -> Vec<f32> {\n\n let mut windowed_samples = Vec::with_capacity(samples.len());\n\n let samples_len_f32 = samples.len() as f32;\n\n for (i, sample) in samples.iter().enumerate() {\n\n let two_pi_i = 2.0 * PI * i as f32;\n\n let idontknowthename = cosf(two_pi_i / samples_len_f32);\n\n let multiplier = 0.5 * (1.0 - idontknowthename);\n\n windowed_samples.push(multiplier * sample)\n\n }\n\n windowed_samples\n\n}\n\n\n", "file_path": "src/windows.rs", "rank": 2, "score": 109039.08952846963 }, { "content": "/// Applies a Blackman-Harris 7-term window to an array of samples.\n\n///\n\n/// ## More information\n\n/// * <https://en.wikipedia.org/wiki/Window_function#Blackman%E2%80%93Harris_window>\n\n/// * <https://ieeexplore.ieee.org/document/940309>\n\n/// * <https://dsp.stackexchange.com/questions/51095/seven-term-blackman-harris-window>\n\n///\n\n/// ## Return value\n\n/// New vector with Blackman-Harris 7-term window applied to the values.\n\npub fn blackman_harris_7term(samples: &[f32]) -> Vec<f32> {\n\n // constants come from here:\n\n // https://dsp.stackexchange.com/questions/51095/seven-term-blackman-harris-window\n\n const ALPHA: [f32; 7] = [\n\n 0.271_051_4,\n\n -0.433_297_93,\n\n 0.218_123,\n\n -0.065_925_45,\n\n 0.010_811_742,\n\n -0.000_776_584_84,\n\n 0.000_013_887_217,\n\n ];\n\n\n\n blackman_harris_xterm(samples, &ALPHA)\n\n}\n\n\n", "file_path": "src/windows.rs", "rank": 3, "score": 106453.67128360705 }, { "content": "/// Applies a Blackman-Harris 4-term window (<https://en.wikipedia.org/wiki/Window_function#Blackman%E2%80%93Harris_window>)\n\n/// to an array of samples.\n\n///\n\n/// ## Return value\n\n/// New vector with Blackman-Harris 4-term window applied to the values.\n\npub fn blackman_harris_4term(samples: &[f32]) -> Vec<f32> {\n\n // constants come from here:\n\n // https://en.wikipedia.org/wiki/Window_function#Blackman%E2%80%93Harris_window\n\n const ALPHA: [f32; 4] = [0.35875, -0.48829, 0.14128, -0.01168];\n\n\n\n blackman_harris_xterm(samples, 
&ALPHA)\n\n}\n\n\n", "file_path": "src/windows.rs", "rank": 4, "score": 106451.33902769917 }, { "content": "#[allow(non_snake_case)]\n\npub fn divide_by_N(val: f32, stats: &SpectrumDataStats) -> f32 {\n\n if stats.n == 0.0 {\n\n val\n\n } else {\n\n val / stats.n\n\n }\n\n}\n\n\n", "file_path": "src/scaling.rs", "rank": 5, "score": 98678.23400676702 }, { "content": "/// Scales each frequency value/amplitude in the spectrum to interval `[0.0; 1.0]`.\n\n/// Function is of type [`SpectrumScalingFunction`]. Expects that [`SpectrumDataStats::min`] is\n\n/// not negative.\n\npub fn scale_to_zero_to_one(val: f32, stats: &SpectrumDataStats) -> f32 {\n\n // usually not the case, except you use other scaling functions first,\n\n // that transforms the value to a negative one\n\n /*if stats.min < 0.0 {\n\n val = val + stats.min;\n\n }*/\n\n if stats.max != 0.0 {\n\n val / stats.max\n\n } else {\n\n 0.0\n\n }\n\n}\n\n\n\n/// Divides each value by N. Several resources recommend that the FFT result should be divided\n\n/// by the length of samples, so that values of different samples lengths are comparable.\n", "file_path": "src/scaling.rs", "rank": 6, "score": 96601.16845106454 }, { "content": "/// Calculates the base 10 logarithm of each frequency magnitude and\n\n/// multiplies it with 20. This scaling is quite common, you can\n\n/// find more information for example here:\n\n/// <https://www.sjsu.edu/people/burford.furman/docs/me120/FFT_tutorial_NI.pdf>\n\n///\n\n/// ## Usage\n\n/// ```rust\n\n///use spectrum_analyzer::{samples_fft_to_spectrum, scaling, FrequencyLimit};\n\n///let window = [0.0, 0.1, 0.2, 0.3]; // add real data here\n\n///let spectrum = samples_fft_to_spectrum(\n\n/// &window,\n\n/// 44100,\n\n/// FrequencyLimit::All,\n\n/// Some(&scaling::scale_20_times_log10),\n\n/// );\n\n/// ```\n\n/// Function is of type [`SpectrumScalingFunction`].\n\npub fn scale_20_times_log10(frequency_magnitude: f32, _stats: &SpectrumDataStats) -> f32 {\n\n debug_assert!(!frequency_magnitude.is_infinite());\n\n debug_assert!(!frequency_magnitude.is_nan());\n\n debug_assert!(frequency_magnitude >= 0.0);\n\n if frequency_magnitude == 0.0 {\n\n 0.0\n\n } else {\n\n 20.0 * libm::log10f(frequency_magnitude)\n\n }\n\n}\n\n\n", "file_path": "src/scaling.rs", "rank": 7, "score": 94658.93591328897 }, { "content": "fn bench_without_scaling(samples: Vec<f32>) -> u64 {\n\n let fnc = move || samples_fft_to_spectrum(&samples, 44100, FrequencyLimit::All, None).unwrap();\n\n bench_fnc(Box::new(fnc))\n\n}\n\n\n", "file_path": "examples/bench.rs", "rank": 8, "score": 84992.62244623239 }, { "content": "/// Combines several scaling functions into a new single one.\n\n///\n\n/// # Example\n\n/// ```ignored\n\n/// Some(&combined(&[&divide_by_N, &scale_20_times_log10]))\n\n/// ```\n\npub fn combined<'a>(\n\n fncs: &'a [SpectrumScalingFunction<'a>],\n\n) -> Box<dyn Fn(f32, &SpectrumDataStats) -> f32 + 'a> {\n\n Box::new(move |val, stats| {\n\n let mut val = val;\n\n for fnc in fncs {\n\n val = fnc(val, stats);\n\n }\n\n val\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use alloc::vec::Vec;\n\n\n\n #[test]\n\n fn test_scale_to_zero_to_one() {\n\n let data = vec![0.0_f32, 1.1, 2.2, 3.3, 4.4, 5.5];\n", "file_path": "src/scaling.rs", "rank": 9, "score": 81945.41482523736 }, { "content": "/// Takes an array of samples (length must be a power of 2),\n\n/// e.g. 
2048, applies an FFT (using the specified FFT implementation) on it\n\n/// and returns all frequencies with their volume/magnitude.\n\n///\n\n/// By default, no normalization/scaling is done at all and the results,\n\n/// i.e. the frequency magnitudes/amplitudes/values are the raw result from\n\n/// the FFT algorithm, except that complex numbers are transformed\n\n/// to their magnitude.\n\n///\n\n/// * `samples` raw audio, e.g. 16bit audio data but as f32.\n\n/// You should apply an window function (like Hann) on the data first.\n\n/// The final frequency resolution is `sample_rate / (N / 2)`\n\n/// e.g. `44100/(16384/2) == 5.383Hz`, i.e. more samples =>\n\n/// better accuracy/frequency resolution.\n\n/// * `sampling_rate` sampling_rate, e.g. `44100 [Hz]`\n\n/// * `frequency_limit` Frequency limit. See [`FrequencyLimit´]\n\n/// * `scaling_fn` See [`crate::scaling::SpectrumScalingFunction`] for details.\n\n///\n\n/// ## Returns value\n\n/// New object of type [`FrequencySpectrum`].\n\n///\n\n/// ## Examples\n\n/// ### Scaling via dynamic closure\n\n/// ```rust\n\n/// use spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit};\n\n/// // get data from audio source\n\n/// let samples = vec![0.0, 1.1, 5.5, -5.5];\n\n/// let res = samples_fft_to_spectrum(\n\n/// &samples,\n\n/// 44100,\n\n/// FrequencyLimit::All,\n\n/// Some(&|val, info| val - info.min),\n\n/// );\n\n/// ```\n\n/// ### Scaling via static function\n\n/// ```rust\n\n/// use spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit};\n\n/// use spectrum_analyzer::scaling::scale_to_zero_to_one;\n\n/// // get data from audio source\n\n/// let samples = vec![0.0, 1.1, 5.5, -5.5];\n\n/// let res = samples_fft_to_spectrum(\n\n/// &samples,\n\n/// 44100,\n\n/// FrequencyLimit::All,\n\n/// Some(&scale_to_zero_to_one),\n\n/// );\n\n/// ```\n\n///\n\n/// ## Panics\n\n/// * When `samples.len() > 4096` and `microfft` is used (restriction by the crate)\n\npub fn samples_fft_to_spectrum(\n\n samples: &[f32],\n\n sampling_rate: u32,\n\n frequency_limit: FrequencyLimit,\n\n scaling_fn: Option<SpectrumScalingFunction>,\n\n) -> Result<FrequencySpectrum, SpectrumAnalyzerError> {\n\n // everything below two samples is unreasonable\n\n if samples.len() < 2 {\n\n return Err(SpectrumAnalyzerError::TooFewSamples);\n\n }\n\n // do several checks on input data\n\n if samples.iter().any(|x| x.is_nan()) {\n\n return Err(SpectrumAnalyzerError::NaNValuesNotSupported);\n\n }\n\n if samples.iter().any(|x| x.is_infinite()) {\n\n return Err(SpectrumAnalyzerError::InfinityValuesNotSupported);\n\n }\n\n if !is_power_of_two(samples.len()) {\n\n return Err(SpectrumAnalyzerError::SamplesLengthNotAPowerOfTwo);\n\n }\n", "file_path": "src/lib.rs", "rank": 10, "score": 81447.27723229316 }, { "content": "/// Applies a Blackman-Harris x-term window\n\n/// (<https://en.wikipedia.org/wiki/Window_function#Blackman%E2%80%93Harris_window>)\n\n/// to an array of samples. 
The x is specified by `alphas.len()`.\n\n///\n\n/// ## Return value\n\n/// New vector with Blackman-Harris x-term window applied to the values.\n\nfn blackman_harris_xterm(samples: &[f32], alphas: &[f32]) -> Vec<f32> {\n\n let mut windowed_samples = Vec::with_capacity(samples.len());\n\n\n\n let samples_len_f32 = samples.len() as f32;\n\n\n\n for sample in samples.iter() {\n\n // Will result in something like that:\n\n /* ALPHA0\n\n + ALPHA1 * ((2.0 * PI * *samples[i])/samples_len_f32).cos()\n\n + ALPHA2 * ((4.0 * PI * *samples[i])/samples_len_f32).cos()\n\n + ALPHA3 * ((6.0 * PI * *samples[i])/samples_len_f32).cos()\n\n */\n\n\n\n let mut acc = 0.0;\n\n for (alpha_i, alpha) in alphas.iter().enumerate() {\n\n // in 1. iter. 0PI, then 2PI, then 4 PI, then 6 PI\n\n let two_pi_iteration = 2.0 * alpha_i as f32 * PI;\n\n let cos = cosf((two_pi_iteration * sample) / samples_len_f32);\n\n acc += alpha * cos;\n\n }\n\n\n\n windowed_samples.push(acc)\n\n }\n\n\n\n windowed_samples\n\n}\n", "file_path": "src/windows.rs", "rank": 11, "score": 81188.12569283105 }, { "content": "/// Like [`sine_wave_audio_data`] but puts multiple sinus waves on top of each other.\n\n/// Returns a audio signal encoded in 16 bit audio resolution which is the sum of\n\n/// multiple sine waves on top of each other. The amplitudes will be scaled from\n\n/// `[-1; 1]` to `[i16::min_value(); i16::max_value()]`\n\n///\n\n/// * `frequency` frequency in Hz for the sinus wave\n\n/// * `sampling_rate` sampling rate, i.e. 44100Hz\n\n/// * `duration_ms` duration of the audio data in milliseconds\n\npub fn sine_wave_audio_data_multiple(\n\n frequencies: &[f32],\n\n sampling_rate: u32,\n\n duration_ms: u32,\n\n) -> Vec<i16> {\n\n if frequencies.is_empty() {\n\n return vec![];\n\n }\n\n\n\n // Generate all sine wave function\n\n let sine_waves = frequencies\n\n .iter()\n\n .map(|f| sine_wave(*f))\n\n .collect::<Vec<Box<dyn Fn(f32) -> f32>>>();\n\n\n\n // How many samples to generate with each sine wave function\n\n let sample_count = (sampling_rate as f32 * (duration_ms as f32 / 1000.0)) as usize;\n\n\n\n // Calculate the final sine wave\n\n let mut sine_wave = Vec::with_capacity(sample_count);\n", "file_path": "src/tests/sine.rs", "rank": 12, "score": 75319.50395307154 }, { "content": "/// Maps a [`Complex32`] to it's magnitude as `f32`. This is done\n\n/// by calculating `sqrt(re*re + im*im)`. This is required to convert\n\n/// the complex FFT result back to real values.\n\n///\n\n/// ## Parameters\n\n/// * `val` A single value from the FFT output buffer of type [`Complex32`].\n\nfn complex_to_magnitude(val: &Complex32) -> f32 {\n\n // calculates sqrt(re*re + im*im), i.e. 
magnitude of complex number\n\n let sum = val.re * val.re + val.im * val.im;\n\n let sqrt = libm::sqrtf(sum);\n\n debug_assert!(!sqrt.is_nan(), \"sqrt is NaN!\");\n\n sqrt\n\n}\n\n\n\n// idea from https://stackoverflow.com/questions/600293/how-to-check-if-a-number-is-a-power-of-2\n\nconst fn is_power_of_two(num: usize) -> bool {\n\n num != 0 && ((num & (num - 1)) == 0)\n\n}\n\n\n\n// tests module for small unit tests\n\n\n\n#[cfg(test)]\n\nmod tests2 {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_is_power_of_two() {\n\n assert!(!is_power_of_two(0));\n\n assert!(is_power_of_two(1));\n\n assert!(is_power_of_two(2));\n\n assert!(!is_power_of_two(3));\n\n assert!(is_power_of_two(2));\n\n assert!(is_power_of_two(256));\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 13, "score": 70796.83113407533 }, { "content": "#[inline(always)]\n\nfn fft_result_to_spectrum(\n\n samples_len: usize,\n\n fft_result: &[Complex32],\n\n sampling_rate: u32,\n\n frequency_limit: FrequencyLimit,\n\n scaling_fn: Option<SpectrumScalingFunction>,\n\n) -> Result<FrequencySpectrum, SpectrumAnalyzerError> {\n\n let maybe_min = frequency_limit.maybe_min();\n\n let maybe_max = frequency_limit.maybe_max();\n\n\n\n let frequency_resolution = fft_calc_frequency_resolution(sampling_rate, samples_len as u32);\n\n\n\n // collect frequency => frequency value in Vector of Pairs/Tuples\n\n let frequency_vec = fft_result\n\n .iter()\n\n // See https://stackoverflow.com/a/4371627/2891595 for more information as well as\n\n // https://www.gaussianwaves.com/2015/11/interpreting-fft-results-complex-dft-frequency-bins-and-fftshift/\n\n //\n\n // The indices 0 to N/2 (inclusive) are usually the most relevant. Although, index\n\n // N/2-1 is declared as the last useful one on stackoverflow (because in typical applications\n", "file_path": "src/lib.rs", "rank": 14, "score": 69216.32133498593 }, { "content": "#[allow(dead_code)]\n\npub fn sine_wave_audio_data(frequency: f32, sampling_rate: u32, duration_ms: u32) -> Vec<i16> {\n\n sine_wave_audio_data_multiple(&[frequency], sampling_rate, duration_ms)\n\n}\n\n\n", "file_path": "src/tests/sine.rs", "rank": 15, "score": 67313.9887495815 }, { "content": "fn bench_with_scaling(samples: Vec<f32>) -> u64 {\n\n let fnc = move || {\n\n samples_fft_to_spectrum(\n\n &samples,\n\n 44100,\n\n FrequencyLimit::All,\n\n Some(&scaling::divide_by_N),\n\n )\n\n .unwrap()\n\n };\n\n bench_fnc(Box::new(fnc))\n\n}\n\n\n", "file_path": "examples/bench.rs", "rank": 16, "score": 65950.57257347179 }, { "content": "#[test]\n\nfn test_spectrum_frequency_limit_inclusive() {\n\n let sampling_rate = 1024;\n\n let sine_audio = sine_wave_audio_data_multiple(&[512.0], sampling_rate, 1000);\n\n\n\n let sine_audio = sine_audio\n\n .into_iter()\n\n .map(|x| x as f32)\n\n .collect::<Vec<f32>>();\n\n\n\n // frequency resolution will be:\n\n // 1024 / 512 = 2 Hz\n\n // we use even frequency resolution in this example for easy testing\n\n // max detectable frequency here is 512Hz\n\n\n\n let window = hann_window(&sine_audio[0..512]);\n\n\n\n {\n\n let spectrum =\n\n samples_fft_to_spectrum(&window, sampling_rate, FrequencyLimit::Max(400.0), None)\n\n .unwrap();\n", "file_path": "src/tests/mod.rs", "rank": 17, "score": 62223.24360777614 }, { "content": "/// Reads an MP3 and returns the audio data as mono channel + the sample rate in Hertz.\n\nfn read_mp3_to_mono(file: &str) -> (Vec<i16>, u32) {\n\n let mut decoder = Mp3Decoder::new(File::open(file).unwrap());\n\n\n\n let mut sampling_rate = 0;\n\n let mut mono_samples = vec![];\n\n 
loop {\n\n match decoder.next_frame() {\n\n Ok(Mp3Frame {\n\n data: samples_of_frame,\n\n sample_rate,\n\n channels,\n\n ..\n\n }) => {\n\n // that's a bird weird of the original API. Why should channels or sampling\n\n // rate change from frame to frame?\n\n\n\n // Should be constant throughout the MP3 file.\n\n sampling_rate = sample_rate;\n\n\n\n if channels == 2 {\n", "file_path": "examples/mp3-samples.rs", "rank": 18, "score": 59220.22768348776 }, { "content": "#[inline(always)]\n\nfn fft_calc_frequency_resolution(sampling_rate: u32, samples_len: u32) -> f32 {\n\n sampling_rate as f32 / samples_len as f32\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 55801.511391269836 }, { "content": "/// Benchmark can be used to check how changes effect the performance.\n\n/// Always execute with release flag!\n\nfn main() {\n\n // create 2048 random samples\n\n let samples = (0..2048)\n\n .map(|_| rand::random::<i16>())\n\n .map(|x| x as f32)\n\n .collect::<Vec<_>>();\n\n let hann_window = windows::hann_window(&samples);\n\n\n\n let bench_res_without_scaling = bench_without_scaling(hann_window.clone());\n\n let bench_res_with_scaling = bench_with_scaling(hann_window);\n\n\n\n println!(\n\n \"Bench without scaling: avg = {}us per Iteration\",\n\n bench_res_without_scaling\n\n );\n\n println!(\n\n \"Bench with scaling: avg = {}us per Iteration\",\n\n bench_res_with_scaling\n\n );\n\n}\n\n\n", "file_path": "examples/bench.rs", "rank": 20, "score": 46771.86826274308 }, { "content": "/// Minimal example.\n\nfn main() {\n\n // YOU need to implement the samples source; get microphone input for example\n\n let samples: &[f32] = &[0.0, 3.1, 2.7, -1.0, -2.0, -4.0, 7.0, 6.0];\n\n // apply hann window for smoothing; length must be a power of 2 for the FFT\n\n // 2048 is a good starting point with 44100 kHz\n\n let hann_window = hann_window(&samples[0..8]);\n\n // calc spectrum\n\n let spectrum_hann_window = samples_fft_to_spectrum(\n\n // (windowed) samples\n\n &hann_window,\n\n // sampling rate\n\n 44100,\n\n // optional frequency limit: e.g. only interested in frequencies 50 <= f <= 150?\n\n FrequencyLimit::All,\n\n // optional scale\n\n Some(&divide_by_N),\n\n )\n\n .unwrap();\n\n\n\n for (fr, fr_val) in spectrum_hann_window.data().iter() {\n\n println!(\"{}Hz => {}\", fr, fr_val)\n\n }\n\n}\n", "file_path": "examples/minimal.rs", "rank": 21, "score": 46768.6233030683 }, { "content": "fn main() {\n\n println!(\"bass drum example:\");\n\n example__bass_drum_sample();\n\n println!(\"============================\");\n\n println!(\"clap beat example:\");\n\n example__clap_beat_sample();\n\n println!(\"============================\");\n\n println!(\"high hat example:\");\n\n example__high_hat_sample();\n\n}\n\n\n", "file_path": "examples/mp3-samples.rs", "rank": 22, "score": 45257.513624122446 }, { "content": "/// Example that creates a live visualization of the frequency spectrum of realtime audio data\n\n/// **Execute this with `--release`, otherwise it is very laggy!**.\n\nfn main() {\n\n // Contains the data for the spectrum to be visualized. It contains ordered pairs of\n\n // `(frequency, frequency_value)`. 
During each iteration, the frequency value gets\n\n // combined with `max(old_value * smoothing_factor, new_value)`.\n\n let visualize_spectrum: RefCell<Vec<(f64, f64)>> = RefCell::new(vec![(0.0, 0.0); 1024]);\n\n\n\n // Closure that captures `visualize_spectrum`.\n\n let to_spectrum_fn = move |audio: &[f32], sampling_rate| {\n\n let skip_elements = audio.len() - 2048;\n\n // spectrum analysis only of the latest 46ms\n\n let relevant_samples = &audio[skip_elements..skip_elements + 2048];\n\n\n\n // do FFT\n\n let hann_window = hann_window(relevant_samples);\n\n let latest_spectrum = samples_fft_to_spectrum(\n\n &hann_window,\n\n sampling_rate as u32,\n\n FrequencyLimit::All,\n\n Some(&divide_by_N),\n\n )\n", "file_path": "examples/live-visualization.rs", "rank": 23, "score": 45257.513624122446 }, { "content": "// Calculates spectrum via FFT for a given set of samples and applies\n\n// all window functions + plots all\n\nfn to_spectrum_and_plot(\n\n samples: &[f32],\n\n sampling_rate: u32,\n\n filename: &str,\n\n frequency_limit: FrequencyLimit,\n\n) {\n\n let no_window = samples;\n\n\n\n let now = Instant::now();\n\n let hann_window = hann_window(no_window);\n\n println!(\n\n \"[Measurement]: Hann-Window with {} samples took: {}µs\",\n\n samples.len(),\n\n now.elapsed().as_micros()\n\n );\n\n let now = Instant::now();\n\n let hamming_window = hamming_window(no_window);\n\n println!(\n\n \"[Measurement]: Hamming-Window with {} samples took: {}µs\",\n\n samples.len(),\n", "file_path": "examples/mp3-samples.rs", "rank": 24, "score": 43906.119403530225 }, { "content": "#[inline(always)]\n\nfn calculate_y_coord_between_points(\n\n (x1, y1): (f32, f32),\n\n (x2, y2): (f32, f32),\n\n x_coord: f32,\n\n) -> f32 {\n\n // e.g. Points (100, 1.0) and (200, 0.0)\n\n // y=f(x)=-0.01x + c\n\n // 1.0 = f(100) = -0.01x + c\n\n // c = 1.0 + 0.01*100 = 2.0\n\n // y=f(180)=-0.01*180 + 2.0\n\n\n\n // gradient, anstieg\n\n let slope = (y2 - y1) / (x2 - x1);\n\n // calculate c in y=f(x)=slope * x + c\n\n let c = y1 - slope * x1;\n\n\n\n slope * x_coord + c\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/spectrum.rs", "rank": 25, "score": 43906.119403530225 }, { "content": "#[test]\n\nfn test_invalid_input() {\n\n // should not contain NaN\n\n let samples = vec![0.0, 1.0, 2.0, 3.0, f32::NAN, 4.0, 5.0, 6.0];\n\n let err = samples_fft_to_spectrum(&samples, 44100, FrequencyLimit::All, None).unwrap_err();\n\n assert!(matches!(err, SpectrumAnalyzerError::NaNValuesNotSupported));\n\n\n\n // should not contain Infinity\n\n let samples = vec![0.0, 1.0, 2.0, 3.0, f32::INFINITY, 4.0, 5.0, 6.0];\n\n let err = samples_fft_to_spectrum(&samples, 44100, FrequencyLimit::All, None).unwrap_err();\n\n assert!(matches!(\n\n err,\n\n SpectrumAnalyzerError::InfinityValuesNotSupported\n\n ));\n\n\n\n // needs at least two samples\n\n let samples = vec![0.0];\n\n let err = samples_fft_to_spectrum(&samples, 44100, FrequencyLimit::All, None).unwrap_err();\n\n assert!(matches!(err, SpectrumAnalyzerError::TooFewSamples));\n\n\n\n // test frequency limit gets verified\n", "file_path": "src/tests/mod.rs", "rank": 26, "score": 42690.39015398416 }, { "content": "#[test]\n\nfn test_spectrum_power() {\n\n let interesting_frequency = 2048.0;\n\n let sine_audio = sine_wave_audio_data_multiple(&[interesting_frequency], 44100, 1000);\n\n\n\n let sine_audio = sine_audio\n\n .into_iter()\n\n .map(|x| x as f32)\n\n .collect::<Vec<f32>>();\n\n\n\n // FFT frequency accuracy is: sample_rate / (N / 2)\n\n // 44100/(4096/2) = 21.5Hz\n\n\n\n // get a window 
that we want to analyze\n\n // 1/44100 * 4096 => 0.0928s\n\n let short_window = &sine_audio[0..2048];\n\n let long_window = &sine_audio[0..4096];\n\n\n\n let spectrum_short_window =\n\n samples_fft_to_spectrum(short_window, 44100, FrequencyLimit::All, Some(&divide_by_N))\n\n .unwrap();\n", "file_path": "src/tests/mod.rs", "rank": 27, "score": 42690.39015398416 }, { "content": "#[test]\n\nfn test_divide_by_n_has_effect() {\n\n let audio_data = sine_wave_audio_data_multiple(&[100.0, 200.0, 400.0], 1000, 2000);\n\n let audio_data = audio_data.into_iter().map(|x| x as f32).collect::<Vec<_>>();\n\n let audio_data = hann_window(&audio_data[0..1024]);\n\n let normal_spectrum =\n\n samples_fft_to_spectrum(&audio_data, 1000, FrequencyLimit::All, None).unwrap();\n\n let scaled_spectrum =\n\n samples_fft_to_spectrum(&audio_data, 1000, FrequencyLimit::All, Some(&divide_by_N))\n\n .unwrap();\n\n for i in 0..512 {\n\n let actual_no_scaling = normal_spectrum.data()[i].1.val();\n\n let actual_with_scaling = scaled_spectrum.data()[i].1.val();\n\n assert!(\n\n (actual_no_scaling / 1024.0 - actual_with_scaling) < 0.1,\n\n \"[{}] actual_no_scaling={} should be roughly 1024 times bigger than actual_with_scaling={}\", \n\n i,\n\n actual_no_scaling,\n\n actual_with_scaling,\n\n );\n\n }\n", "file_path": "src/tests/mod.rs", "rank": 28, "score": 42690.39015398416 }, { "content": "fn bench_fnc(fnc: Box<dyn Fn() -> FrequencySpectrum>) -> u64 {\n\n // warm-up\n\n for _ in 0..10 {\n\n let _ = fnc();\n\n }\n\n let now = Instant::now();\n\n let runs = 10000;\n\n for _ in 0..runs {\n\n let _ = fnc();\n\n }\n\n let duration = now.elapsed();\n\n (duration.as_micros() / runs) as u64\n\n}\n", "file_path": "examples/bench.rs", "rank": 29, "score": 42028.24522250632 }, { "content": "#[allow(non_snake_case)]\n\nfn example__bass_drum_sample() {\n\n // this sample is exactly 0,628s long\n\n // we have 44100samples/s*0,628s == 28695 samples\n\n // next smaller power of two is: 2^14 == 16384 => FFT needs power of 2\n\n let (samples, sampling_rate) =\n\n read_mp3_to_mono(\"test/samples/bass_drum_with_high-hat_at_end-sample.mp3\");\n\n let samples = samples.into_iter().map(|x| x as f32).collect::<Vec<f32>>();\n\n\n\n to_spectrum_and_plot(\n\n &samples[0..4096],\n\n sampling_rate,\n\n \"example__mp3-samples__bass_drum__spectrum\",\n\n FrequencyLimit::Max(5000.0),\n\n )\n\n}\n\n\n", "file_path": "examples/mp3-samples.rs", "rank": 30, "score": 41590.87339728999 }, { "content": "#[allow(non_snake_case)]\n\nfn example__clap_beat_sample() {\n\n // this sample is exactly 0,379s long\n\n // we have 44100samples/s*0,379s == 16714 samples\n\n // next smaller power of two is: 2^14 == 16384 => FFT needs power of 2\n\n let (samples, sampling_rate) = read_mp3_to_mono(\"test/samples/clap-beat-sample.mp3\");\n\n let samples = samples.into_iter().map(|x| x as f32).collect::<Vec<f32>>();\n\n\n\n to_spectrum_and_plot(\n\n &samples[0..4096],\n\n sampling_rate,\n\n \"example__mp3-samples__clap_beat__spectrum\",\n\n FrequencyLimit::Max(5000.0),\n\n )\n\n}\n\n\n", "file_path": "examples/mp3-samples.rs", "rank": 31, "score": 41590.87339728999 }, { "content": "#[test]\n\nfn test_spectrum_nyquist_theorem2() {\n\n let sine_audio = sine_wave_audio_data_multiple(\n\n // 22050.0 results in aliasing and no good results\n\n &[22049.9],\n\n 44100,\n\n 1000,\n\n )\n\n .into_iter()\n\n .map(|x| x as f32)\n\n .collect::<Vec<f32>>();\n\n let spectrum = samples_fft_to_spectrum(\n\n &sine_audio[0..4096],\n\n 44100,\n\n FrequencyLimit::All,\n\n 
Some(&scale_to_zero_to_one),\n\n )\n\n .unwrap();\n\n assert_eq!(\n\n 0.0,\n\n spectrum.min_fr().val(),\n", "file_path": "src/tests/mod.rs", "rank": 32, "score": 41590.87339728999 }, { "content": "#[allow(non_snake_case)]\n\nfn example__high_hat_sample() {\n\n // this sample is exactly 0,149s long\n\n // we have 44100samples/s*0,149s == 6571 samples\n\n // next smaller power of two is: 2^12 == 4096 => FFT needs power of 2\n\n let (samples, sampling_rate) = read_mp3_to_mono(\"test/samples/high-hat-sample.mp3\");\n\n let samples = samples.into_iter().map(|x| x as f32).collect::<Vec<f32>>();\n\n\n\n to_spectrum_and_plot(\n\n &samples[0..4096],\n\n sampling_rate,\n\n \"example__mp3-samples__high-hat__spectrum\",\n\n FrequencyLimit::All,\n\n )\n\n}\n\n\n", "file_path": "examples/mp3-samples.rs", "rank": 33, "score": 41590.87339728999 }, { "content": "#[test]\n\nfn test_scaling_produces_error() {\n\n let samples = vec![1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8];\n\n let _ = samples_fft_to_spectrum(\n\n &samples,\n\n 44100,\n\n FrequencyLimit::All,\n\n Some(&|_val, _info| f32::NAN),\n\n )\n\n .expect_err(\"Must throw error due to illegal scaling!\");\n\n}\n\n\n\n/// Test that the scaling actually has the effect that we expect it to have.\n", "file_path": "src/tests/mod.rs", "rank": 34, "score": 41590.87339728999 }, { "content": "#[test]\n\nfn test_spectrum_nyquist_theorem() {\n\n let dummy_audio_samples = vec![0.0; 4096];\n\n let spectrum =\n\n samples_fft_to_spectrum(&dummy_audio_samples, 44100, FrequencyLimit::All, None).unwrap();\n\n assert_eq!(\n\n // because indices 0..N/2 (inclusive) of the FFT result are relevant\n\n // => DC component to Nyquist frequency\n\n 4096 / 2 + 1,\n\n spectrum\n\n .data()\n\n .iter()\n\n .map(|x| x.1)\n\n .filter(|x| x.val() == 0.0)\n\n .count(),\n\n \"All frequency values must be exactly zero because the input signal is zero!\"\n\n );\n\n assert_eq!(\n\n 0.0,\n\n spectrum.min_fr().val(),\n\n \"Minimum frequency must be 0 Hz (DS Component/DC bias/Gleichwert)\"\n\n );\n\n assert_eq!(\n\n 44100.0 / 2.0,\n\n spectrum.max_fr().val(),\n\n \"Maximum frequency must be Nyquist frequency\"\n\n );\n\n}\n\n/// Tests that the spectrum contains the Nyquist frequency using a sine wave at almost Nyquist\n\n/// frequency.\n", "file_path": "src/tests/mod.rs", "rank": 35, "score": 41590.87339728999 }, { "content": "#[test]\n\nfn test_only_null_samples_valid() {\n\n let samples = vec![0.0, 0.0];\n\n let _ = samples_fft_to_spectrum(&samples, 44100, FrequencyLimit::All, None).unwrap();\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 36, "score": 41590.87339728999 }, { "content": "#[test]\n\nfn test_spectrum_and_visualize_sine_waves_50_1000_3777hz() {\n\n let sine_audio = sine_wave_audio_data_multiple(&[50.0, 1000.0, 3777.0], 44100, 1000);\n\n\n\n // visualize waveform\n\n waveform_static_plotters_png_visualize(\n\n &sine_audio,\n\n Channels::Mono,\n\n TEST_OUT_DIR,\n\n \"test_spectrum_and_visualize_sine_waves_50_1000_3777hz--WAVEFORM.png\",\n\n );\n\n\n\n let sine_audio = sine_audio\n\n .into_iter()\n\n .map(|x| x as f32)\n\n .collect::<Vec<f32>>();\n\n\n\n // FFT frequency accuracy is: sample_rate / (N / 2)\n\n // 44100/(4096/2) = 21.5Hz\n\n\n\n // get a window that we want to analyze\n", "file_path": "src/tests/mod.rs", "rank": 37, "score": 39679.63982100207 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the 
\"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/fft/rustfft_complex.rs", "rank": 47, "score": 119.3797668027025 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/fft/mod.rs", "rank": 48, "score": 119.37976680270248 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/tests/sine.rs", "rank": 49, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/fft/microfft_real.rs", "rank": 50, "score": 119.3797668027025 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/frequency.rs", "rank": 51, "score": 119.3797668027025 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/scaling.rs", "rank": 52, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/lib.rs", "rank": 53, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/spectrum.rs", "rank": 54, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/mp3-samples.rs", "rank": 55, "score": 119.37976680270252 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/bench.rs", "rank": 56, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "examples/live-visualization.rs", "rank": 57, "score": 119.37976680270252 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/fft/microfft_complex.rs", "rank": 58, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/tests/mod.rs", "rank": 59, "score": 119.37976680270248 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/error.rs", "rank": 60, "score": 119.37976680270249 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "file_path": "src/windows.rs", "rank": 61, "score": 119.3797668027025 }, { "content": "/*\n\nMIT License\n\n\n\nCopyright (c) 2021 Philipp Schuster\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n\n\nuse spectrum_analyzer::scaling::divide_by_N;\n\nuse spectrum_analyzer::windows::hann_window;\n\nuse spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit};\n\n\n\n/// Minimal example.\n", "file_path": "examples/minimal.rs", "rank": 62, "score": 109.57560305233866 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! This module contains convenient public transform functions that you can use\n\n//! as parameters in [`crate::samples_fft_to_spectrum`] for scaling the\n\n//! frequency value (the FFT result). They act as \"idea/inspiration\". Feel free\n\n//! to either compose them or create your own derivation from them.\n\n\n\nuse alloc::boxed::Box;\n\n\n\n/// Helper struct for [`SpectrumScalingFunction`], that gets passed into the\n\n/// function together with the actual value. This structure can be used to scale\n\n/// each value. All properties reference the current data of a\n\n/// [`crate::spectrum::FrequencySpectrum`].\n\n///\n\n/// This uses `f32` in favor of [`crate::FrequencyValue`] because the latter led to\n\n/// some implementation problems.\n\n#[derive(Debug)]\n\npub struct SpectrumDataStats {\n\n /// Minimal frequency value in spectrum.\n", "file_path": "src/scaling.rs", "rank": 63, "score": 24.29572081115358 }, { "content": "#[derive(Debug, Copy, Clone, Default)]\n\npub struct OrderableF32(f32);\n\n\n\nimpl OrderableF32 {\n\n #[inline(always)]\n\n pub const fn val(&self) -> f32 {\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<f32> for OrderableF32 {\n\n #[inline(always)]\n\n fn from(val: f32) -> Self {\n\n debug_assert!(!val.is_nan(), \"NaN-values are not supported!\");\n\n debug_assert!(!val.is_infinite(), \"Infinite-values are not supported!\");\n\n Self(val)\n\n }\n\n}\n\n\n\nimpl Display for OrderableF32 {\n", "file_path": "src/frequency.rs", "rank": 64, "score": 22.92442762340749 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Module for the struct [`OrderableF32`] and the two\n\n//! 
convenient type definitions [`Frequency`] and [`FrequencyValue`].\n\n\n\nuse core::cmp::Ordering;\n\nuse core::fmt::{Display, Formatter, Result};\n\nuse core::ops::{Add, Div, Mul, Sub};\n\n\n\n/// A frequency. A convenient wrapper type around `f32`.\n\npub type Frequency = OrderableF32;\n\n/// The value of a frequency in a frequency spectrum. Convenient wrapper around `f32`.\n\n/// Not necessarily the magnitude of the complex numbers because scaling/normalization\n\n/// functions could have been applied.\n\npub type FrequencyValue = OrderableF32;\n\n\n\n/// Small convenient wrapper around `f32`.\n\n/// Mainly required to make `f32` operable in a sorted tree map.\n\n/// You should only use the type aliases `Frequency` and `FrequencyValue`.\n", "file_path": "src/frequency.rs", "rank": 65, "score": 22.7741969515162 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\nuse audio_visualizer::dynamic::live_input::AudioDevAndCfg;\n\nuse audio_visualizer::dynamic::window_top_btm::{open_window_connect_audio, TransformFn};\n\nuse spectrum_analyzer::scaling::divide_by_N;\n\nuse spectrum_analyzer::windows::hann_window;\n\nuse spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit, FrequencyValue};\n\nuse std::cell::RefCell;\n\nuse std::cmp::max;\n\n\n\n/// Example that creates a live visualization of the frequency spectrum of realtime audio data\n\n/// **Execute this with `--release`, otherwise it is very laggy!**.\n", "file_path": "examples/live-visualization.rs", "rank": 66, "score": 20.27360077212534 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Errors related to the spectrum analysis via FFT. Most probably, the errors will\n\n//! result in wrong input data, before the actual calculation has begun.\n\n//!\n\n//! This module focuses on the \"overall\" errors. More specific errors might be\n\n//! located in submodules.\n\n\n\nuse crate::limit::FrequencyLimitError;\n\n\n\n/// Describes main errors of the library. 
Almost all errors\n\n/// are caused by wrong input.\n\n#[derive(Debug)]\n\npub enum SpectrumAnalyzerError {\n\n /// There must be at least two samples.\n\n TooFewSamples,\n\n /// NaN values in samples are not supported!\n\n NaNValuesNotSupported,\n\n /// Infinity-values (regarding floating point representation) in samples are not supported!\n", "file_path": "src/error.rs", "rank": 67, "score": 20.08583312332127 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n\n\n#![deny(\n\n clippy::all,\n\n clippy::cargo,\n\n clippy::nursery,\n\n // clippy::restriction,\n\n // clippy::pedantic\n\n)]\n\n// now allow a few rules which are denied by the above statement\n\n// --> they are ridiculous and not necessary\n\n#![allow(\n\n clippy::suboptimal_flops,\n\n clippy::redundant_pub_crate,\n\n clippy::fallible_impl_from\n\n)]\n\n#![deny(missing_debug_implementations)]\n\n#![deny(rustdoc::all)]\n", "file_path": "examples/mp3-samples.rs", "rank": 68, "score": 19.222418633593453 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n\n\n#![deny(\n\n clippy::all,\n\n clippy::cargo,\n\n clippy::nursery,\n\n // clippy::restriction,\n\n // clippy::pedantic\n\n)]\n\n// now allow a few rules which are denied by the above statement\n\n// --> they are ridiculous and not necessary\n\n#![allow(\n\n clippy::suboptimal_flops,\n\n clippy::redundant_pub_crate,\n\n clippy::fallible_impl_from\n\n)]\n\n#![deny(missing_debug_implementations)]\n\n#![deny(rustdoc::all)]\n\n\n\nuse std::time::Instant;\n\n\n\nuse spectrum_analyzer::*;\n\n\n\n/// Benchmark can be used to check how changes effect the performance.\n\n/// Always execute with release flag!\n", "file_path": "examples/bench.rs", "rank": 69, "score": 18.40319923657838 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Complex FFT using `microfft::complex`. Results should be equal to the ones from `rustfft`.\n\n//! The difference is that this implementation works in `no_std`-environments but it is\n\n//! limited to a maximum sample length of 4096 (with microfft version 0.4.0)\n\n\n\nuse alloc::vec::Vec;\n\n\n\nuse crate::fft::Fft;\n\nuse core::convert::TryInto;\n\nuse microfft::complex;\n\n\n\n/// The result of a FFT is always complex but because different FFT crates might\n\n/// use different versions of \"num-complex\", each implementation exports\n\n/// it's own version that gets used in lib.rs for binary compatibility.\n\npub use microfft::Complex32;\n\n\n\n/// Dummy struct with no properties but used as a type\n\n/// to implement a concrete FFT strategy using (`microfft::complex`).\n", "file_path": "src/fft/microfft_complex.rs", "rank": 70, "score": 18.052543683653944 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Module for the struct [`FrequencySpectrum`].\n\n\n\nuse crate::error::SpectrumAnalyzerError;\n\nuse crate::frequency::{Frequency, FrequencyValue};\n\nuse crate::scaling::{SpectrumDataStats, SpectrumScalingFunction};\n\nuse alloc::collections::BTreeMap;\n\nuse alloc::vec::Vec;\n\nuse core::cell::{Cell, Ref, RefCell};\n\n\n\n/// Convenient wrapper around the processed FFT result which describes each frequency and\n\n/// its value/amplitude in the analyzed slice of samples. It only consists of the frequencies\n\n/// which were desired, e.g. 
specified via\n\n/// [`crate::limit::FrequencyLimit`] when [`crate::samples_fft_to_spectrum`] was called.\n\n///\n\n/// This means, the spectrum can cover all data from the DC component (0Hz) to the\n\n/// Nyquist frequency.\n\n///\n", "file_path": "src/spectrum.rs", "rank": 71, "score": 17.85281639187906 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Several window functions which you can apply before doing the FFT.\n\n//! For more information:\n\n//! - <https://en.wikipedia.org/wiki/Window_function>\n\n//! - <https://www.youtube.com/watch?v=dCeHOf4cJE0> (FFT and windowing by Texas Instruments)\n\n\n\nuse alloc::vec::Vec;\n\nuse core::f32::consts::PI;\n\n// replacement for std functions like sin and cos in no_std-environments\n\nuse libm::cosf;\n\n\n\n/*/// Describes what window function should be applied to\n\n/// the `samples` parameter of [`crate::samples_fft_to_spectrum`]\n\n/// should be applied before the FFT starts. See\n\n/// https://en.wikipedia.org/wiki/Window_function for more\n\n/// resources.\n\npub enum WindowFn {\n\n\n\n}*/\n\n\n\n/// Applies a Hann window (<https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows>)\n\n/// to an array of samples.\n\n///\n\n/// ## Return value\n\n/// New vector with Hann window applied to the values.\n", "file_path": "src/windows.rs", "rank": 72, "score": 17.12078600896075 }, { "content": "/// All results are related to the sampling rate provided to the library function which\n\n/// creates objects of this struct!\n\n#[derive(Debug, Default)]\n\npub struct FrequencySpectrum {\n\n /// Raw data. Vector is sorted from lowest\n\n /// frequency to highest and data is normalized/scaled\n\n /// according to all applied scaling functions.\n\n data: RefCell<Vec<(Frequency, FrequencyValue)>>,\n\n /// Frequency resolution of the examined samples in Hertz,\n\n /// i.e the frequency steps between elements in the vector\n\n /// inside field [`Self::data`].\n\n frequency_resolution: f32,\n\n /// Number of samples. 
This property must be kept separately, because\n\n /// `data.borrow().len()` might contain less than N elements, if the\n\n /// spectrum was created with a [`crate::limit::FrequencyLimit`] .\n\n samples_len: u32,\n\n /// Average value of frequency value/magnitude/amplitude\n\n /// corresponding to data in [`FrequencySpectrum::data`].\n\n average: Cell<FrequencyValue>,\n\n /// Median value of frequency value/magnitude/amplitude\n", "file_path": "src/spectrum.rs", "rank": 73, "score": 17.08804900828108 }, { "content": " let max_detectable_frequency = sampling_rate as f32 / 2.0;\n\n // verify frequency limit: unwrap error or else ok\n\n let _ = frequency_limit\n\n .verify(max_detectable_frequency)\n\n .map_err(SpectrumAnalyzerError::InvalidFrequencyLimit)?;\n\n\n\n // With FFT we transform an array of time-domain waveform samples\n\n // into an array of frequency-domain spectrum samples\n\n // https://www.youtube.com/watch?v=z7X6jgFnB6Y\n\n\n\n // FFT result has same length as input\n\n // (but when we interpret the result, we don't need all indices)\n\n\n\n // applies the f32 samples onto the FFT algorithm implementation\n\n // chosen at compile time (via Cargo feature).\n\n // If a complex FFT implementation was chosen, this will internally\n\n // transform all data to Complex numbers.\n\n let buffer = FftImpl::fft_apply(samples);\n\n\n\n // This function:\n", "file_path": "src/lib.rs", "rank": 74, "score": 17.043152895653368 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Module for generating synthetic sine waves.\n\n\n\nuse alloc::boxed::Box;\n\nuse alloc::vec::Vec;\n\nuse core::f32::consts::PI;\n\n\n\n/// Creates a sine (sinus) wave function for a given frequency.\n\n/// Don't forget to scale up the value to the audio resolution.\n\n/// So far, amplitude is in interval `[-1; 1]`. 
The parameter\n\n/// of the returned function is the point in time in seconds.\n\n///\n\n/// * `frequency` is in Hertz\n", "file_path": "src/tests/sine.rs", "rank": 75, "score": 16.904379108458027 }, { "content": " #[inline(always)]\n\n pub fn range(&self) -> FrequencyValue {\n\n self.max().1 - self.min().1\n\n }\n\n\n\n /// Returns the underlying data.\n\n #[inline(always)]\n\n pub fn data(&self) -> Ref<Vec<(Frequency, FrequencyValue)>> {\n\n self.data.borrow()\n\n }\n\n\n\n /// Returns the frequency resolution of this spectrum.\n\n #[inline(always)]\n\n pub const fn frequency_resolution(&self) -> f32 {\n\n self.frequency_resolution\n\n }\n\n\n\n /// Returns the number of samples used to obtain this spectrum.\n\n #[inline(always)]\n\n pub const fn samples_len(&self) -> u32 {\n", "file_path": "src/spectrum.rs", "rank": 76, "score": 16.878767880314125 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl Ord for OrderableF32 {\n\n #[inline(always)]\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n self.partial_cmp(other).unwrap()\n\n }\n\n}\n\n\n\nimpl Eq for OrderableF32 {}\n\n\n\nimpl PartialEq for OrderableF32 {\n\n #[inline(always)]\n\n fn eq(&self, other: &Self) -> bool {\n\n matches!(self.cmp(other), Ordering::Equal)\n\n }\n\n}\n", "file_path": "src/frequency.rs", "rank": 77, "score": 16.79715204873362 }, { "content": " /// then `get_frequency_value_closest(320)` will return `(300.0, 0.5)`.\n\n ///\n\n /// ## Panics\n\n /// If parameter `search_fre` (frequency) is below the lowest or the maximum\n\n /// frequency, this function panics!\n\n ///\n\n /// ## Parameters\n\n /// - `search_fr` The frequency of that you want the amplitude/value in the spectrum.\n\n ///\n\n /// ## Return\n\n /// Closest matching point in spectrum, determined by [`Self::frequency_resolution`].\n\n #[inline(always)]\n\n pub fn freq_val_closest(&self, search_fr: f32) -> (Frequency, FrequencyValue) {\n\n let data = self.data.borrow();\n\n\n\n // lowest frequency in the spectrum\n\n // TODO use minFrequency() and maxFrequency()\n\n let (min_fr, min_fr_val) = data[0];\n\n // highest frequency in the spectrum\n\n let (max_fr, max_fr_val) = data[data.len() - 1];\n", "file_path": "src/spectrum.rs", "rank": 78, "score": 16.779183296853173 }, { "content": " /// frequency, this function panics! 
This is because the user provide\n\n /// the min/max frequency when the spectrum is created and knows about it.\n\n /// This is similar to an intended \"out of bounds\"-access.\n\n ///\n\n /// ## Parameters\n\n /// - `search_fr` The frequency of that you want the amplitude/value in the spectrum.\n\n ///\n\n /// ## Return\n\n /// Either exact value of approximated value, determined by [`Self::frequency_resolution`].\n\n #[inline(always)]\n\n pub fn freq_val_exact(&self, search_fr: f32) -> FrequencyValue {\n\n let data = self.data.borrow();\n\n\n\n // lowest frequency in the spectrum\n\n // TODO use minFrequency() and maxFrequency()\n\n let (min_fr, min_fr_val) = data[0];\n\n // highest frequency in the spectrum\n\n let (max_fr, max_fr_val) = data[data.len() - 1];\n\n\n\n // https://docs.rs/float-cmp/0.8.0/float_cmp/\n", "file_path": "src/spectrum.rs", "rank": 79, "score": 16.756987078292457 }, { "content": "\n\nuse alloc::vec::Vec;\n\n\n\nuse crate::error::SpectrumAnalyzerError;\n\nuse crate::fft::{Complex32, Fft, FftImpl};\n\npub use crate::frequency::{Frequency, FrequencyValue};\n\npub use crate::limit::FrequencyLimit;\n\npub use crate::limit::FrequencyLimitError;\n\nuse crate::scaling::SpectrumScalingFunction;\n\npub use crate::spectrum::FrequencySpectrum;\n\n\n\npub mod error;\n\nmod fft;\n\nmod frequency;\n\nmod limit;\n\npub mod scaling;\n\nmod spectrum;\n\npub mod windows;\n\n\n\n// test module for large \"integration\"-like tests\n", "file_path": "src/lib.rs", "rank": 80, "score": 16.081331840669133 }, { "content": "/// You must take care of, that you don't have division by zero in your function or\n\n/// that the result is NaN or Infinity (regarding IEEE-754). If the result is NaN or Infinity,\n\n/// the library will return `Err`.\n\n///\n\n/// This uses `f32` in favor of [`crate::FrequencyValue`] because the latter led to\n\n/// some implementation problems.\n\npub type SpectrumScalingFunction<'a> = &'a dyn Fn(f32, &SpectrumDataStats) -> f32;\n\n\n\n/// Calculates the base 10 logarithm of each frequency magnitude and\n\n/// multiplies it with 20. This scaling is quite common, you can\n\n/// find more information for example here:\n\n/// <https://www.sjsu.edu/people/burford.furman/docs/me120/FFT_tutorial_NI.pdf>\n\n///\n\n/// ## Usage\n\n/// ```rust\n\n///use spectrum_analyzer::{samples_fft_to_spectrum, scaling, FrequencyLimit};\n\n///let window = [0.0, 0.1, 0.2, 0.3]; // add real data here\n\n///let spectrum = samples_fft_to_spectrum(\n\n/// &window,\n\n/// 44100,\n\n/// FrequencyLimit::All,\n\n/// Some(&scaling::scale_20_times_log10),\n\n/// );\n\n/// ```\n\n/// Function is of type [`SpectrumScalingFunction`].\n", "file_path": "src/scaling.rs", "rank": 81, "score": 16.015905445211438 }, { "content": " pub min: f32,\n\n /// Maximum frequency value in spectrum.\n\n pub max: f32,\n\n /// Average frequency value in spectrum.\n\n pub average: f32,\n\n /// Median frequency value in spectrum.\n\n pub median: f32,\n\n /// Number of samples (`samples.len()`). 
Already\n\n /// casted to f32, to avoid repeatedly casting in a loop for each value.\n\n pub n: f32,\n\n}\n\n\n\n/// Describes the type for a function that scales/normalizes the data inside [`crate::FrequencySpectrum`].\n\n/// The scaling only affects the value/amplitude of the frequency, but not the frequency itself.\n\n/// It gets applied to every single element.\n\n/// ///\n\n/// A scaling function can be used for example to subtract the minimum (`min`) from each value.\n\n/// It is optional to use the second parameter [`SpectrumDataStats`].\n\n/// and the type works with static functions as well as dynamically created closures.\n\n///\n", "file_path": "src/scaling.rs", "rank": 82, "score": 16.012947366781145 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Complex FFT using `rustfft`. Results should be equal to the ones from `microfft`.\n\n//! The difference is that this implementation works only in `std`-environments\n\n//! and can handle sample lengths of more than 4096.\n\n\n\nuse alloc::vec::Vec;\n\n\n\nuse crate::fft::Fft as FftAbstraction;\n\nuse rustfft::algorithm::Radix4;\n\nuse rustfft::{Fft, FftDirection};\n\n\n\n/// The result of a FFT is always complex but because different FFT crates might\n\n/// use different versions of \"num-complex\", each implementation exports\n\n/// it's own version that gets used in lib.rs for binary compatibility.\n\npub use rustfft::num_complex::Complex32;\n\n\n\n/// Dummy struct with no properties but used as a type\n\n/// to implement a concrete FFT strategy using (`rustfft::algorithm::Radix4`).\n", "file_path": "src/fft/rustfft_complex.rs", "rank": 83, "score": 15.827613933257087 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Test module for \"integration\"-like tests. No small unit tests of simple functions.\n\n\n\nuse crate::error::SpectrumAnalyzerError;\n\nuse crate::scaling::{divide_by_N, scale_to_zero_to_one};\n\nuse crate::tests::sine::sine_wave_audio_data_multiple;\n\nuse crate::windows::{hamming_window, hann_window};\n\nuse crate::{samples_fft_to_spectrum, FrequencyLimit};\n\nuse alloc::vec::Vec;\n\nuse audio_visualizer::spectrum::plotters_png_file::spectrum_static_plotters_png_visualize;\n\nuse audio_visualizer::waveform::plotters_png_file::waveform_static_plotters_png_visualize;\n\nuse audio_visualizer::Channels;\n\nuse core::cmp::max;\n\n\n\n// /// Directory with test samples (e.g. mp3) can be found here.\n\n// const TEST_SAMPLES_DIR: &str = \"test/samples\";\n\n/// If tests create files, they should be stored here.\n\nconst TEST_OUT_DIR: &str = \"test/out\";\n\n\n\nmod sine;\n\n\n\n#[test]\n", "file_path": "src/tests/mod.rs", "rank": 84, "score": 15.73368956470836 }, { "content": " /// * `samples_len` Number of samples. 
Might be bigger than `data.len()`\n\n /// if the spectrum is obtained with a frequency limit.\n\n #[inline(always)]\n\n pub fn new(\n\n data: Vec<(Frequency, FrequencyValue)>,\n\n frequency_resolution: f32,\n\n samples_len: u32,\n\n ) -> Self {\n\n debug_assert!(\n\n data.len() >= 2,\n\n \"Input data of length={} for spectrum makes no sense!\",\n\n data.len()\n\n );\n\n\n\n let obj = Self {\n\n data: RefCell::new(data),\n\n frequency_resolution,\n\n samples_len,\n\n // default/placeholder values\n\n average: Cell::new(FrequencyValue::from(-1.0)),\n", "file_path": "src/spectrum.rs", "rank": 85, "score": 15.551846033777903 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! Real FFT using `microfft::real`.\n\n//! Works in `no_std`-environments, maximum sample length is 4096 (with microfft version 0.4.0)\n\n//! and it's faster than a \"typical\" complex FFT.\n\n\n\nuse alloc::vec::Vec;\n\n\n\nuse crate::fft::Fft;\n\nuse core::convert::TryInto;\n\nuse microfft::real;\n\n\n\n/// The result of a FFT is always complex but because different FFT crates might\n\n/// use different versions of \"num-complex\", each implementation exports\n\n/// it's own version that gets used in lib.rs for binary compatibility.\n\npub use microfft::Complex32;\n\n\n\n/// Dummy struct with no properties but used as a type\n\n/// to implement a concrete FFT strategy using (`microfft::real`).\n", "file_path": "src/fft/microfft_real.rs", "rank": 86, "score": 15.461666059017839 }, { "content": "## How to use FFT to get a frequency spectrum?\n\n\n\nThis library is full of additional and useful links and comments about how an FFT result \n\ncan be used to get a frequency spectrum. In this document I want to give a short introduction \n\nwhere inside the code you can find specific things.\n\n\n\n**TL;DR:** Although this crate has 1400 lines of code, **the part which gets the frequency and\n\ntheir values from the FFT is small and simple**. Most of the code is related to my convenient \n\nabstraction over the FFT result including several getters, transform/scaling functions, and\n\ntests.\n\n\n\n**I don't explain how FFT works but how you use the result!**\n\nIf you want to understand that too:\n\n\n\n- check out all links provided [at the end of README.md](/README.md)\n\n- look into `lib.rs` (**probalby gives you 90 percent of the things you want to know**) \n\n and the comments over the FFT abstraction in `src/fft/mod.rs` and \n\n `src/fft/rustfft-complex/mod.rs`.\n\n \n\n\n\nThis is everything important you need. Everything inside \n\n `spectrum.rs` and the other files is just convenient stuff + tests for when you \n\nwant to use this crate in your program.\n", "file_path": "EDUCATIONAL.md", "rank": 87, "score": 14.86226090239834 }, { "content": " median: Cell::new(FrequencyValue::from(-1.0)),\n\n min: Cell::new((Frequency::from(-1.0), FrequencyValue::from(-1.0))),\n\n max: Cell::new((Frequency::from(-1.0), FrequencyValue::from(-1.0))),\n\n };\n\n // IMPORTANT!!\n\n obj.calc_statistics();\n\n obj\n\n }\n\n\n\n /// Applies the function `scaling_fn` to each element and updates\n\n /// `min`, `max`, etc. afterwards accordingly. 
It ensures that no value\n\n /// is `NaN` or `Infinity` afterwards (regarding IEEE-754).\n\n ///\n\n /// ## Parameters\n\n /// * `scaling_fn` See [`crate::scaling::SpectrumScalingFunction`].\n\n #[inline(always)]\n\n pub fn apply_scaling_fn(\n\n &self,\n\n scaling_fn: SpectrumScalingFunction,\n\n ) -> Result<(), SpectrumAnalyzerError> {\n", "file_path": "src/spectrum.rs", "rank": 88, "score": 14.74697196284392 }, { "content": "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n*/\n\n//! A simple and fast `no_std` library to get the frequency spectrum of a digital signal\n\n//! (e.g. audio) using FFT. It follows the KISS principle and consists of simple building\n\n//! blocks/optional features.\n\n//!\n\n//! In short, this is a convenient wrapper around a FFT implementation. You choose the\n\n//! implementation at compile time via Cargo features. This crate uses\n\n//! \"microfft\" by default for FFT. See README for more advise.\n\n//!\n\n//! ## Examples\n\n//! ### Scaling via dynamic closure\n\n//! ```rust\n\n//! use spectrum_analyzer::{samples_fft_to_spectrum, FrequencyLimit};\n\n//! // get data from audio source\n\n//! let samples = vec![0.0, 1.1, 5.5, -5.5];\n\n//! let res = samples_fft_to_spectrum(\n\n//! &samples,\n\n//! 44100,\n", "file_path": "src/lib.rs", "rank": 89, "score": 14.736695211046367 }, { "content": " let avg = sum / data_sorted.len() as f32;\n\n let average: FrequencyValue = avg.into();\n\n\n\n let median = {\n\n // we assume that data_sorted.length() is always even, because\n\n // it must be a power of 2 (for FFT)\n\n let a = data_sorted[data_sorted.len() / 2 - 1].1;\n\n let b = data_sorted[data_sorted.len() / 2].1;\n\n (a + b) / 2.0.into()\n\n };\n\n\n\n // because we sorted the vector a few lines above\n\n // by the value, the following lines are correct\n\n // i.e. we get min/max value with corresponding frequency\n\n let min = data_sorted[0];\n\n let max = data_sorted[data_sorted.len() - 1];\n\n\n\n // check that I get the comparison right (and not from max to min)\n\n debug_assert!(min.1 <= max.1, \"min must be <= max\");\n\n\n", "file_path": "src/spectrum.rs", "rank": 90, "score": 13.728834428644713 }, { "content": "pub struct FftImpl;\n\n\n\nimpl FftImpl {\n\n /// Converts all samples to a complex number (imaginary part is set to zero)\n\n /// as preparation for the FFT.\n\n ///\n\n /// ## Parameters\n\n /// `samples` Input samples.\n\n ///\n\n /// ## Return value\n\n /// New vector with elements of FFT output/result.\n\n #[inline(always)]\n\n fn samples_to_complex(samples: &[f32]) -> Vec<Complex32> {\n\n samples\n\n .iter()\n\n .map(|x| Complex32::new(*x, 0.0))\n\n .collect::<Vec<Complex32>>()\n\n }\n\n}\n\n\n", "file_path": "src/fft/microfft_complex.rs", "rank": 91, "score": 13.682238367962356 }, { "content": "pub struct FftImpl;\n\n\n\nimpl FftImpl {\n\n /// Converts all samples to a complex number (imaginary part is set to zero)\n\n /// as preparation for the FFT.\n\n ///\n\n /// ## Parameters\n\n /// `samples` Input samples.\n\n ///\n\n /// ## Return value\n\n /// New vector with elements of FFT output/result.\n\n #[inline(always)]\n\n fn samples_to_complex(samples: &[f32]) -> Vec<Complex32> {\n\n samples\n\n .iter()\n\n .map(|x| Complex32::new(*x, 0.0))\n\n .collect::<Vec<Complex32>>()\n\n }\n\n}\n\n\n", "file_path": "src/fft/rustfft_complex.rs", "rank": 92, "score": 13.682238367962354 }, { "content": "#[cfg(test)]\n\nmod tests;\n\n\n\n/// Takes an array of samples (length must be a power of 2),\n\n/// e.g. 
2048, applies an FFT (using the specified FFT implementation) on it\n\n/// and returns all frequencies with their volume/magnitude.\n\n///\n\n/// By default, no normalization/scaling is done at all and the results,\n\n/// i.e. the frequency magnitudes/amplitudes/values are the raw result from\n\n/// the FFT algorithm, except that complex numbers are transformed\n\n/// to their magnitude.\n\n///\n\n/// * `samples` raw audio, e.g. 16bit audio data but as f32.\n\n/// You should apply an window function (like Hann) on the data first.\n\n/// The final frequency resolution is `sample_rate / (N / 2)`\n\n/// e.g. `44100/(16384/2) == 5.383Hz`, i.e. more samples =>\n\n/// better accuracy/frequency resolution.\n\n/// * `sampling_rate` sampling_rate, e.g. `44100 [Hz]`\n\n/// * `frequency_limit` Frequency limit. See [`FrequencyLimit´]\n\n/// * `scaling_fn` See [`crate::scaling::SpectrumScalingFunction`] for details.\n", "file_path": "src/lib.rs", "rank": 93, "score": 13.36493236885485 }, { "content": " self.min.replace(min);\n\n self.max.replace(max);\n\n self.average.replace(average);\n\n self.median.replace(median);\n\n }\n\n}\n\n\n\n/*impl FromIterator<(Frequency, FrequencyValue)> for FrequencySpectrum {\n\n\n\n #[inline(always)]\n\n fn from_iter<T: IntoIterator<Item=(Frequency, FrequencyValue)>>(iter: T) -> Self {\n\n // 1024 is just a guess: most likely 2048 is a common FFT length,\n\n // i.e. 1024 results for the frequency spectrum.\n\n let mut vec = Vec::with_capacity(1024);\n\n for (fr, val) in iter {\n\n vec.push((fr, val))\n\n }\n\n\n\n FrequencySpectrum::new(vec)\n\n }\n", "file_path": "src/spectrum.rs", "rank": 94, "score": 13.206055849199664 }, { "content": " self.samples_len\n\n }\n\n\n\n /// Getter for the highest frequency that is captured inside this spectrum.\n\n /// Shortcut for `spectrum.data()[spectrum.data().len() - 1].0`.\n\n /// This corresponds to the [`crate::limit::FrequencyLimit`] of the spectrum.\n\n ///\n\n /// This method could return the Nyquist frequency, if there was no Frequency\n\n /// limit while obtaining the spectrum.\n\n #[inline(always)]\n\n pub fn max_fr(&self) -> Frequency {\n\n let data = self.data.borrow();\n\n data[data.len() - 1].0\n\n }\n\n\n\n /// Getter for the lowest frequency that is captured inside this spectrum.\n\n /// Shortcut for `spectrum.data()[0].0`.\n\n /// This corresponds to the [`crate::limit::FrequencyLimit`] of the spectrum.\n\n ///\n\n /// This method could return the DC component, see [`Self::dc_component`].\n", "file_path": "src/spectrum.rs", "rank": 95, "score": 12.8102564697159 }, { "content": " // 1) calculates the corresponding frequency of each index in the FFT result\n\n // 2) filters out unwanted frequencies\n\n // 3) calculates the magnitude (absolute value) at each frequency index for each complex value\n\n // 4) optionally scales the magnitudes\n\n // 5) collects everything into the struct \"FrequencySpectrum\"\n\n fft_result_to_spectrum(\n\n samples.len(),\n\n &buffer,\n\n sampling_rate,\n\n frequency_limit,\n\n scaling_fn,\n\n )\n\n}\n\n\n\n/// Transforms the FFT result into the spectrum by calculating the corresponding frequency of each\n\n/// FFT result index and optionally calculating the magnitudes of the complex numbers if a complex\n\n/// FFT implementation is chosen.\n\n///\n\n/// ## Parameters\n\n/// * `samples_len` Length of samples. This is a dedicated field because it can't always be\n\n/// derived from `fft_result.len()`. 
There are for example differences for\n\n/// `fft_result.len()` in real and complex FFT.\n\n/// * `fft_result` Result buffer from FFT. Has the same length as the samples array.\n\n/// * `sampling_rate` sampling_rate, e.g. `44100 [Hz]`\n\n/// * `frequency_limit` Frequency limit. See [`FrequencyLimit´]\n\n/// * `scaling_fn` See [`crate::scaling::SpectrumScalingFunction`].\n\n///\n\n/// ## Return value\n\n/// New object of type [`FrequencySpectrum`].\n", "file_path": "src/lib.rs", "rank": 96, "score": 12.614851541276277 }, { "content": "\n\n/// Abstraction over different FFT implementations. This is necessary because this crate\n\n/// uses different compile time features to exchange the FFT implementation, i.e. real or complex.\n\n/// Each of them operates on possibly different \"num-complex\"-versions for example.\n\npub(crate) trait Fft<ComplexType> {\n\n /// Applies the FFT on the given implementation. If necessary, the data is converted to a\n\n /// complex number first. The resulting vector contains complex numbers without any\n\n /// normalization/scaling. Usually you calc the magnitude of each complex number on the\n\n /// resulting vector to get the amplitudes of the frequencies in the next step.\n\n ///\n\n /// ## Parameters\n\n /// * `samples` samples for FFT. Length MUST be a power of 2 for FFT, e.g. 1024 or 4096!\n\n ///\n\n /// ## Return\n\n /// Vector of FFT results.\n\n fn fft_apply(samples: &[f32]) -> Vec<ComplexType>;\n\n\n\n /// Returns the relevant results of the FFT result. For complex FFT this is\n\n /// `N/2 + 1`, i.e. indices `0..=N/2` (inclusive end) are relevant. Real FFT\n\n /// implementations might be different here, because they may only have\n", "file_path": "src/fft/mod.rs", "rank": 97, "score": 12.5473572658559 }, { "content": " assert_eq!(\n\n spectrum.max_fr().val(),\n\n sampling_rate as f32 / 2.0,\n\n \"Upper bound frequency must be inclusive!\"\n\n );\n\n }\n\n {\n\n let spectrum = samples_fft_to_spectrum(\n\n &window,\n\n sampling_rate,\n\n FrequencyLimit::Range(412.0, 510.0),\n\n None,\n\n )\n\n .unwrap();\n\n assert_eq!(\n\n spectrum.min_fr().val(),\n\n 412.0,\n\n \"Lower bound frequency must be inclusive!\"\n\n );\n\n assert_eq!(\n\n spectrum.max_fr().val(),\n\n 510.0,\n\n \"Upper bound frequency must be inclusive!\"\n\n );\n\n }\n\n}\n\n\n\n/// Tests that the spectrum contains the Nyquist frequency.\n", "file_path": "src/tests/mod.rs", "rank": 98, "score": 12.532412260433738 }, { "content": " #[inline(always)]\n\n pub fn dc_component(&self) -> Option<FrequencyValue> {\n\n let data = self.data.borrow();\n\n let (maybe_dc_component, dc_value) = &data[0];\n\n if maybe_dc_component.val() == 0.0 {\n\n Some(*dc_value)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Returns the value of the given frequency from the spectrum either exactly or approximated.\n\n /// If `search_fr` is not exactly given in the spectrum, i.e. due to the\n\n /// [`Self::frequency_resolution`], this function takes the two closest\n\n /// neighbors/points (A, B), put a linear function through them and calculates\n\n /// the point C in the middle. This is done by the private function\n\n /// `calculate_y_coord_between_points`.\n\n ///\n\n /// ## Panics\n\n /// If parameter `search_fr` (frequency) is below the lowest or the maximum\n", "file_path": "src/spectrum.rs", "rank": 99, "score": 12.30702806528617 } ]
Rust
predict/src/bin/build_driving_model.rs
ehsanul/brick
291c0783f3b062cf73887cb3581dd92342891165
extern crate bincode; extern crate flate2; extern crate predict; extern crate state; use bincode::serialize_into; use flate2::write::GzEncoder; use flate2::Compression; use predict::driving_model::{DrivingModel, PlayerTransformation, TransformationMap}; use predict::sample; use state::*; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::error::Error; use std::fs::{create_dir_all, File}; use std::io::BufWriter; use std::path::Path; fn build_model_for(control_branch: &str) -> DrivingModel { let all_samples = sample::load_all_samples(&format!("./data/samples/flat_ground/{}/", control_branch)); let mut model = DrivingModel::default(); index_all_samples(&mut model.tick2, &all_samples, 2); index_all_samples(&mut model.tick16, &all_samples, 16); model } fn write_model(path: &Path, model: DrivingModel) -> Result<(), Box<dyn Error>> { let f = BufWriter::new(File::create(path)?); let mut e = GzEncoder::new(f, Compression::default()); Ok(serialize_into(&mut e, &model)?) } fn index_all_samples(indexed: &mut TransformationMap, all_samples: &[Vec<PlayerState>], num_ticks: usize) { for sample in all_samples { if sample.len() < sample::MIN_SAMPLE_LENGTH { println!("bad sample: {:?}", sample[0]); } let mut j = 0; let ratio = FPS as usize / sample::RECORD_FPS; while sample[j..].len() > num_ticks / ratio { let key = sample::normalized_player_rounded(&sample[j]); match indexed.entry(key) { Vacant(e) => { e.insert(PlayerTransformation::from_samples(&sample[j..], num_ticks)); } Occupied(mut e) => { let should_replace = { let existing_transformation = e.get(); let existing_delta_x = (existing_transformation.start_local_vx as f32 - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vx as f32) .abs(); let existing_delta_y = (existing_transformation.start_local_vy as f32 - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vy as f32) .abs(); let existing_delta = (existing_delta_x.powf(2.0) + existing_delta_y.powf(2.0)).sqrt(); let new_lv = sample[j].local_velocity(); let new_delta_x = (new_lv.x - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vx as f32).abs(); let new_delta_y = (new_lv.y - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vy as f32).abs(); let new_delta = (new_delta_x.powf(2.0) + new_delta_y.powf(2.0)).sqrt(); new_delta < existing_delta }; if should_replace { e.insert(PlayerTransformation::from_samples(&sample[j..], num_ticks)); } } }; j += 1; } } } fn main() -> Result<(), Box<dyn Error>> { let control_branches = [ "throttle_straight", "throttle_right", "throttle_left", "boost_straight", "boost_right", "boost_left", ]; let base_path = Path::new("./models/flat_ground"); create_dir_all(&base_path)?; for control_branch in control_branches.iter() { let model = build_model_for(control_branch); let filename = format!("{}.bincode.gz", control_branch); write_model(&base_path.join(filename), model)?; } Ok(()) }
extern crate bincode; extern crate flate2; extern crate predict; extern crate state; use bincode::serialize_into; use flate2::write::GzEncoder; use flate2::Compression; use predict::driving_model::{DrivingModel, PlayerTransformation, TransformationMap}; use predict::sample; use state::*; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::error::Error; use std::fs::{create_dir_all, File}; use std::io::BufWriter; use std::path::Path; fn build_model_for(control_branch: &str) -> DrivingModel { let all_samples = sample::load_all_samples(&format!("./data/samples/flat_ground/{}/", control_branch)); let mut model = DrivingModel::default(); index_all_samples(&mut model.tick2, &all_samples, 2); index_all_samples(&mut model.tick16, &all_samples, 16); model } fn write_model(path: &Path, model: DrivingModel) -> Result<(), Box<dyn Error>> { let f = BufWriter::new(File::create(path)?); let mut e = GzEncoder::new(f, Compression::default()); Ok(serialize_into(&mut e, &model)?) } fn index_all_samples(indexed: &mut TransformationMap, all_samples: &[Vec<PlayerState>], num_ticks: usize) { for sample in all_samples { if sample.len() < sample::MIN_SAMPLE_LENGTH { println!("bad sample: {:?}", sample[0]); } let mut j = 0; let ratio = FPS as usize / sample::RECORD_FPS; while sample[j..].len() > num_ticks / ratio { let key = sample::normalized_player_rounded(&sample[j]); match indexed.entry(key) { Vacant(e) => { e.insert(PlayerTransformation::from_samples(&sample[j..], num_ticks)); } Occupied(mut e) => { let should_replace = { let existing_transformation = e.get(); let existing_delta_x = (existing_transformation.start_local_vx as f32 - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vx as f32) .abs();
let existing_delta = (existing_delta_x.powf(2.0) + existing_delta_y.powf(2.0)).sqrt(); let new_lv = sample[j].local_velocity(); let new_delta_x = (new_lv.x - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vx as f32).abs(); let new_delta_y = (new_lv.y - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vy as f32).abs(); let new_delta = (new_delta_x.powf(2.0) + new_delta_y.powf(2.0)).sqrt(); new_delta < existing_delta }; if should_replace { e.insert(PlayerTransformation::from_samples(&sample[j..], num_ticks)); } } }; j += 1; } } } fn main() -> Result<(), Box<dyn Error>> { let control_branches = [ "throttle_straight", "throttle_right", "throttle_left", "boost_straight", "boost_right", "boost_left", ]; let base_path = Path::new("./models/flat_ground"); create_dir_all(&base_path)?; for control_branch in control_branches.iter() { let model = build_model_for(control_branch); let filename = format!("{}.bincode.gz", control_branch); write_model(&base_path.join(filename), model)?; } Ok(()) }
let existing_delta_y = (existing_transformation.start_local_vy as f32 - sample::GROUND_SPEED_GRID_FACTOR * e.key().local_vy as f32) .abs();
assignment_statement
[ { "content": "fn percentile_value(numbers: &mut Vec<f32>, percentile: f32) -> f32 {\n\n numbers.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Ordering::Less));\n\n let i = (percentile * numbers.len() as f32) as usize / 100;\n\n numbers[i]\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 0, "score": 211561.96753803157 }, { "content": "fn write_data(path: &str, plan: Plan) -> Result<(), Box<dyn Error>> {\n\n let serializable_plan = SerializablePlan(plan);\n\n create_dir_all(&path)?;\n\n let file_path = Path::new(path).join(\"plan.bincode\");\n\n let f = BufWriter::new(File::create(file_path)?);\n\n let mut e = GzEncoder::new(f, Compression::default());\n\n Ok(serialize_into(&mut e, &serializable_plan)?)\n\n}\n\n\n", "file_path": "generate-data/src/main.rs", "rank": 1, "score": 198817.00637231107 }, { "content": "pub fn load_sample_file(path: &PathBuf) -> Vec<PlayerState> {\n\n let mut rdr = csv::ReaderBuilder::new()\n\n .has_headers(false)\n\n .from_reader(fs::File::open(path).unwrap_or_else(|_| panic!(\"File doesn't exist: {}\", path.to_string_lossy())));\n\n let data: Vec<PlayerState> = rdr\n\n .records()\n\n .map(|record| {\n\n let record = record.expect(\"CSV parse failed?\");\n\n let _frame: f32 = record\n\n .get(0)\n\n .expect(\"Invalid row?\")\n\n .parse()\n\n .expect(\"Can't convert time to f32\");\n\n PlayerState {\n\n position: Vector3::new(\n\n record.get(1).expect(\"Invalid row?\").parse().expect(\"Can't convert x to f32\"),\n\n record.get(2).expect(\"Invalid row?\").parse().expect(\"Can't convert y to f32\"),\n\n record.get(3).expect(\"Invalid row?\").parse().expect(\"Can't convert z to f32\"),\n\n ),\n\n velocity: Vector3::new(\n", "file_path": "predict/src/sample.rs", "rank": 2, "score": 190793.92016337195 }, { "content": "fn rms(numbers: &[f32]) -> f32 {\n\n let total_error_squared: f32 = numbers.iter().map(|x| x * x).sum();\n\n (total_error_squared / (numbers.len() as f32)).sqrt()\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 3, "score": 187791.54601372126 }, { "content": "#[allow(dead_code)]\n\nfn restore_snapshot(rlbot: &rlbot::RLBot, bot: &mut BotState, frame: &mut u32, name: &str) -> Result<(), Box<dyn Error>> {\n\n let dir = \"data/snapshots\";\n\n let path = Path::new(dir).join(name.to_owned() + \".bincode.gz\");\n\n let f = BufReader::new(File::open(path)?);\n\n let mut decoder = GzDecoder::new(f);\n\n let (historical_game, historical_bot): (GameState, BotState) = bincode::deserialize_from(&mut decoder)?;\n\n\n\n // replace our bot data with the historical bot\n\n *bot = historical_bot;\n\n\n\n // update RL game state with historical game state\n\n *frame = historical_game.frame;\n\n let player = historical_game.player;\n\n let ball = historical_game.ball;\n\n let car_state = get_desired_car_state(&player);\n\n let ball_state = get_desired_ball_state(&ball);\n\n let desired_game_state = rlbot::DesiredGameState::new().car_state(0, car_state).ball_state(ball_state);\n\n Ok(rlbot.set_game_state(&desired_game_state)?)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 4, "score": 187116.28041781217 }, { "content": "#[allow(non_snake_case)]\n\npub fn calculate_hit(ball: &BallState, player: &PlayerState, collision: &Vector3<f32>) -> Result<BallState, Box<dyn Error>> {\n\n let n1 = (collision - ball.position).normalize();\n\n\n\n let L_b = (collision - ball.position).cross_matrix();\n\n let L_c = (collision - player.position).cross_matrix();\n\n\n\n let player_rotation = 
player.rotation.to_rotation_matrix();\n\n let invI_c = player_rotation * (*CAR_INVERSE_INERTIA * player_rotation.transpose());\n\n\n\n let invM = ((1.0 / BALL_MASS) + (1.0 / CAR_MASS)) * na::Matrix3::identity()\n\n - ((L_b * L_b) / BALL_INERTIA)\n\n - (L_c * (invI_c * L_c));\n\n let M = invM.try_inverse().ok_or(\"M matrix inversion failed\")?;\n\n\n\n let delta_v = (player.velocity - (L_c * player.angular_velocity)) - (ball.velocity - (L_b * ball.angular_velocity));\n\n\n\n // compute the impulse that is consistent with an inelastic collision\n\n let J1 = M * delta_v;\n\n\n\n let J1_perp = J1.dot(&n1).min(-1.0f32) * n1;\n", "file_path": "predict/src/ball.rs", "rank": 5, "score": 175373.2154631068 }, { "content": "fn shoot<H: HeuristicModel>(model: &mut H, game: &GameState, bot: &mut BotState) -> PlanResult {\n\n let desired_ball_position: Vector3<f32> = opponent_goal_shoot_at(&game);\n\n let last_plan = None;\n\n // FIXME check if last plan is still valid before using this\n\n // let last_plan\n\n // if bot.last_action.is_some() && bot.last_action.as_ref().unwrap() == &Action::Shoot {\n\n // bot.plan.as_ref()\n\n // } else {\n\n // None\n\n // };\n\n let result = hit_ball(model, game, bot, &desired_ball_position, last_plan);\n\n bot.last_action = Some(Action::Shoot);\n\n result\n\n}\n\n\n", "file_path": "brain/src/play.rs", "rank": 6, "score": 174329.64113795568 }, { "content": "fn files<'a>(dir: &'a str) -> impl Iterator<Item = PathBuf> + 'a {\n\n WalkDir::new(dir)\n\n .into_iter()\n\n .filter_entry(|e| !is_hidden(e))\n\n .filter_map(|entry| {\n\n let entry = entry.unwrap();\n\n if entry.file_type().is_file() {\n\n Some(entry.path().to_owned())\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "predict/src/sample.rs", "rank": 7, "score": 174212.58094068783 }, { "content": "fn non_admissable_estimated_time<H: HeuristicModel>(model: &mut H, current: &PlayerState, ball: &BallState) -> f32 {\n\n // unreachable, we can't fly\n\n if ball.position.z - BALL_COLLISION_RADIUS > CAR_DIMENSIONS.z + CAR_OFFSET.z {\n\n return std::f32::MAX;\n\n }\n\n\n\n let mut single_heuristic_cost = [0.0];\n\n model\n\n .unscaled_heuristic(&[current.clone()], &mut single_heuristic_cost[0..1])\n\n .expect(\"Heuristic failed initial!\");\n\n unsafe { *single_heuristic_cost.get_unchecked(0) }\n\n}\n\n\n\n// TODO\n\n//fn shadow(game: &GameState) -> PlayerState {\n\n//}\n\n\n", "file_path": "brain/src/play.rs", "rank": 8, "score": 172663.29007675778 }, { "content": "#[allow(dead_code)]\n\nfn turn_plan(current: &PlayerState, angle: f32) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n let current_heading = current.rotation.to_rotation_matrix() * Vector3::new(-1.0, 0.0, 0.0);\n\n let desired_heading = Rotation3::from_euler_angles(0.0, 0.0, angle) * current_heading;\n\n let mut turn_controller = BrickControllerState::default();\n\n turn_controller.throttle = Throttle::Forward;\n\n turn_controller.steer = if angle < 0.0 { Steer::Right } else { Steer::Left };\n\n\n\n let mut straight_controller = BrickControllerState::default();\n\n straight_controller.throttle = Throttle::Forward;\n\n\n\n const TURN_DURATION: f32 = 16.0 * TICK;\n\n // straighten out for zero angular velocity at end, hopefully 16 ticks is enough?\n\n const STRAIGHT_DURATION: f32 = 16.0 * TICK;\n\n\n\n // iterate till dot product is maximized (ie we match the desired heading)\n\n let mut last_dot = std::f32::MIN;\n\n let mut player = current.clone();\n\n loop {\n\n let turn_player = predict::player::next_player_state(&player, 
&turn_controller, TURN_DURATION)?;\n", "file_path": "brick/src/main.rs", "rank": 9, "score": 170263.80498751812 }, { "content": "#[allow(dead_code)]\n\nfn forward_plan(current: &PlayerState, time: f32) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n\n\n let mut controller = BrickControllerState::default();\n\n controller.throttle = Throttle::Forward;\n\n\n\n let mut player = current.clone();\n\n let mut time_so_far = 0.0;\n\n while time_so_far < time {\n\n let step_duration = 16.0 * TICK;\n\n player = predict::player::next_player_state(&player, &controller, step_duration)?;\n\n plan.push((player.clone(), controller.clone(), step_duration));\n\n time_so_far += step_duration;\n\n }\n\n\n\n Ok(plan)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 10, "score": 170263.80498751812 }, { "content": "/// main entrypoint for bot to figure out what to do given the current state\n\n// TODO we need to also include our current (ie previously used) strategy state as an input here,\n\n// and logic for expiring it if it's no longer applicable.\n\npub fn play<H: HeuristicModel>(model: &mut H, game: &GameState, bot: &mut BotState) -> PlanResult {\n\n match what_do(game) {\n\n Action::Shoot => shoot(model, game, bot),\n\n }\n\n}\n\n\n", "file_path": "brain/src/play.rs", "rank": 11, "score": 169660.26353005215 }, { "content": "fn bot_test_plan<H: brain::HeuristicModel>(model: &mut H, game: &GameState, bot: &mut BotState) -> PlanResult {\n\n // canned plans\n\n //\n\n // // let player = &game.player.lag_compensated_player(&bot.controller_history, LAG_FRAMES);\n\n // let player = PlayerState::default();\n\n //\n\n // let mut plan_result = if let Ok(plan) = snek_plan(&player) {\n\n // //for (i, (_next_player, controller, cost)) in plan.iter().enumerate() {\n\n // // println!(\"i: {}, steer: {:?}, steps: {}\", i, controller.steer, (cost / TICK).round() as i32);\n\n // //}\n\n // PlanResult {\n\n // plan: Some(plan),\n\n // cost_diff: 0.0,\n\n // visualization_lines: vec![],\n\n // visualization_points: vec![],\n\n // }\n\n // } else {\n\n // PlanResult {\n\n // plan: None,\n\n // cost_diff: 0.0,\n", "file_path": "brick/src/main.rs", "rank": 12, "score": 164581.71772103396 }, { "content": "pub fn csv_files<'a>(dir: &'a str) -> impl Iterator<Item = PathBuf> + 'a {\n\n files(dir).filter(|path| path.extension() == Some(OsStr::new(\"csv\")))\n\n}\n\n\n", "file_path": "predict/src/sample.rs", "rank": 13, "score": 163306.19112659086 }, { "content": "fn pd_adjust(input: &mut rlbot::ControllerState, errors: &VecDeque<f32>) {\n\n // build up some errors before we do anything\n\n if errors.len() <= DIFFERENTIAL_STEPS {\n\n return;\n\n }\n\n let last_error = errors[errors.len() - 1];\n\n let error_slope = (last_error - errors[errors.len() - 1 - DIFFERENTIAL_STEPS]) / DIFFERENTIAL_STEPS as f32;\n\n //println!(\n\n // \"last_error: {:?}, error_slope: {:?}\",\n\n // last_error, error_slope\n\n //); // TODO normalize slope to speed!\n\n let proportional_signal = PROPORTIONAL_DIST_GAIN * last_error;\n\n let differential_signal = DIFFERENTIAL_GAIN * error_slope;\n\n let signal = proportional_signal + differential_signal;\n\n //println!(\n\n // \"signal: {}, p: {}, d: {}\",\n\n // signal, proportional_signal, differential_signal\n\n //);\n\n input.steer += signal;\n\n\n", "file_path": "brain/src/play.rs", "rank": 14, "score": 160683.25878978104 }, { "content": "pub fn load_all_samples(dir: &str) -> Vec<Vec<PlayerState>> {\n\n csv_files(dir).map(|f| load_sample_file(&f)).collect()\n\n}\n\n\n", "file_path": 
"predict/src/sample.rs", "rank": 15, "score": 159547.10458973018 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args = Docopt::new(USAGE).and_then(|dopt| dopt.parse()).unwrap_or_else(|e| e.exit());\n\n\n\n let test_bot = args.get_bool(\"--bot-test\");\n\n if args.get_bool(\"--bot\") || test_bot {\n\n thread::spawn(move || loop {\n\n let t = thread::spawn(move || {\n\n if test_bot {\n\n panic::catch_unwind(run_bot_test).expect(\"Panic catch unwind failed\");\n\n } else {\n\n panic::catch_unwind(run_bot).expect(\"Panic catch unwind failed\");\n\n }\n\n });\n\n t.join()\n\n .expect_err(\"The bot thread should only end if panic, but it didn't panic.\");\n\n println!(\"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\");\n\n println!(\"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\");\n\n println!(\"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\");\n\n println!(\"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\");\n\n println!(\"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\");\n", "file_path": "brick/src/main.rs", "rank": 16, "score": 143380.21652448294 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let rlbot = rlbot::init()?;\n\n\n\n let _batmobile = rlbot::PlayerLoadout::new().car_id(803);\n\n let fennec = rlbot::PlayerLoadout::new().car_id(4284);\n\n let mut settings = rlbot::MatchSettings::new().player_configurations(vec![rlbot::PlayerConfiguration::new(\n\n rlbot::PlayerClass::RLBotPlayer,\n\n \"Recorder\",\n\n 0,\n\n )\n\n .loadout(fennec)]);\n\n\n\n settings.mutator_settings = rlbot::MutatorSettings::new()\n\n .match_length(rlbot::MatchLength::Unlimited)\n\n .boost_option(rlbot::BoostOption::Unlimited_Boost);\n\n\n\n rlbot.start_match(&settings)?;\n\n rlbot.wait_for_match_start()?;\n\n\n\n // set initial state\n", "file_path": "record/src/main.rs", "rank": 17, "score": 143380.21652448288 }, { "content": "#[allow(dead_code)]\n\nfn record_snapshot(game: &GameState, bot: &BotState) -> Result<(), Box<dyn Error>> {\n\n let dir = \"data/snapshots\";\n\n create_dir_all(dir)?;\n\n\n\n let file_path = SNAPSHOT_NUMBER.with(|num| {\n\n let mut path;\n\n loop {\n\n path = Path::new(dir).join(format!(\"snapshot{}.bincode.gz\", *num.borrow()));\n\n if !path.exists() {\n\n break;\n\n }\n\n (*num.borrow_mut()) += 1;\n\n }\n\n path\n\n });\n\n let f = BufWriter::new(File::create(file_path)?);\n\n let mut e = GzEncoder::new(f, Compression::default());\n\n Ok(bincode::serialize_into(&mut e, &(game, bot))?)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 18, "score": 143040.5615297909 }, { "content": "#[allow(dead_code)]\n\nfn square_plan(player: &PlayerState) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n plan.push((player.clone(), BrickControllerState::default(), 0.0));\n\n for _ in 0..4 {\n\n let mut plan_part = forward_plan(&plan[plan.len() - 1].0, 1000.0)?;\n\n plan.append(&mut plan_part);\n\n let mut plan_part = turn_plan(&plan[plan.len() - 1].0, -PI / 2.0)?;\n\n plan.append(&mut plan_part);\n\n }\n\n\n\n Ok(plan)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 19, "score": 141725.95846020436 }, { "content": "#[allow(dead_code)]\n\nfn snek_plan(player: &PlayerState) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n plan.push((player.clone(), BrickControllerState::default(), 0.0));\n\n\n\n let mut plan_part = forward_plan(&plan[plan.len() - 1].0, 500.0)?;\n\n plan.append(&mut plan_part);\n\n for _ in 0..2 {\n\n let mut plan_part = turn_plan(&plan[plan.len() - 1].0, PI / 6.0)?;\n\n plan.append(&mut plan_part);\n\n let mut 
plan_part = turn_plan(&plan[plan.len() - 1].0, -PI / 6.0)?;\n\n plan.append(&mut plan_part);\n\n }\n\n\n\n Ok(plan)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 20, "score": 141725.95846020436 }, { "content": "#[allow(dead_code, clippy::same_item_push)]\n\nfn snek_plan2(current: &PlayerState) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n let mut player = current.clone();\n\n plan.push((player.clone(), BrickControllerState::default(), 0.0));\n\n\n\n let mut controller = BrickControllerState::default();\n\n controller.throttle = Throttle::Forward;\n\n for _ in 0..4 {\n\n player = predict::player::next_player_state(&player, &controller, 16.0 * TICK)?;\n\n plan.push((player.clone(), controller.clone(), 16.0 * TICK));\n\n }\n\n\n\n for _ in 0..2 {\n\n controller.steer = Steer::Left;\n\n for _ in 0..4 {\n\n player = predict::player::next_player_state(&player, &controller, 16.0 * TICK)?;\n\n plan.push((player.clone(), controller.clone(), 16.0 * TICK));\n\n }\n\n\n\n controller.steer = Steer::Right;\n\n for _ in 0..4 {\n\n player = predict::player::next_player_state(&player, &controller, 16.0 * TICK)?;\n\n plan.push((player.clone(), controller.clone(), 16.0 * TICK));\n\n }\n\n }\n\n\n\n Ok(plan)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 21, "score": 141725.95846020436 }, { "content": "#[allow(dead_code)]\n\nfn snek_plan3(current: &PlayerState) -> Result<Plan, Box<dyn Error>> {\n\n let mut plan = vec![];\n\n let mut player = current.clone();\n\n plan.push((player.clone(), BrickControllerState::default(), 0.0));\n\n\n\n let mut controller = BrickControllerState::default();\n\n controller.throttle = Throttle::Forward;\n\n\n\n let get_steer = |y| -> Steer {\n\n if y < 400.0 {\n\n Steer::Straight\n\n } else if y < 800.0 {\n\n Steer::Left\n\n } else if y < 1200.0 {\n\n Steer::Right\n\n } else if y < 1600.0 {\n\n Steer::Left\n\n } else if y < 2000.0 {\n\n Steer::Right\n\n } else {\n", "file_path": "brick/src/main.rs", "rank": 22, "score": 141725.95846020436 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n // fewer than normal threads, so some cores are left free for the computer user...\n\n rayon::ThreadPoolBuilder::new().num_threads(10).build_global().unwrap();\n\n\n\n let desired_ball_position: Vector3<f32> = brain::play::opponent_goal_shoot_at(&GameState::default());\n\n let ball = BallState::default();\n\n let desired_contact = brain::play::simple_desired_contact(&ball, &desired_ball_position);\n\n\n\n let config = SearchConfig {\n\n step_duration: 16.0 * TICK,\n\n slop: 40.0,\n\n max_cost: 10.0,\n\n max_iterations: 10_000_000, // allow more iterations before giving up\n\n scale_heuristic: 1.0,\n\n custom_filter: Some(|_| true), // ignore bounds\n\n };\n\n\n\n let max_speed_r = (MAX_BOOST_SPEED / SPEED_FACTOR).round() as i32;\n\n // TODO negative vy\n\n //(-max_speed_r..=max_speed_r).into_par_iter().for_each(|speed_r| {\n", "file_path": "generate-data/src/main.rs", "rank": 23, "score": 140338.8565583713 }, { "content": "#[allow(dead_code)]\n\nfn offset_forward_plan(current: &PlayerState) -> Result<Plan, Box<dyn Error>> {\n\n let mut offset_player = current.clone();\n\n let heading = offset_player.rotation.to_rotation_matrix() * Vector3::new(-1.0, 0.0, 0.0);\n\n let clockwise_90_rotation = Rotation3::from_euler_angles(0.0, 0.0, -PI / 2.0);\n\n let right = clockwise_90_rotation * heading;\n\n offset_player.position += 200.0 * right;\n\n\n\n forward_plan(&offset_player, 4000.0)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 24, "score": 
138902.14923979476 }, { "content": "fn update_bot_state(game: &GameState, bot: &mut BotState, plan_result: &PlanResult) {\n\n if let Some(ref new_plan) = plan_result.plan {\n\n if let Some(ref existing_plan) = bot.plan {\n\n let new_plan_cost = new_plan.iter().map(|(_, _, cost)| cost).sum::<f32>();\n\n\n\n let closest_index = brain::play::closest_plan_index(&game.player, &existing_plan);\n\n let existing_plan_cost = existing_plan\n\n .iter()\n\n .enumerate()\n\n .filter(|(index, _val)| *index > closest_index)\n\n .map(|(_index, (_, _, cost))| cost)\n\n .sum::<f32>();\n\n\n\n // bail, we got a worse plan!\n\n if new_plan_cost >= existing_plan_cost && plan_is_valid(&game, &existing_plan) {\n\n //println!(\"bailing longer plan! existing_plan_cost: {}, new_plan_cost: {}\", existing_plan_cost, new_plan_cost);\n\n return;\n\n }\n\n\n\n //let existing_diff = bot.cost_diff.abs();\n", "file_path": "brick/src/main.rs", "rank": 25, "score": 136144.51328504024 }, { "content": "fn interpolate_scalar(start: f32, end: f32, factor: f32) -> f32 {\n\n (1.0 - factor) * start + factor * end\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 26, "score": 135997.99627517667 }, { "content": "// source: https://github.com/samuelpmish/RLUtilities/blob/master/src/simulation/ball.cc#L90\n\nfn psyonix_scale_impulse(val: f32) -> f32 {\n\n if val <= 500.0 {\n\n 0.65\n\n } else if val <= 2300.0 {\n\n 0.65 - 0.1 * (val - 500.0) / (2300.0 - 500.0)\n\n } else {\n\n 0.55 - 0.25 * (val - 2300.0) / (4600.0 - 2300.0)\n\n }\n\n}\n\n\n\n/// calculates ball state after collision with player. caller of this function must ensure there\n\n/// actually *is* a collision between the ball and player, otherwise the result of this is\n\n/// unpredictable\n", "file_path": "predict/src/ball.rs", "rank": 27, "score": 135555.98017042995 }, { "content": "// source: https://www.youtube.com/watch?v=9uh8-nBlufM\n\nfn next_ball_state_rolling(ball: &BallState, time_step: f32) -> BallState {\n\n let mut next = ball.clone();\n\n\n\n let acceleration;\n\n if ball.velocity.norm() > SLIDING_SPEED {\n\n let heading = ball.velocity / ball.velocity.norm();\n\n acceleration = -SLIDING_DECELERATION * heading - AIR_RESISTANCE * next.velocity;\n\n } else {\n\n acceleration = -ROLLING_RESISTANCE * next.velocity\n\n }\n\n\n\n next.position += time_step * next.velocity;\n\n next.velocity += time_step * acceleration;\n\n\n\n // hard-coding certain values\n\n next.velocity.z = 0.0;\n\n next.position.z = BALL_COLLISION_RADIUS;\n\n\n\n next\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 28, "score": 133684.07069944148 }, { "content": "fn next_ball_state_soaring(ball: &BallState, time_step: f32) -> BallState {\n\n let mut next;\n\n\n\n if let Some(normal) = arena_contact_normal(&ball) {\n\n if na::Matrix::dot(&ball.velocity, &normal) < 0.0 {\n\n // we're going towards the arena contact, so let's bounce\n\n next = calculate_bounce(&ball, &normal);\n\n } else {\n\n // already bounced\n\n next = ball.clone();\n\n }\n\n } else {\n\n next = ball.clone();\n\n }\n\n\n\n // FIXME rl utils uses prev velocity instead of next velocity, we should too?\n\n let acceleration = Vector3::new(0.0, 0.0, -GRAVITY) - AIR_RESISTANCE * next.velocity;\n\n\n\n if next.velocity.norm() > BALL_MAX_SPEED {\n\n next.velocity = next.velocity.normalize() * BALL_MAX_SPEED;\n", "file_path": "predict/src/ball.rs", "rank": 29, "score": 133684.07069944148 }, { "content": "fn draw_lines(rlbot: &rlbot::RLBot, lines: &[Line], chunk_num: &mut i32) -> Result<(), Box<dyn Error>> {\n\n for 
chunk in lines.chunks(200) {\n\n // TODO in case of multiple bricks, add player index * 1000 to the group id\n\n let mut group = rlbot.begin_render_group(VISUALIZATION_GROUP_ID + *chunk_num);\n\n\n\n for &l in chunk.iter() {\n\n let p1 = l.0;\n\n let p2 = l.1;\n\n let p3 = l.2;\n\n let color = group.color_rgb((255.0 * p3.x) as u8, (255.0 * p3.y) as u8, (255.0 * p3.z) as u8);\n\n group.draw_line_3d((-p1.x, p1.y, p1.z), (-p2.x, p2.y, p2.z), color);\n\n }\n\n\n\n group.render()?;\n\n\n\n *chunk_num += 1;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 30, "score": 133070.9461525139 }, { "content": "pub fn next_ball_state(ball: &BallState, time_step: f32) -> BallState {\n\n match find_prediction_category(&ball) {\n\n PredictionCategory::Soaring => next_ball_state_soaring(&ball, time_step),\n\n PredictionCategory::Rolling => next_ball_state_rolling(&ball, time_step),\n\n }\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 31, "score": 131631.0769737587 }, { "content": "// using the current player state as a starting point, apply the plan\n\n// FIXME unexplode plan first and calculate using those longer durations for more accurate\n\n// values, then explode again\n\n// TODO for even more accuracy:\n\n// - simulate following the plan while not exactly on it\n\n// - simulate turn error correction\n\nfn adjusted_plan(player: &PlayerState, plan: Plan) -> Result<Plan, Box<dyn Error>> {\n\n let mut adjusted = Vec::with_capacity(plan.len());\n\n adjusted.push((player.clone(), BrickControllerState::default(), 0.0));\n\n let mut last_player: PlayerState = player.clone();\n\n plan.iter().filter(|(_, _, cost)| *cost > 0.0).map(|(_, controller, cost): &(PlayerState, BrickControllerState, f32)| -> Result<(PlayerState, BrickControllerState, f32), Box<dyn Error>> {\n\n let next_player = predict::player::next_player_state(&last_player, &controller, *cost);\n\n if next_player.is_err() {\n\n // print to stderr now since we're swallowing these errors right after this\n\n eprintln!(\"Warning: failed to adjust plan: {}\", next_player.as_ref().unwrap_err());\n\n }\n\n last_player = next_player?;\n\n Ok((last_player.clone(), controller.clone(), *cost))\n\n }).filter_map(Result::ok).for_each(|plan_val| {\n\n adjusted.push(plan_val);\n\n });\n\n\n\n Ok(adjusted)\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 32, "score": 131146.91554685327 }, { "content": "fn stitch_with_current_plan(game: &GameState, bot: &BotState, plan_result: &mut PlanResult) {\n\n if let Some(last_plan) = adjusted_plan_or_fallback(&game, bot) {\n\n if let Some(plan) = &plan_result.plan {\n\n let closest_index_now = brain::play::closest_plan_index(&game.player, &last_plan);\n\n let closest_index_plan = brain::play::closest_plan_index(&plan[0].0, &last_plan);\n\n\n\n if closest_index_plan < closest_index_now {\n\n // we had NOT compensated enough for the logic lag\n\n eprintln!(\"Aborting plan stitching due to inadequate logic lag compensation\");\n\n return;\n\n }\n\n\n\n // we had compensated enough for the logic lag, now adjust the plan\n\n let mut stitched_plan = last_plan\n\n .iter()\n\n .cloned()\n\n .skip(closest_index_now)\n\n .take(closest_index_plan - closest_index_now)\n\n .collect::<Vec<_>>();\n\n stitched_plan.extend(plan.clone());\n", "file_path": "brick/src/main.rs", "rank": 33, "score": 130584.05703337654 }, { "content": "/// factor: number from 0.0 to 1.0 for interpolation between start and end, 0.0 being 100% at\n\n/// start, 1.0 being 100% at end. 
Note that this actually also handles factors outside the 0.0 to\n\n/// 1.0 range, in which case it's a linear extrapolation\n\nfn interpolate(start: Vector3<f32>, end: Vector3<f32>, factor: f32) -> Vector3<f32> {\n\n (1.0 - factor) * start + factor * end\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 34, "score": 127672.58337431183 }, { "content": "fn compare<'a>(controller: BrickControllerState, all_samples: impl Iterator<Item = &'a Vec<PlayerState>>) {\n\n let mut position_errors = vec![];\n\n let mut velocity_errors = vec![];\n\n let mut avz_errors = vec![];\n\n let mut max_position_error = 0.0;\n\n let mut max_velocity_error = 0.0;\n\n let mut max_avz_error = 0.0;\n\n\n\n for full_sample in all_samples {\n\n let mut i = 0;\n\n // offset by 32 frames to ensure minimum 32 frames of simulation ahead in the slice\n\n while full_sample[i..].len() > NUM_TICKS {\n\n let sample = &full_sample[i..];\n\n i += 1;\n\n\n\n let player_start = &sample[0];\n\n let player_end = &sample[NUM_TICKS];\n\n\n\n // we can miss data at the edges, or not be able to extrapolate at the edges. ignore for now\n\n // TODO remove all these checks\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 35, "score": 127625.76234056253 }, { "content": "pub fn ball_trajectory(ball: &BallState, duration: f32) -> Vec<BallState> {\n\n let mut t = 0.0;\n\n let mut trajectory = Vec::with_capacity((duration / TICK).ceil() as usize);\n\n let mut ball_now = ball.clone();\n\n trajectory.push(ball_now);\n\n while t < duration {\n\n t += TICK;\n\n ball_now = next_ball_state(trajectory.last().unwrap(), TICK);\n\n trajectory.push(ball_now);\n\n }\n\n trajectory\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 36, "score": 126973.54273017513 }, { "content": "// FIXME lot more going in the rl utils now, probably helps handle more types of bounces:\n\n// https://github.com/samuelpmish/RLUtilities/blob/master/src/simulation/ball.cc#L36\n\nfn calculate_bounce(ball: &BallState, normal: &Unit<Vector3<f32>>) -> BallState {\n\n let mut bounced = (*ball).clone();\n\n\n\n let v_perp = na::Matrix::dot(&ball.velocity, &normal.into_inner()) * normal.into_inner();\n\n let v_para = ball.velocity - v_perp;\n\n let v_spin = BALL_COLLISION_RADIUS * normal.cross(&ball.angular_velocity); // velocity of edge of ball, relative to ball center\n\n let s = v_para + v_spin; // this is the velocity at point of impact (edge of ball) in global coords\n\n\n\n let ratio = v_perp.norm() / s.norm();\n\n\n\n let delta_v_perp = -(1.0 + RESTITUTION) * v_perp;\n\n let delta_v_para = -f32::min(1.0, Y * ratio) * MU * s;\n\n\n\n bounced.velocity += delta_v_perp + delta_v_para;\n\n bounced.angular_velocity += A * BALL_COLLISION_RADIUS * delta_v_para.cross(&normal);\n\n\n\n bounced\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 37, "score": 126973.54273017513 }, { "content": "#[test]\n\nfn test_throttle_straight() {\n\n let mut controller = BrickControllerState::default();\n\n controller.throttle = Throttle::Forward;\n\n let all_samples = predict::sample::THROTTLE_STRAIGHT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 38, "score": 126776.07604328476 }, { "content": "#[test]\n\nfn test_boost_left() {\n\n let mut controller = BrickControllerState::default();\n\n controller.steer = Steer::Left;\n\n controller.boost = true;\n\n let all_samples = predict::sample::BOOST_LEFT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n\n\n", "file_path": 
"predict/tests/compare_with_sample_files.rs", "rank": 39, "score": 126776.07604328476 }, { "content": "#[test]\n\nfn test_throttle_right() {\n\n let mut controller = BrickControllerState::default();\n\n controller.steer = Steer::Right;\n\n controller.throttle = Throttle::Forward;\n\n let all_samples = predict::sample::THROTTLE_RIGHT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 40, "score": 126776.07604328476 }, { "content": "#[test]\n\nfn test_boost_straight() {\n\n let mut controller = BrickControllerState::default();\n\n controller.boost = true;\n\n let all_samples = predict::sample::BOOST_STRAIGHT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 41, "score": 126776.07604328476 }, { "content": "#[test]\n\nfn test_boost_right() {\n\n let mut controller = BrickControllerState::default();\n\n controller.steer = Steer::Right;\n\n controller.boost = true;\n\n let all_samples = predict::sample::BOOST_RIGHT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 42, "score": 126776.07604328476 }, { "content": "#[test]\n\nfn test_throttle_left() {\n\n let mut controller = BrickControllerState::default();\n\n controller.steer = Steer::Left;\n\n controller.throttle = Throttle::Forward;\n\n let all_samples = predict::sample::THROTTLE_LEFT_ALL.iter();\n\n compare(controller, all_samples);\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 43, "score": 126776.07604328476 }, { "content": "// rip-off of: https://github.com/samuelpmish/RLUtilities/blob/master/src/simulation/ball.cc#L82\n\npub fn closest_point_for_collision(ball: &BallState, player: &PlayerState) -> Vector3<f32> {\n\n let mut local_pos = player.rotation.to_rotation_matrix().inverse() * (ball.position - player.hitbox_center());\n\n local_pos.x = na::clamp(local_pos.x, -CAR_DIMENSIONS.x / 2.0, CAR_DIMENSIONS.x / 2.0);\n\n local_pos.y = na::clamp(local_pos.y, -CAR_DIMENSIONS.y / 2.0, CAR_DIMENSIONS.y / 2.0);\n\n local_pos.z = na::clamp(local_pos.z, -CAR_DIMENSIONS.z / 2.0, CAR_DIMENSIONS.z / 2.0);\n\n player.hitbox_center() + player.rotation.to_rotation_matrix() * local_pos\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 44, "score": 124306.60083388686 }, { "content": "fn round_player_state(player: &PlayerState, step_duration: f32, speed: f32) -> RoundedPlayerState {\n\n // we're using the rounded speed to determine the grid size. we want a good bit of tolerance for\n\n // this, if we relax the rounded velocity equality check. or some other logic that will ensure\n\n // same grid for different player states that we want to match\n\n let rounding_factor = 1.0; // TODO tune. for both correctness AND speed!\n\n let mut rounded_speed = (speed / rounding_factor).round();\n\n if rounded_speed == 0.0 {\n\n rounded_speed = 0.5;\n\n }\n\n let rounded_speed = rounded_speed * rounding_factor;\n\n\n\n let grid_size = step_duration * rounded_speed;\n\n //let velocity_margin = 250.0; // TODO tune\n\n let (_roll, _pitch, yaw) = player.rotation.euler_angles();\n\n\n\n RoundedPlayerState {\n\n // TODO we could have individual grid sizes for x/y/z based on vx/vy/vz. 
not sure it's\n\n // worth it.\n\n x: (grid_size * (player.position.x / grid_size).round()) as i16,\n\n y: (grid_size * (player.position.y / grid_size).round()) as i16,\n", "file_path": "brain/src/plan.rs", "rank": 45, "score": 123786.24154692181 }, { "content": "pub fn index_all_samples<'a>(all_samples: &'a [Vec<PlayerState>]) -> SampleMap<'a> {\n\n let mut indexed = SampleMap::default();\n\n\n\n for sample in all_samples {\n\n if sample.len() < MIN_SAMPLE_LENGTH {\n\n println!(\"bad sample: {:?}\", sample[0]);\n\n }\n\n\n\n let mut j = 0;\n\n\n\n while sample[j..].len() >= MIN_SAMPLE_LENGTH {\n\n let key = normalized_player_rounded(&sample[j]);\n\n\n\n match indexed.entry(key) {\n\n Vacant(e) => {\n\n e.insert(&sample[j..]);\n\n }\n\n Occupied(mut e) => {\n\n // replace the sample in case we have one closer to the intended normalized\n\n // velocity value\n", "file_path": "predict/src/sample.rs", "rank": 46, "score": 121820.00587066825 }, { "content": "fn move_ball_out_of_the_way(rlbot: &rlbot::RLBot) -> Result<(), Box<dyn Error>> {\n\n let position = rlbot::Vector3Partial::new().x(3800.0).y(4800.0).z(98.0);\n\n\n\n let physics = rlbot::DesiredPhysics::new().location(position);\n\n\n\n let ball_state = rlbot::DesiredBallState::new().physics(physics);\n\n\n\n let desired_game_state = rlbot::DesiredGameState::new().ball_state(ball_state);\n\n\n\n rlbot.set_game_state(&desired_game_state)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "record/src/main.rs", "rank": 47, "score": 117982.18694526449 }, { "content": "#[allow(dead_code)]\n\nfn move_ball_out_of_the_way(rlbot: &rlbot::RLBot) -> Result<(), Box<dyn Error>> {\n\n let position = rlbot::Vector3Partial::new().x(3800.0).y(4800.0).z(98.0);\n\n let physics = rlbot::DesiredPhysics::new().location(position);\n\n let ball_state = rlbot::DesiredBallState::new().physics(physics);\n\n let desired_game_state = rlbot::DesiredGameState::new().ball_state(ball_state);\n\n rlbot.set_game_state(&desired_game_state)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 48, "score": 117982.18694526449 }, { "content": "/// returns normal at contact point if ball is currently colliding with the arena\n\npub fn arena_contact_normal(ball: &BallState) -> Option<Unit<Vector3<f32>>> {\n\n let sphere = ncollide::shape::Ball::new(BALL_COLLISION_RADIUS);\n\n let ball_pos = Isometry3::new(ball.position, na::zero()); // TODO if we want to handle cube ball, track and pass on the rotation\n\n let arena_pos = Isometry3::new(na::zero(), na::zero());\n\n\n\n let margin = 0.0;\n\n let contact = ncollide::query::contact(&arena_pos, &(*ARENA), &ball_pos, &sphere, margin);\n\n\n\n contact.map(|c| c.normal)\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 49, "score": 117457.24844638104 }, { "content": "fn squared_distance(a: f32, b: f32) -> f32 {\n\n (b - a).powf(2.0)\n\n}\n\n\n\npub(crate) fn knn_distance(a: &[f32], b: &[f32]) -> f32 {\n\n squared_distance(a[0], b[0]) // x\n\n + squared_distance(a[1], b[1]) // y\n\n // FIXME we don't set these in generate-data // + SCALE_VELOCITY_DISTANCE * squared_distance(a[2], b[2]) // vx\n\n + SCALE_VELOCITY_DISTANCE * squared_distance(a[3], b[3]) // vy\n\n + scaled_circular_distance(a[4], b[4]).powf(2.0) // yaw\n\n}\n\n\n\nimpl KnnHeuristic {\n\n pub fn try_new(path: &str) -> Result<Self, Box<dyn Error>> {\n\n let mut tree = KdTree::new(KNN_DIMENSIONS);\n\n\n\n let mut rdr = csv::ReaderBuilder::new().has_headers(false).from_reader(File::open(path)?);\n\n\n\n for record in rdr.records() {\n\n let record = record?;\n", 
"file_path": "heuristic/src/knn.rs", "rank": 50, "score": 117071.44103866865 }, { "content": "// +PI and -PI are the same angle, so the distance needs to take that into account!\n\nfn scaled_circular_distance(a: f32, b: f32) -> f32 {\n\n SCALE_CIRCULAR_DISTANCE * (a - b).abs().min(2.0 * PI + a - b).min(2.0 * PI + b - a)\n\n}\n\n\n", "file_path": "heuristic/src/knn.rs", "rank": 51, "score": 115256.24647769672 }, { "content": "// scale to [0, 1]\n\nfn scale(val: f32, min: f32, max: f32) -> f32 {\n\n (val - min) / (max - min)\n\n}\n\n\n", "file_path": "heuristic/src/neural.rs", "rank": 52, "score": 114744.14763777424 }, { "content": "/// updates our game state, which is a representation of the packet/ticket, but with our own data\n\n/// types etc\n\npub fn update_game_state(game_state: &mut GameState, tick: &rlbot::GameTickPacket, player_index: usize, frame: u32) {\n\n let ball = tick.ball.as_ref().expect(\"Missing ball\");\n\n let players = &tick.players;\n\n let player = players.get(player_index).expect(\"Missing player\");\n\n\n\n let bp = &ball.physics;\n\n let bl = &bp.location;\n\n let bv = &bp.velocity;\n\n let bav = &bp.angular_velocity;\n\n game_state.ball.position = Vector3::new(-bl.x, bl.y, bl.z); // x should be positive towards right, it only makes sense\n\n game_state.ball.velocity = Vector3::new(-bv.x, bv.y, bv.z); // x should be positive towards right, it only makes sense\n\n game_state.ball.angular_velocity = Vector3::new(-bav.x, bav.y, bav.z); // x should be positive towards right, it only makes sense\n\n\n\n let pp = &player.physics;\n\n let pl = &pp.location;\n\n let pv = &pp.velocity;\n\n let pav = &pp.angular_velocity;\n\n let pr = &pp.rotation;\n\n game_state.player.position = Vector3::new(-pl.x, pl.y, pl.z); // x should be positive towards right, it only makes sense\n\n game_state.player.velocity = Vector3::new(-pv.x, pv.y, pv.z); // x should be positive towards right, it only makes sense\n", "file_path": "state/src/lib.rs", "rank": 53, "score": 114361.92730102448 }, { "content": "pub fn normalized_player_rounded(player: &PlayerState) -> NormalizedPlayerState {\n\n let lv = player.local_velocity();\n\n NormalizedPlayerState {\n\n local_vx: (lv.x / GROUND_SPEED_GRID_FACTOR).round() as i16,\n\n local_vy: (lv.y / GROUND_SPEED_GRID_FACTOR).round() as i16,\n\n avz: (player.angular_velocity.z / GROUND_AVZ_GRID_FACTOR).round() as i16,\n\n }\n\n}\n\n\n", "file_path": "predict/src/sample.rs", "rank": 54, "score": 112433.1908980073 }, { "content": "// NOTE min/max values must match those used when scaling training data!\n\nfn scale_pos(val: f32) -> f32 {\n\n scale(val, -10_000.0, 10_000.0)\n\n}\n", "file_path": "heuristic/src/neural.rs", "rank": 55, "score": 112205.81032495532 }, { "content": "fn scale_avel(val: f32) -> f32 {\n\n scale(val, -6.0, 6.0)\n\n}\n", "file_path": "heuristic/src/neural.rs", "rank": 56, "score": 112198.88311425704 }, { "content": "fn scale_rot(val: f32) -> f32 {\n\n scale(val, -3.2, 3.2)\n\n}\n\n\n\nimpl HeuristicModel for NeuralHeuristic {\n\n fn unscaled_heuristic(&mut self, players: &[PlayerState], costs: &mut [f32]) -> Result<(), Box<dyn Error>> {\n\n let mut players_tensor = Tensor::new(&[players.len() as u64, 12u64]);\n\n for (i, player) in players.iter().enumerate() {\n\n let offset = i * 12;\n\n // FIXME use normalization rotation\n\n let pos = player.position; //self.normalization_rotation * (player.position - self.ball_position);\n\n players_tensor[offset + 0] = scale_pos(pos.x);\n\n players_tensor[offset + 1] = scale_pos(pos.y);\n\n 
players_tensor[offset + 2] = scale_pos(pos.z);\n\n\n\n // FIXME use normalization rotation\n\n let vel = player.velocity; //self.normalization_rotation * player.velocity;\n\n players_tensor[offset + 3] = scale_vel(vel.x);\n\n players_tensor[offset + 4] = scale_vel(vel.y);\n\n players_tensor[offset + 5] = scale_vel(vel.z);\n", "file_path": "heuristic/src/neural.rs", "rank": 57, "score": 112198.88311425704 }, { "content": "fn scale_vel(val: f32) -> f32 {\n\n scale(val, -2300.0, 2300.0)\n\n}\n", "file_path": "heuristic/src/neural.rs", "rank": 58, "score": 112198.88311425704 }, { "content": "/// modifies the plan to use finer-grained steps\n\npub fn explode_plan(plan: &Option<Plan>) -> Result<Option<Plan>, Box<dyn Error>> {\n\n if let Some(ref plan) = plan {\n\n if plan.get(0).is_none() {\n\n return Ok(None);\n\n }\n\n let mut exploded_plan = Vec::with_capacity(plan.len()); // will be at least this long\n\n exploded_plan.push(plan[0].clone());\n\n\n\n // for every plan segment, we expand within that segment. using small ticks repeated causes\n\n // the errors to accumulate rapidly, so we start over with each plan segment starting with\n\n // a more accurate base value\n\n for i in 1..plan.len() {\n\n let num_ticks = (plan[i].2 / TICK).round() as i32;\n\n let num_steps = num_ticks / TICKS_PER_STEP;\n\n #[allow(clippy::modulo_one)]\n\n let remaining_ticks = num_ticks % TICKS_PER_STEP;\n\n\n\n // NOTE since the controller value in each plan tuple is the controller that needs to\n\n // be applied to the *previous* player state in order to reach the player state in the\n\n // current tuple, we get the player from the period index and apply the controller from\n", "file_path": "brain/src/plan.rs", "rank": 59, "score": 108663.535782413 }, { "content": "/// tuple of (translation, acceleration, angular_acceleration, rotation)\n\ntype PlayerPrediction = (Vector3<f32>, Vector3<f32>, Vector3<f32>, Rotation3<f32>);\n\n\n", "file_path": "predict/src/player.rs", "rank": 60, "score": 108462.61216926313 }, { "content": "pub fn next_input(player: &PlayerState, bot: &mut BotState) -> rlbot::ControllerState {\n\n if let Some(ref plan) = bot.plan {\n\n // we need to take into account the inputs previously sent that will be processed\n\n // prior to finding where we are. 
instead of passing the current player, apply\n\n // LAG_FRAMES inputs that are not yet applied\n\n let player = player.lag_compensated_player(&bot.controller_history, LAG_FRAMES);\n\n let index = closest_plan_index(&player, &plan);\n\n\n\n // we need to look one past closest index to see the controller to reach next position\n\n if index < plan.len() - 1 {\n\n let current_heading = player.rotation.to_rotation_matrix() * Vector3::new(-1.0, 0.0, 0.0);\n\n let (closest_player, _, _) = &plan[index];\n\n let (_next_player, controller, _) = &plan[index + 1];\n\n //println!(\"index: {}, controller.steer: {:?}\", index, controller.steer);\n\n\n\n // FIXME we should account for differences in the tick and interpolate between the two\n\n // closest indices to get the real closet delta/distance\n\n let closest_delta = player.position - closest_player.position;\n\n let closest_distance = closest_delta.norm();\n\n let clockwise_90_rotation = Rotation3::from_euler_angles(0.0, 0.0, PI / 2.0);\n", "file_path": "brain/src/play.rs", "rank": 61, "score": 106432.37558073991 }, { "content": "pub fn opponent_goal_shoot_at(game: &GameState) -> Vector3<f32> {\n\n // TODO calculate which part of the goal is hardest for the opponent to reach\n\n match game.player.team {\n\n // FIXME check if we have this right...\n\n Team::Blue => Vector3::new(0.0, BACK_WALL_DISTANCE, GOAL_Z / 2.0),\n\n Team::Orange => Vector3::new(0.0, -BACK_WALL_DISTANCE, GOAL_Z / 2.0),\n\n }\n\n}\n\n\n", "file_path": "brain/src/play.rs", "rank": 62, "score": 104433.25081872902 }, { "content": "fn ground_turn_matching_transformation(\n\n normalized: sample::NormalizedPlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n xrange: i16,\n\n yrange: i16,\n\n skipx: Option<i16>,\n\n skipy: Option<i16>,\n\n) -> Option<&'static driving_model::PlayerTransformation> {\n\n // based on current player state, and steer, throttle and boost, gets the right transformation,\n\n // with some wiggle room based on xrange/yrange\n\n let mut local_normalized = normalized;\n\n let mut transformation: Option<&'static driving_model::PlayerTransformation> = None;\n\n\n\n // step_by is not yet stabilized... so using plain loops instead\n\n let ystep = if yrange < 0 { -1 } else { 1 };\n\n let xstep = if xrange < 0 { -1 } else { 1 };\n\n let mut dy = 0;\n\n 'outer: loop {\n\n let mut dx = 0;\n", "file_path": "predict/src/player.rs", "rank": 63, "score": 103222.67247138472 }, { "content": "/// for now, doesn't handle landing sideways or at any angle really, nor drifting. collision with\n\n/// arena is also not handled. collisions with other players or ball will never be handled here\n\nfn next_player_state_grounded(\n\n current: &PlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n) -> Result<PlayerState, String> {\n\n let mut next = (*current).clone();\n\n\n\n let (translation, velocity, angular_velocity, rotation) = ground_turn_prediction(&current, &controller, time_step)?;\n\n\n\n // because we extrapolate around the edges of our measurements, it's possible we calculate\n\n // a velocity beyond what's possible in the game. 
so we must scale it down here.\n\n let scale = if velocity.norm() > MAX_BOOST_SPEED {\n\n MAX_BOOST_SPEED / velocity.norm()\n\n } else {\n\n 1.0\n\n };\n\n\n\n // XXX NOTE using the velocity scaling for the translation isn't correct at all, but it's\n\n // likely to be at least proportional, and likely closer to correct than not scaling at all.\n\n next.position = current.position + scale * translation;\n\n next.position.z = RESTING_Z; // avoid drifting upward/downward when we're just driving on the ground!\n\n next.velocity = scale * velocity;\n\n next.angular_velocity = angular_velocity;\n\n next.rotation = UnitQuaternion::from_rotation_matrix(&rotation); // was easier to just return the end rotation directly. TODO stop using quaternion\n\n\n\n Ok(next)\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 64, "score": 101572.3151671092 }, { "content": "fn find_prediction_category(ball: &BallState) -> PredictionCategory {\n\n // NOTE using the \"soaring\" calculations when interacting with wall/curves, even when rolling,\n\n // since we at least have arena collision for that and will get a better prediction even if not\n\n // 100% accurate\n\n let in_air = ball.position.z > BALL_COLLISION_RADIUS || ball.velocity.z.abs() < 1.0;\n\n let on_side_curve = ball.position.x.abs() > SIDE_CURVE_DISTANCE;\n\n let on_back_curve = ball.position.y.abs() > BACK_CURVE_DISTANCE;\n\n if in_air || on_side_curve || on_back_curve {\n\n PredictionCategory::Soaring\n\n } else {\n\n PredictionCategory::Rolling\n\n }\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 65, "score": 100987.15522134882 }, { "content": "pub fn next_player_state(\n\n current: &PlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n) -> Result<PlayerState, String> {\n\n let mut next_player = match find_prediction_category(&current) {\n\n PredictionCategory::Ground => next_player_state_grounded(&current, &controller, time_step)?,\n\n //PredictionCategory::Ground2 => next_velocity_grounded2(&current, &controller, time_step),\n\n //PredictionCategory::Wall => next_velocity_walled(&current, &controller, time_step),\n\n //PredictionCategory::Ceiling => next_velocity_ceilinged(&current, &controller, time_step),\n\n //PredictionCategory::CurveWall => next_velocity_curve_walled(&current, &controller, time_step),\n\n //PredictionCategory::Air => next_velocity_flying(&current, &controller, time_step),\n\n };\n\n\n\n if next_player.position.z < CAR_DIMENSIONS.z / 2.0 {\n\n next_player.position.z = CAR_DIMENSIONS.z / 2.0;\n\n }\n\n\n\n Ok(next_player)\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 66, "score": 99963.0474230132 }, { "content": "pub fn update_gamepad(gilrs: &mut Gilrs, gamepad: &mut Gamepad) {\n\n while let Some(Event { event, .. 
}) = gilrs.next_event() {\n\n match event {\n\n EventType::ButtonChanged(button, value, _code) => match button {\n\n Button::RightTrigger2 => gamepad.rt2 = value,\n\n Button::LeftTrigger2 => gamepad.lt2 = value,\n\n _ => {}\n\n },\n\n EventType::ButtonPressed(button, _code) => match button {\n\n Button::Select => gamepad.select_toggled = !gamepad.select_toggled,\n\n Button::RightTrigger => gamepad.rt = true,\n\n Button::LeftTrigger => gamepad.lt = true,\n\n Button::North => gamepad.north = true,\n\n Button::East => gamepad.east = true,\n\n Button::South => gamepad.south = true,\n\n Button::West => gamepad.west = true,\n\n _ => {}\n\n },\n\n EventType::ButtonReleased(button, _code) => match button {\n\n Button::RightTrigger => gamepad.rt = false,\n", "file_path": "passthrough/src/lib.rs", "rank": 67, "score": 97728.00180727622 }, { "content": "pub fn find_prediction_category(_current: &PlayerState) -> PredictionCategory {\n\n // hard-coded the only thing we can handle right now\n\n PredictionCategory::Ground\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 68, "score": 97641.80599584113 }, { "content": "pub fn normalized_player(player: &PlayerState, ceil_vx: bool, ceil_vy: bool) -> NormalizedPlayerState {\n\n let avz = (player.angular_velocity.z / GROUND_AVZ_GRID_FACTOR).round() as i16;\n\n\n\n let lv = player.local_velocity();\n\n\n\n let mut local_vx = if ceil_vx {\n\n (lv.x / GROUND_SPEED_GRID_FACTOR).ceil() as i16\n\n } else {\n\n (lv.x / GROUND_SPEED_GRID_FACTOR).floor() as i16\n\n };\n\n\n\n let mut local_vy = if ceil_vy {\n\n (lv.y / GROUND_SPEED_GRID_FACTOR).ceil() as i16\n\n } else {\n\n (lv.y / GROUND_SPEED_GRID_FACTOR).floor() as i16\n\n };\n\n\n\n // scale down if we're looking at something beyond the limits after a ceil or something\n\n let max = ((MAX_BOOST_SPEED / GROUND_SPEED_GRID_FACTOR).round() as i32).pow(2);\n\n let sum = (local_vy as i32).pow(2) + (local_vx as i32).pow(2);\n\n if sum > max {\n\n let ratio = (MAX_BOOST_SPEED / GROUND_SPEED_GRID_FACTOR) / (sum as f32).sqrt();\n\n local_vx = (local_vx as f32 * ratio).round() as i16;\n\n local_vy = (local_vy as f32 * ratio).round() as i16;\n\n }\n\n\n\n NormalizedPlayerState { local_vx, local_vy, avz }\n\n}\n", "file_path": "predict/src/sample.rs", "rank": 69, "score": 96652.66721464458 }, { "content": "fn is_hidden(entry: &DirEntry) -> bool {\n\n entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)\n\n}\n\n\n\npub type SampleMap<'a> = HashMap<NormalizedPlayerState, &'a [PlayerState], MyHasher>;\n\n\n\n//fn continuous_sample(sample: &[PlayerState]) {\n\n// let mut last = sample[0];\n\n// for player in sample[1..] 
{\n\n// let predicted = player.position + TICK * last.velocity;\n\n// if last.velocity.x()\n\n//\n\n// last = player;\n\n// }\n\n// return true\n\n//}\n\n\n", "file_path": "predict/src/sample.rs", "rank": 70, "score": 95338.77090267272 }, { "content": "extern crate predict;\n\nextern crate state;\n\n\n\nuse state::*;\n\nuse std::cmp::Ordering;\n\n\n\nconst NUM_TICKS: usize = 16;\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 71, "score": 91530.72803642876 }, { "content": " if (player_start.angular_velocity.z.abs() / 0.2) >= 20.0\n\n || player_start.local_velocity().y < 50.0\n\n || player_start.local_velocity().x.abs() > 1000.0\n\n {\n\n continue;\n\n }\n\n\n\n let predicted_player_end =\n\n predict::player::next_player_state(&player_start, &controller, NUM_TICKS as f32 * TICK)\n\n .expect(\"failed prediction\");\n\n let position_error = (predicted_player_end.position - player_end.position).norm();\n\n let velocity_error = (predicted_player_end.velocity - player_end.velocity).norm();\n\n let avz_error = (predicted_player_end.angular_velocity.z - player_end.angular_velocity.z).abs();\n\n\n\n position_errors.push(position_error);\n\n velocity_errors.push(velocity_error);\n\n avz_errors.push(avz_error);\n\n\n\n if position_error > max_position_error {\n\n max_position_error = position_error;\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 72, "score": 91522.34966898417 }, { "content": " println!(\"player_start normalized: {:?}\", normalized);\n\n println!(\"player_start: {:?}\", player_start);\n\n println!(\"expected player_end: {:?}\", player_end);\n\n println!(\"predicted player_end: {:?}\", predicted_player_end);\n\n }\n\n }\n\n }\n\n\n\n println!();\n\n println!();\n\n println!(\"---------------------------------------------------------\");\n\n println!(\"{:?}\", controller);\n\n println!(\"max position error: {}\", max_position_error);\n\n println!(\"rms position error: {}\", rms(&position_errors));\n\n println!(\n\n \"50th percentile position error: {}\",\n\n percentile_value(&mut position_errors, 50.0)\n\n );\n\n println!(\n\n \"95th percentile position error: {}\",\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 73, "score": 91517.44336597652 }, { "content": " }\n\n if velocity_error > max_velocity_error {\n\n max_velocity_error = velocity_error;\n\n }\n\n if avz_error > max_avz_error {\n\n max_avz_error = avz_error;\n\n }\n\n\n\n //println!(\"position error: {}\", position_error);\n\n //if position_error > 20.0 {\n\n if velocity_error > 40.0 {\n\n let normalized = predict::sample::normalized_player_rounded(&player_start);\n\n println!();\n\n println!(\"---------------------------------------------------------\");\n\n println!(\n\n \"position error: {}, velocity_error: {}, avz_error: {}\",\n\n position_error, velocity_error, avz_error\n\n );\n\n let v = player_start.velocity;\n\n println!(\"rg '{},{},{}' -- *.csv\", v.x, v.y, v.z);\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 74, "score": 91517.16905277994 }, { "content": " percentile_value(&mut position_errors, 95.0)\n\n );\n\n println!(\n\n \"99th percentile position error: {}\",\n\n percentile_value(&mut position_errors, 99.0)\n\n );\n\n println!(\n\n \"99.9th percentile position error: {}\",\n\n percentile_value(&mut position_errors, 99.9)\n\n );\n\n\n\n println!(\"max velocity error: {}\", max_velocity_error);\n\n println!(\"rms velocity error: {}\", rms(&velocity_errors));\n\n println!(\n\n \"50th percentile velocity error: 
{}\",\n\n percentile_value(&mut velocity_errors, 50.0)\n\n );\n\n println!(\n\n \"95th percentile velocity error: {}\",\n\n percentile_value(&mut velocity_errors, 95.0)\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 75, "score": 91516.95228822879 }, { "content": " assert!(max_position_error < 20.0);\n\n assert!(rms(&position_errors) < 0.6);\n\n assert!(percentile_value(&mut position_errors, 50.0) < 0.5);\n\n assert!(percentile_value(&mut position_errors, 95.0) < 1.5);\n\n assert!(percentile_value(&mut position_errors, 99.0) < 2.5);\n\n assert!(percentile_value(&mut position_errors, 99.9) < 7.5);\n\n\n\n assert!(rms(&velocity_errors) < 9.5);\n\n assert!(rms(&avz_errors) < 0.15);\n\n}\n\n\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 76, "score": 91516.93651574614 }, { "content": " );\n\n println!(\n\n \"99th percentile velocity error: {}\",\n\n percentile_value(&mut velocity_errors, 99.0)\n\n );\n\n println!(\n\n \"99.9th percentile velocity error: {}\",\n\n percentile_value(&mut velocity_errors, 99.9)\n\n );\n\n\n\n println!(\"max avz error: {}\", max_avz_error);\n\n println!(\"rms avz error: {}\", rms(&avz_errors));\n\n println!(\"50th percentile avz error: {}\", percentile_value(&mut avz_errors, 50.0));\n\n println!(\"95th percentile avz error: {}\", percentile_value(&mut avz_errors, 95.0));\n\n println!(\"99th percentile avz error: {}\", percentile_value(&mut avz_errors, 99.0));\n\n println!(\"99.9th percentile avz error: {}\", percentile_value(&mut avz_errors, 99.9));\n\n println!();\n\n println!();\n\n\n\n // TODO investigate the worst offenders and reduce these values\n", "file_path": "predict/tests/compare_with_sample_files.rs", "rank": 77, "score": 91516.80251945769 }, { "content": "fn update_simulation_visualization(bot: &BotState, plan_result: &PlanResult) {\n\n let PlanResult {\n\n plan,\n\n visualization_lines: lines,\n\n visualization_points: points,\n\n ..\n\n } = plan_result;\n\n\n\n let mut visualization_lines = LINES.write().unwrap();\n\n visualization_lines.clear();\n\n\n\n // lines directly from plan result\n\n visualization_lines.append(&mut lines.clone());\n\n\n\n // white line showing best planned path\n\n if let Some(ref plan) = bot.plan {\n\n visualization_lines.append(&mut plan_lines(&plan, Point3::new(1.0, 1.0, 1.0)));\n\n }\n\n\n\n // blue line showing most recently calculated path\n", "file_path": "brick/src/main.rs", "rank": 78, "score": 91008.26187065193 }, { "content": "/// guess best point on ball to hit, get the heading at that point\n\npub fn simple_desired_contact(ball: &BallState, desired_ball_position: &Vector3<f32>) -> DesiredContact {\n\n let desired_vector = Unit::new_normalize(desired_ball_position - ball.position);\n\n let desired_velocity = 3000.0 * desired_vector.into_inner();\n\n let velocity_delta = desired_velocity - ball.velocity;\n\n\n\n // this is pretty crude, doesn't even consider that the ball will undergo gravity after the\n\n // hit! 
but should be good enough for us here for now\n\n let impulse_direction = Unit::new_normalize(velocity_delta);\n\n let ball_normal = -1.0 * impulse_direction.into_inner();\n\n\n\n DesiredContact {\n\n position: ball.position + BALL_COLLISION_RADIUS * ball_normal,\n\n heading: -1.0 * ball_normal,\n\n }\n\n}\n\n\n", "file_path": "brain/src/play.rs", "rank": 79, "score": 90668.26837218515 }, { "content": "pub fn ball_collides(ball: &BallState, player: &PlayerState) -> bool {\n\n (closest_point_for_collision(ball, player) - ball.position).norm() < BALL_COLLISION_RADIUS\n\n}\n\n\n", "file_path": "predict/src/player.rs", "rank": 80, "score": 88117.44811873179 }, { "content": "fn ground_turn_prediction(\n\n current: &PlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n) -> Result<PlayerPrediction, String> {\n\n // we don't have transformations for single ticks, but we'll do some special handling of\n\n // this case as we need it for car-ball collisions\n\n let original_time_step = time_step;\n\n let time_step = if time_step == TICK { 2.0 * TICK } else { time_step };\n\n\n\n let quad = ground_turn_quad_tranformations(current, controller, time_step);\n\n\n\n // TODO error\n\n let mut x1y1 = quad[0].ok_or_else(|| {\n\n format!(\n\n \"Missing turn x1y1 for player: {:?} & controller: {:?}\",\n\n sample::normalized_player(&current, false, false),\n\n controller\n\n )\n\n })?;\n", "file_path": "predict/src/player.rs", "rank": 81, "score": 86340.53431925841 }, { "content": "//// since the ray collision algorithm doesn't allow for the ray ending early, but does account\n\n//// for point of origin, we just test it in both directions for a complete line test. minimize\n\n//// the overhead by using the previous position as the ray origin first, assuming we'll\n\n//// mostly be moving *towards* the desired position for most expanded a* paths\n\npub fn line_collides_bounding_box(bounding_box: &BoundingBox, start: Vector3<f32>, end: Vector3<f32>) -> bool {\n\n ray_collides_bounding_box(&bounding_box, start, end) && ray_collides_bounding_box(&bounding_box, end, start)\n\n}\n\n\n", "file_path": "brain/src/plan.rs", "rank": 82, "score": 82855.9513637187 }, { "content": "// https://bheisler.github.io/post/writing-gpu-accelerated-path-tracer-part-3/\n\n// https://gamedev.stackexchange.com/a/18459/4929\n\npub fn ray_collides_bounding_box(bounding_box: &BoundingBox, start: Vector3<f32>, end: Vector3<f32>) -> bool {\n\n let dir = end - start;\n\n let dir_inv = Vector3::new(1.0 / dir.x, 1.0 / dir.y, 1.0 / dir.z);\n\n\n\n let mut txmin = (bounding_box.min_x - start.x) * dir_inv.x;\n\n let mut txmax = (bounding_box.max_x - start.x) * dir_inv.x;\n\n\n\n if txmin > txmax {\n\n mem::swap(&mut txmin, &mut txmax);\n\n }\n\n\n\n let mut tymin = (bounding_box.min_y - start.y) * dir_inv.y;\n\n let mut tymax = (bounding_box.max_y - start.y) * dir_inv.y;\n\n\n\n if tymin > tymax {\n\n mem::swap(&mut tymin, &mut tymax);\n\n }\n\n\n\n if txmin > tymax || tymin > txmax {\n\n return false;\n", "file_path": "brain/src/plan.rs", "rank": 83, "score": 82854.29875989299 }, { "content": "pub fn trajectory_enters_soccar_goal(ball: &BallState) -> bool {\n\n // FIXME check full ball trajectory, with bouncing\n\n //let goal = Vector3::new(0.0, BACK_WALL_DISTANCE, GOAL_Z / 2.0);\n\n let goal = na::Vector2::new(0.0, BACK_WALL_DISTANCE);\n\n let pos = na::Vector2::new(ball.position.x, ball.position.y);\n\n let v = na::Vector2::new(ball.velocity.x, ball.velocity.y);\n\n\n\n // check if 2d ball velocty is towards goal\n\n // 
TODO use exact angle of left side and right side of goal from current ball position instead\n\n // of the dot product approximate check\n\n (goal - pos).normalize().dot(&v.normalize()) > 0.95\n\n}\n\n\n", "file_path": "predict/src/ball.rs", "rank": 84, "score": 82523.51265513619 }, { "content": "fn bot_logic_loop(sender: Sender<PlanResult>, receiver: Receiver<(GameState, BotState)>) {\n\n let mut model = brain::get_model();\n\n loop {\n\n let (mut game, mut bot) = receiver.recv().expect(\"Couldn't receive game state\");\n\n\n\n // make sure we have the latest, drop earlier states\n\n while let Ok((g, b)) = receiver.try_recv() {\n\n game = g;\n\n bot = b;\n\n }\n\n\n\n let plan_result = brain::play::play(&mut model, &game, &mut bot);\n\n sender.send(plan_result).expect(\"Failed to send plan result\");\n\n }\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 85, "score": 82042.77881412214 }, { "content": "#[allow(clippy::needless_collect)] // it's actually needed as rev does not work after unfold\n\nfn reverse_path(\n\n parents: &ParentsMap,\n\n initial_index: usize,\n\n initial_is_secondary: bool,\n\n initial_player: &PlayerState,\n\n initial_cost: f32,\n\n) -> Plan {\n\n let path = itertools::unfold((initial_index, initial_is_secondary), |vals| {\n\n let index = (*vals).0;\n\n let is_secondary = (*vals).1;\n\n parents.get_index(index).map(|(_rounded, (v1, maybe_v2))| {\n\n let vertex = if is_secondary { maybe_v2.as_ref().unwrap() } else { v1 };\n\n (*vals).0 = vertex.parent_index;\n\n (*vals).1 = vertex.parent_is_secondary;\n\n let player = if index == initial_index {\n\n initial_player.clone()\n\n } else {\n\n vertex.player.clone()\n\n };\n\n let cost = if index == initial_index {\n", "file_path": "brain/src/plan.rs", "rank": 86, "score": 80724.95916002918 }, { "content": "fn bot_logic_loop_test(sender: Sender<PlanResult>, receiver: Receiver<(GameState, BotState)>) {\n\n let mut gilrs = Gilrs::new().unwrap();\n\n let mut gamepad = Gamepad::default();\n\n let mut model = brain::get_model();\n\n\n\n let mut loop_helper = LoopHelper::builder().build_with_target_rate(1000.0); // limit to 1000 FPS\n\n //.build_with_target_rate(0.2); // limit to 0.2 FPS\n\n\n\n loop {\n\n loop_helper.loop_start();\n\n let (mut game, mut bot) = receiver.recv().expect(\"Couldn't receive game state\");\n\n\n\n // make sure we have the latest, drop earlier states\n\n while let Ok((g, b)) = receiver.try_recv() {\n\n game = g;\n\n bot = b;\n\n }\n\n\n\n update_gamepad(&mut gilrs, &mut gamepad);\n\n if !gamepad.select_toggled {\n", "file_path": "brick/src/main.rs", "rank": 87, "score": 80245.34270831264 }, { "content": "pub fn get_model() -> impl HeuristicModel {\n\n // TODO config file or something\n\n //let path = \"./heuristic/train/nn/simple_throttle_cost_saved_model/1552341051/\";\n\n //NeuralHeuristic::try_new(path).expect(\"Failed to initialize NeuralHeuristic\")\n\n\n\n // TODO config file or something\n\n let path = \"./time.csv\";\n\n KnnHeuristic::try_new(path).expect(\"Failed to initialize KnnHeuristic\")\n\n\n\n //BasicHeuristic::default()\n\n}\n", "file_path": "brain/src/lib.rs", "rank": 88, "score": 77233.36109787448 }, { "content": "fn interpolate_transformation_halfway(\n\n transformation: driving_model::PlayerTransformation,\n\n current: &PlayerState,\n\n) -> driving_model::PlayerTransformation {\n\n let mut transformation = transformation;\n\n transformation.translation_x /= 2;\n\n transformation.translation_y /= 2;\n\n transformation.end_yaw /= 2.0;\n\n transformation.end_angular_velocity_z 
=\n\n current.angular_velocity.z + (transformation.end_angular_velocity_z - current.angular_velocity.z) / 2.0;\n\n\n\n // NOTE start local vx/vy is with car facing forwards in y direction, but end_velocity is with car\n\n // facing leftwards, ie negative x direction. so we align them first before interpolation\n\n let start_velocity = Vector3::new(\n\n transformation.start_local_vx as f32,\n\n transformation.start_local_vy as f32,\n\n 0.0,\n\n );\n\n let rotation = Rotation3::from_euler_angles(0.0, 0.0, std::f32::consts::PI / 2.0); // anti-clockwise\n\n let start_velocity = rotation * start_velocity;\n", "file_path": "predict/src/player.rs", "rank": 89, "score": 76915.82275578206 }, { "content": "fn plan_lines(plan: &[PlanStep], color: Point3<f32>) -> Vec<Line> {\n\n let mut lines = Vec::with_capacity(plan.len());\n\n let pos = plan\n\n .get(0)\n\n .map(|(p, _, _)| p.position)\n\n .unwrap_or_else(|| Vector3::new(0.0, 0.0, 0.0));\n\n let mut last_point = Point3::new(pos.x, pos.y, pos.z);\n\n for (ps, _, _) in plan {\n\n let point = Point3::new(ps.position.x, ps.position.y, ps.position.z + 0.1);\n\n lines.push((last_point, point, color));\n\n last_point = point;\n\n }\n\n lines\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 90, "score": 76694.12100400763 }, { "content": "pub fn closest_plan_index(given_player: &PlayerState, plan: &[PlanStep]) -> usize {\n\n assert!(!plan.is_empty());\n\n\n\n let mut index = 0;\n\n let mut min_distance = std::f32::MAX;\n\n\n\n for (i, (player, _, _)) in plan.iter().enumerate() {\n\n let distance = (given_player.position - player.position).norm();\n\n if distance < min_distance {\n\n min_distance = distance;\n\n index = i\n\n }\n\n }\n\n\n\n index\n\n}\n\n\n", "file_path": "brain/src/play.rs", "rank": 91, "score": 75465.96543076662 }, { "content": "pub fn get_collision(\n\n ball_trajectory: &[BallState],\n\n player: &PlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n) -> Option<(PlayerState, BallState, Vector3<f32>, f32)> {\n\n let num_ticks: usize = (time_step / TICK).round() as usize;\n\n assert!(num_ticks % 2 == 0);\n\n\n\n // 2-tick steps\n\n let mut last = player.clone();\n\n for step in 1..=(num_ticks / 2) {\n\n if let Ok(next) = next_player_state(&last, controller, TICK * 2.0) {\n\n if let Some(ball) = ball_trajectory.get(step * 2) {\n\n if ball_collides(ball, &next) {\n\n // check if one tick earlier collides, since we are using 2-tick steps\n\n if let Ok(next_single_tick) = next_player_state(&last, controller, TICK) {\n\n let single_tick_ball = &ball_trajectory[step * 2 - 1];\n\n if ball_collides(single_tick_ball, &next_single_tick) {\n\n let collision_time = (2 * step - 1) as f32 * TICK;\n", "file_path": "predict/src/player.rs", "rank": 92, "score": 75306.55501168605 }, { "content": "fn ground_turn_quad_tranformations(\n\n current: &PlayerState,\n\n controller: &BrickControllerState,\n\n time_step: f32,\n\n) -> [Option<&'static driving_model::PlayerTransformation>; 4] {\n\n // TODO handle missing values properly: go lower/higher to find another point to use as an\n\n // interpolation anchor\n\n //println!(\"x1y1\");\n\n let normalized = sample::normalized_player(&current, false, false);\n\n let mut x1y1 = ground_turn_matching_transformation(normalized, &controller, time_step, -3, -3, None, None);\n\n\n\n //println!(\"x2y1\");\n\n let normalized = sample::normalized_player(&current, true, false);\n\n let mut x2y1 = ground_turn_matching_transformation(normalized, &controller, time_step, 3, -3, None, None);\n\n\n\n // 
when we fail in on direction, search in the other\n\n if x1y1.is_some() && x2y1.is_none() {\n\n //println!(\"-- x2y1 fallback --\");\n\n let x1y1_transformation = x1y1.as_ref().unwrap();\n\n let skip = x1y1_transformation.normalized_player(current.angular_velocity.z);\n", "file_path": "predict/src/player.rs", "rank": 93, "score": 74599.54881069326 }, { "content": "type Line = (Point3<f32>, Point3<f32>, Point3<f32>);\n\n\n\nlazy_static! {\n\n static ref GAME_STATE: RwLock<GameState> = RwLock::new(GameState::default());\n\n static ref LINES: RwLock<Vec<Line>> = RwLock::new(vec![]);\n\n static ref POINTS: RwLock<Vec<(Point3<f32>, Point3<f32>)>> = RwLock::new(vec![]);\n\n}\n\n\n", "file_path": "brick/src/main.rs", "rank": 94, "score": 74342.6727986946 }, { "content": "fn record_missing_record_state(\n\n rlbot: &rlbot::RLBot,\n\n input: &ControllerState,\n\n index: &mut HashMap<predict::sample::NormalizedPlayerState, PlayerState>,\n\n record_state: &mut RecordState,\n\n adjustment: &mut Adjustment,\n\n) -> Result<(), Box<dyn Error>> {\n\n record_state.records.clear();\n\n rlbot.update_player_input(0, &input)?;\n\n\n\n loop {\n\n // waits and checks the tick to ensure it meets our conditions. and it records the first tick\n\n record_state.set_game_state_accurately(&rlbot, index, adjustment)?;\n\n\n\n loop {\n\n //rlbot.update_player_input(0, &input)?;\n\n let tick = next_flat(&rlbot);\n\n record_state.record(&tick);\n\n\n\n if record_state.sample_complete() {\n", "file_path": "record/src/main.rs", "rank": 95, "score": 74182.22861973986 }, { "content": "fn throttle_left() -> ControllerState {\n\n let mut input = ControllerState::default();\n\n input.throttle = 1.0;\n\n input.steer = -1.0;\n\n input\n\n}\n\n\n", "file_path": "record/src/main.rs", "rank": 96, "score": 72572.96087564385 }, { "content": "fn boost_straight() -> ControllerState {\n\n let mut input = ControllerState::default();\n\n input.boost = true;\n\n input\n\n}\n\n\n", "file_path": "record/src/main.rs", "rank": 97, "score": 72572.96087564385 }, { "content": "fn throttle_straight() -> ControllerState {\n\n let mut input = ControllerState::default();\n\n input.throttle = 1.0;\n\n input\n\n}\n\n\n", "file_path": "record/src/main.rs", "rank": 98, "score": 72572.96087564385 }, { "content": "fn throttle_right() -> ControllerState {\n\n let mut input = ControllerState::default();\n\n input.throttle = 1.0;\n\n input.steer = 1.0;\n\n input\n\n}\n\n\n", "file_path": "record/src/main.rs", "rank": 99, "score": 72572.96087564385 } ]
Rust
utoipa-gen/src/openapi.rs
juhaku/utoipa
070b00c13b41040e9605c62a0d7c3b5fcf04899c
use proc_macro2::Ident; use proc_macro_error::ResultExt; use syn::{ parenthesized, parse::{Parse, ParseStream}, punctuated::Punctuated, token::{And, Comma}, Attribute, Error, ExprPath, GenericParam, Generics, Token, }; use proc_macro2::TokenStream; use quote::{format_ident, quote, quote_spanned, ToTokens}; use crate::{ parse_utils, path::PATH_STRUCT_PREFIX, security_requirement::SecurityRequirementAttr, Array, ExternalDocs, }; mod info; #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct OpenApiAttr { handlers: Punctuated<ExprPath, Comma>, components: Punctuated<Component, Comma>, modifiers: Punctuated<Modifier, Comma>, security: Option<Array<SecurityRequirementAttr>>, tags: Option<Array<Tag>>, external_docs: Option<ExternalDocs>, } pub fn parse_openapi_attrs(attrs: &[Attribute]) -> Option<OpenApiAttr> { attrs .iter() .find(|attribute| attribute.path.is_ident("openapi")) .map(|attribute| attribute.parse_args::<OpenApiAttr>().unwrap_or_abort()) } impl Parse for OpenApiAttr { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE: &str = "unexpected attribute, expected any of: handlers, components, modifiers, security, tags, external_docs"; let mut openapi = OpenApiAttr::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new(error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE, error)) })?; let attribute = &*ident.to_string(); match attribute { "handlers" => { openapi.handlers = parse_utils::parse_punctuated_within_parenthesis(input)?; } "components" => { openapi.components = parse_utils::parse_punctuated_within_parenthesis(input)? } "modifiers" => { openapi.modifiers = parse_utils::parse_punctuated_within_parenthesis(input)?; } "security" => { let security; parenthesized!(security in input); openapi.security = Some(parse_utils::parse_groups(&security)?) } "tags" => { let tags; parenthesized!(tags in input); openapi.tags = Some(parse_utils::parse_groups(&tags)?); } "external_docs" => { let external_docs; parenthesized!(external_docs in input); openapi.external_docs = Some(external_docs.parse()?); } _ => { return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE)); } } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(openapi) } } #[cfg_attr(feature = "debug", derive(Debug))] struct Component { ty: Ident, generics: Generics, } impl Component { fn has_lifetime_generics(&self) -> bool { self.generics .params .iter() .any(|generic| matches!(generic, GenericParam::Lifetime(_))) } } impl Parse for Component { fn parse(input: ParseStream) -> syn::Result<Self> { Ok(Component { ty: input.parse()?, generics: input.parse()?, }) } } #[cfg_attr(feature = "debug", derive(Debug))] struct Modifier { and: And, ident: Ident, } impl ToTokens for Modifier { fn to_tokens(&self, tokens: &mut TokenStream) { let and = &self.and; let ident = &self.ident; tokens.extend(quote! 
{ #and #ident }) } } impl Parse for Modifier { fn parse(input: ParseStream) -> syn::Result<Self> { Ok(Self { and: input.parse()?, ident: input.parse()?, }) } } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] struct Tag { name: String, description: Option<String>, external_docs: Option<ExternalDocs>, } impl Parse for Tag { fn parse(input: ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE: &str = "unexpected token, expected any of: name, description, external_docs"; let mut tag = Tag::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { syn::Error::new(error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE, error)) })?; let attribute_name = &*ident.to_string(); match attribute_name { "name" => tag.name = parse_utils::parse_next_literal_str(input)?, "description" => { tag.description = Some(parse_utils::parse_next_literal_str(input)?) } "external_docs" => { let content; parenthesized!(content in input); tag.external_docs = Some(content.parse::<ExternalDocs>()?); } _ => return Err(syn::Error::new(ident.span(), EXPECTED_ATTRIBUTE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(tag) } } impl ToTokens for Tag { fn to_tokens(&self, tokens: &mut TokenStream) { let name = &self.name; tokens.extend(quote! { utoipa::openapi::tag::TagBuilder::new().name(#name) }); if let Some(ref description) = self.description { tokens.extend(quote! { .description(Some(#description)) }); } if let Some(ref external_docs) = self.external_docs { tokens.extend(quote! { .external_docs(Some(#external_docs)) }); } tokens.extend(quote! { .build() }) } } pub(crate) struct OpenApi(pub OpenApiAttr, pub Ident); impl ToTokens for OpenApi { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let OpenApi(attributes, ident) = self; let info = info::impl_info(); let components = impl_components(&attributes.components, tokens).map(|components| { quote! { .components(Some(#components)) } }); let modifiers = &attributes.modifiers; let modifiers_len = modifiers.len(); modifiers.iter().for_each(|modifier| { let assert_modifier = format_ident!("_Assert{}", modifier.ident); let ident = &modifier.ident; quote_spanned! {modifier.ident.span()=> struct #assert_modifier where #ident : utoipa::Modify; }; }); let path_items = impl_paths(&attributes.handlers); let securities = attributes.security.as_ref().map(|securities| { quote! { .security(Some(#securities)) } }); let tags = attributes.tags.as_ref().map(|tags| { quote! { .tags(Some(#tags)) } }); let external_docs = attributes.external_docs.as_ref().map(|external_docs| { quote! { .external_docs(Some(#external_docs)) } }); tokens.extend(quote! { impl utoipa::OpenApi for #ident { fn openapi() -> utoipa::openapi::OpenApi { use utoipa::{Component, Path}; let mut openapi = utoipa::openapi::OpenApiBuilder::new() .info(#info) .paths(#path_items) #components #securities #tags #external_docs.build(); let _mods: [&dyn utoipa::Modify; #modifiers_len] = [#modifiers]; _mods.iter().for_each(|modifier| modifier.modify(&mut openapi)); openapi } } }); } } fn impl_components( components: &Punctuated<Component, Comma>, tokens: &mut TokenStream, ) -> Option<TokenStream> { if !components.is_empty() { let mut components_tokens = components.iter().fold( quote! 
{ utoipa::openapi::ComponentsBuilder::new() }, |mut schema, component| { let ident = &component.ty; let span = ident.span(); let component_name = &*ident.to_string(); let (_, ty_generics, _) = component.generics.split_for_impl(); let assert_ty_generics = if component.has_lifetime_generics() { Some(quote! {<'static>}) } else { Some(ty_generics.to_token_stream()) }; let assert_component = format_ident!("_AssertComponent{}", component_name); tokens.extend(quote_spanned! {span=> struct #assert_component where #ident #assert_ty_generics: utoipa::Component; }); let ty_generics = if component.has_lifetime_generics() { None } else { Some(ty_generics) }; schema.extend(quote! { .component(#component_name, <#ident #ty_generics>::component()) }); schema }, ); components_tokens.extend(quote! { .build() }); Some(components_tokens) } else { None } } fn impl_paths(handler_paths: &Punctuated<ExprPath, Comma>) -> TokenStream { handler_paths.iter().fold( quote! { utoipa::openapi::path::PathsBuilder::new() }, |mut paths, handler| { let segments = handler.path.segments.iter().collect::<Vec<_>>(); let handler_fn_name = &*segments.last().unwrap().ident.to_string(); let tag = &*segments .iter() .take(segments.len() - 1) .map(|part| part.ident.to_string()) .collect::<Vec<_>>() .join("::"); let handler_ident = format_ident!("{}{}", PATH_STRUCT_PREFIX, handler_fn_name); let handler_ident_name = &*handler_ident.to_string(); let usage = syn::parse_str::<ExprPath>( &vec![ if tag.is_empty() { None } else { Some(tag) }, Some(handler_ident_name), ] .into_iter() .flatten() .collect::<Vec<_>>() .join("::"), ) .unwrap(); paths.extend(quote! { .path(#usage::path(), #usage::path_item(Some(#tag))) }); paths }, ) }
use proc_macro2::Ident; use proc_macro_error::ResultExt; use syn::{ parenthesized, parse::{Parse, ParseStream}, punctuated::Punctuated, token::{And, Comma}, Attribute, Error, ExprPath, GenericParam, Generics, Token, }; use proc_macro2::TokenStream; use quote::{format_ident, quote, quote_spanned, ToTokens}; use crate::{ parse_utils, path::PATH_STRUCT_PREFIX, security_requirement::SecurityRequirementAttr, Array, ExternalDocs, }; mod info; #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct OpenApiAttr { handlers: Punctuated<ExprPath, Comma>, components: Punctuated<Component, Comma>, modifiers: Punctuated<Modifier, Comma>, security: Option<Array<SecurityRequirementAttr>>, tags: Option<Array<Tag>>, external_docs: Option<ExternalDocs>, } pub fn parse_openapi_attrs(attrs: &[Attribute]) -> Option<OpenApiAttr> { attrs .iter() .find(|attribute| attribute.path.is_ident("openapi")) .map(|attribute| attribute.parse_args::<OpenApiAttr>().unwrap_or_abort()) } impl Parse for OpenApiAttr { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE: &str = "unexpected attribute, expected any of: handlers, components, modifiers, security, tags, external_docs"; let mut openapi = OpenApiAttr::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new(error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE, error)) })?; let attribute = &*ident.to_string(); match attribute { "handlers" => { openapi.handlers = parse_utils::parse_punctuated_within_parenthesis(input)?; } "components" => { openapi.components = parse_utils::parse_punctuated_within_parenthesis(input)? } "modifiers" => { openapi.modifiers = parse_utils::parse_punctuated_within_parenthesis(input)?; } "security" => { let security; parenthesized!(security in input); openapi.security = Some(parse_utils::parse_groups(&security)?) } "tags" => { let tags; parenthesized!(tags in input); openapi.tags = Some(parse_utils::parse_groups(&tags)?); } "external_docs" => { let external_docs; parenthesized!(external_docs in input); openapi.external_docs = Some(external_docs.parse()?); } _ => { return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE)); } } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(openapi) } } #[cfg_attr(feature = "debug", derive(Debug))] struct Component { ty: Ident, generics: Generics, } impl Component {
} impl Parse for Component { fn parse(input: ParseStream) -> syn::Result<Self> { Ok(Component { ty: input.parse()?, generics: input.parse()?, }) } } #[cfg_attr(feature = "debug", derive(Debug))] struct Modifier { and: And, ident: Ident, } impl ToTokens for Modifier { fn to_tokens(&self, tokens: &mut TokenStream) { let and = &self.and; let ident = &self.ident; tokens.extend(quote! { #and #ident }) } } impl Parse for Modifier { fn parse(input: ParseStream) -> syn::Result<Self> { Ok(Self { and: input.parse()?, ident: input.parse()?, }) } } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] struct Tag { name: String, description: Option<String>, external_docs: Option<ExternalDocs>, } impl Parse for Tag { fn parse(input: ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE: &str = "unexpected token, expected any of: name, description, external_docs"; let mut tag = Tag::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { syn::Error::new(error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE, error)) })?; let attribute_name = &*ident.to_string(); match attribute_name { "name" => tag.name = parse_utils::parse_next_literal_str(input)?, "description" => { tag.description = Some(parse_utils::parse_next_literal_str(input)?) } "external_docs" => { let content; parenthesized!(content in input); tag.external_docs = Some(content.parse::<ExternalDocs>()?); } _ => return Err(syn::Error::new(ident.span(), EXPECTED_ATTRIBUTE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(tag) } } impl ToTokens for Tag { fn to_tokens(&self, tokens: &mut TokenStream) { let name = &self.name; tokens.extend(quote! { utoipa::openapi::tag::TagBuilder::new().name(#name) }); if let Some(ref description) = self.description { tokens.extend(quote! { .description(Some(#description)) }); } if let Some(ref external_docs) = self.external_docs { tokens.extend(quote! { .external_docs(Some(#external_docs)) }); } tokens.extend(quote! { .build() }) } } pub(crate) struct OpenApi(pub OpenApiAttr, pub Ident); impl ToTokens for OpenApi { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let OpenApi(attributes, ident) = self; let info = info::impl_info(); let components = impl_components(&attributes.components, tokens).map(|components| { quote! { .components(Some(#components)) } }); let modifiers = &attributes.modifiers; let modifiers_len = modifiers.len(); modifiers.iter().for_each(|modifier| { let assert_modifier = format_ident!("_Assert{}", modifier.ident); let ident = &modifier.ident; quote_spanned! {modifier.ident.span()=> struct #assert_modifier where #ident : utoipa::Modify; }; }); let path_items = impl_paths(&attributes.handlers); let securities = attributes.security.as_ref().map(|securities| { quote! { .security(Some(#securities)) } }); let tags = attributes.tags.as_ref().map(|tags| { quote! { .tags(Some(#tags)) } }); let external_docs = attributes.external_docs.as_ref().map(|external_docs| { quote! { .external_docs(Some(#external_docs)) } }); tokens.extend(quote! 
{ impl utoipa::OpenApi for #ident { fn openapi() -> utoipa::openapi::OpenApi { use utoipa::{Component, Path}; let mut openapi = utoipa::openapi::OpenApiBuilder::new() .info(#info) .paths(#path_items) #components #securities #tags #external_docs.build(); let _mods: [&dyn utoipa::Modify; #modifiers_len] = [#modifiers]; _mods.iter().for_each(|modifier| modifier.modify(&mut openapi)); openapi } } }); } } fn impl_components( components: &Punctuated<Component, Comma>, tokens: &mut TokenStream, ) -> Option<TokenStream> { if !components.is_empty() { let mut components_tokens = components.iter().fold( quote! { utoipa::openapi::ComponentsBuilder::new() }, |mut schema, component| { let ident = &component.ty; let span = ident.span(); let component_name = &*ident.to_string(); let (_, ty_generics, _) = component.generics.split_for_impl(); let assert_ty_generics = if component.has_lifetime_generics() { Some(quote! {<'static>}) } else { Some(ty_generics.to_token_stream()) }; let assert_component = format_ident!("_AssertComponent{}", component_name); tokens.extend(quote_spanned! {span=> struct #assert_component where #ident #assert_ty_generics: utoipa::Component; }); let ty_generics = if component.has_lifetime_generics() { None } else { Some(ty_generics) }; schema.extend(quote! { .component(#component_name, <#ident #ty_generics>::component()) }); schema }, ); components_tokens.extend(quote! { .build() }); Some(components_tokens) } else { None } } fn impl_paths(handler_paths: &Punctuated<ExprPath, Comma>) -> TokenStream { handler_paths.iter().fold( quote! { utoipa::openapi::path::PathsBuilder::new() }, |mut paths, handler| { let segments = handler.path.segments.iter().collect::<Vec<_>>(); let handler_fn_name = &*segments.last().unwrap().ident.to_string(); let tag = &*segments .iter() .take(segments.len() - 1) .map(|part| part.ident.to_string()) .collect::<Vec<_>>() .join("::"); let handler_ident = format_ident!("{}{}", PATH_STRUCT_PREFIX, handler_fn_name); let handler_ident_name = &*handler_ident.to_string(); let usage = syn::parse_str::<ExprPath>( &vec![ if tag.is_empty() { None } else { Some(tag) }, Some(handler_ident_name), ] .into_iter() .flatten() .collect::<Vec<_>>() .join("::"), ) .unwrap(); paths.extend(quote! { .path(#usage::path(), #usage::path_item(Some(#tag))) }); paths }, ) }
fn has_lifetime_generics(&self) -> bool { self.generics .params .iter() .any(|generic| matches!(generic, GenericParam::Lifetime(_))) }
function_block-full_function
[]
Rust
src/main.rs
Drarig29/crypto-balance
b9f57923b4eb0c145ff7908c6d82425093225413
#[macro_use] extern crate rocket; extern crate chrono; extern crate dotenv; extern crate hex; extern crate hmac; extern crate mongodb; extern crate reqwest; extern crate serde; extern crate serde_json; extern crate sha2; mod aggregate; mod database; mod model; mod requests; mod utils; use chrono::{DateTime, Utc}; use dotenv::dotenv; use env::VarError; use rocket::fs::NamedFile; use rocket::http::{ContentType, Status}; use rocket::request::Request; use rocket::response::{self, Responder}; use rocket::serde::json::Json; use rocket::Response; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use std::path::{Path, PathBuf}; use std::{env, vec}; #[derive(Clone)] pub struct Environment { app_password: String, binance_key: String, binance_secret: String, nomics_key: String, mongodb_host: String, mongodb_port: String, mongodb_username: String, mongodb_password: String, } #[derive(Serialize, Deserialize)] pub struct AuthBody { password: String, } #[derive(Serialize, Deserialize)] pub struct RequestBody { password: String, conversion: String, start: String, end: String, } #[derive(Debug)] struct ApiResponse { status: Status, json: Value, } #[derive(Debug)] pub struct TimeSpan { start: DateTime<Utc>, end: DateTime<Utc>, } pub const BINANCE_API_BASE_URL: &str = "https://api.binance.com/sapi/v1/accountSnapshot"; pub const NOMICS_API_BASE_URL: &str = "https://api.nomics.com/v1/currencies/sparkline"; pub const ACCOUNT_TYPE: &str = "SPOT"; fn get_env_vars() -> Result<Environment, VarError> { let app_password = env::var("APPLICATION_PASSWORD")?; let binance_key = env::var("BINANCE_API_KEY")?; let binance_secret = env::var("BINANCE_API_SECRET")?; let nomics_key = env::var("NOMICS_API_KEY")?; let mongodb_host = env::var("MONGODB_HOST")?; let mongodb_port = env::var("MONGODB_PORT")?; let mongodb_username = env::var("MONGODB_USERNAME")?; let mongodb_password = env::var("MONGODB_PASSWORD")?; Ok(Environment { app_password, binance_key, binance_secret, nomics_key, mongodb_host, mongodb_port, mongodb_username, mongodb_password, }) } #[get("/")] async fn index() -> Option<NamedFile> { NamedFile::open("static/index.html").await.ok() } #[get("/<file..>")] async fn files(file: PathBuf) -> Option<NamedFile> { NamedFile::open(Path::new("static/").join(file)).await.ok() } impl<'r> Responder<'r, 'r> for ApiResponse { fn respond_to(self, req: &Request) -> response::Result<'r> { Response::build_from(self.json.respond_to(req).unwrap()) .status(self.status) .header(ContentType::JSON) .ok() } } #[post("/auth", format = "json", data = "<body>")] async fn auth(body: Json<AuthBody>) -> ApiResponse { let env_variables = match get_env_vars() { Ok(res) => res, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; if body.password != env_variables.app_password { return ApiResponse { status: Status::Forbidden, json: json!("Incorrect password".to_string()), }; } ApiResponse { status: Status::Ok, json: json!("Success".to_string()), } } #[post("/api", format = "json", data = "<body>")] async fn api(body: Json<RequestBody>) -> ApiResponse { let env_variables = match get_env_vars() { Ok(res) => res, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; if body.password != env_variables.app_password { return ApiResponse { status: Status::Forbidden, json: json!("Incorrect password".to_string()), }; } println!("Start: {}\nEnd: {}", body.start, body.end); let start = DateTime::parse_from_rfc3339(&body.start) .unwrap() 
.with_timezone(&Utc); let end = DateTime::parse_from_rfc3339(&body.end) .unwrap() .with_timezone(&Utc); let mongodb_url = format!( "mongodb://{}:{}@{}:{}", env_variables.mongodb_username, env_variables.mongodb_password, env_variables.mongodb_host, env_variables.mongodb_port, ); let client = mongodb::Client::with_uri_str(&mongodb_url).await.unwrap(); let database = client.database("crypto-balance"); let available_snapshots = database::get_snapshots(&database, start, end).await; let needed_timespans = utils::get_timespans_to_retrieve(available_snapshots, start, end); if needed_timespans.is_empty() { let computed_snapshots = database::get_computed_snapshots(&database, start, end).await; let result = serde_json::to_value(&computed_snapshots).unwrap(); return ApiResponse { status: Status::Ok, json: result, }; } let split_by_30_days = utils::split_all_timespans_max_days(&needed_timespans, 30); let snapshots = match requests::get_all_snapshots( &env_variables, ACCOUNT_TYPE, 30, &split_by_30_days, ) .await { Ok(snapshots) => snapshots, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; database::push_snapshots(&database, snapshots).await; let assets = database::get_possessed_assets(&database).await; let split_by_45_days = utils::split_all_timespans_max_days(&needed_timespans, 45); let price_history = match requests::get_all_history( &env_variables, &assets, &body.conversion, &split_by_45_days, ) .await { Ok(price_history) => price_history, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; database::push_history(&database, price_history).await; let computed_snapshots = database::get_computed_snapshots(&database, start, end).await; let result = serde_json::to_value(&computed_snapshots).unwrap(); ApiResponse { status: Status::Ok, json: result, } } #[rocket::main] async fn main() { dotenv().ok(); let res = rocket::build() .mount("/", routes![index, auth, api, files]) .launch() .await; res.expect("An error occured while launching the rocket.") }
#[macro_use] extern crate rocket; extern crate chrono; extern crate dotenv; extern crate hex; extern crate hmac; extern crate mongodb; extern crate reqwest; extern crate serde; extern crate serde_json; extern crate sha2; mod aggregate; mod database; mod model; mod requests; mod utils; use chrono::{DateTime, Utc}; use dotenv::dotenv; use env::VarError; use rocket::fs::NamedFile; use rocket::http::{ContentType, Status}; use rocket::request::Request; use rocket::response::{self, Responder}; use rocket::serde::json::Json; use rocket::Response; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use std::path::{Path, PathBuf}; use std::{env, vec}; #[derive(Clone)] pub struct Environment { app_password: String, binance_key: String, binance_secret: String, nomics_key: String, mongodb_host: String, mongodb_port: String, mongodb_username: String, mongodb_password: String, } #[derive(Serialize, Deserialize)] pub struct AuthBody { password: String, } #[derive(Serialize, Deserialize)] pub struct RequestBody { password: String, conversion: String, start: String, end: String, } #[derive(Debug)] struct ApiResponse { status: Status, json: Value, } #[derive(Debug)] pub struct TimeSpan { start: DateTime<Utc>, end: DateTime<Utc>, } pub const BINANCE_API_BASE_URL: &str = "https://api.binance.com/sapi/v1/accountSnapshot"; pub const NOMICS_API_BASE_URL: &str = "https://api.nomics.com/v1/currencies/sparkline"; pub const ACCOUNT_TYPE: &str = "SPOT"; fn get_env_vars() -> Result<Environment, VarError> { let app_password = env::var("APPLICATION_PASSWORD")?; let binance_key = env::var("BINANCE_API_KEY")?; let binance_secret = env::var("BINANCE_API_SECRET")?; let nomics_key = env::var("NOMICS_API_KEY")?; let mongodb_host = env::var("MONGODB_HOST")?; let mongodb_port = env::var("MONGODB_PORT")?; let mongodb_username = env::var("MONGODB_USERNAME")?; let mongodb_password = env::var("MONGODB_PASSWORD")?;
} #[get("/")] async fn index() -> Option<NamedFile> { NamedFile::open("static/index.html").await.ok() } #[get("/<file..>")] async fn files(file: PathBuf) -> Option<NamedFile> { NamedFile::open(Path::new("static/").join(file)).await.ok() } impl<'r> Responder<'r, 'r> for ApiResponse { fn respond_to(self, req: &Request) -> response::Result<'r> { Response::build_from(self.json.respond_to(req).unwrap()) .status(self.status) .header(ContentType::JSON) .ok() } } #[post("/auth", format = "json", data = "<body>")] async fn auth(body: Json<AuthBody>) -> ApiResponse { let env_variables = match get_env_vars() { Ok(res) => res, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; if body.password != env_variables.app_password { return ApiResponse { status: Status::Forbidden, json: json!("Incorrect password".to_string()), }; } ApiResponse { status: Status::Ok, json: json!("Success".to_string()), } } #[post("/api", format = "json", data = "<body>")] async fn api(body: Json<RequestBody>) -> ApiResponse { let env_variables = match get_env_vars() { Ok(res) => res, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; if body.password != env_variables.app_password { return ApiResponse { status: Status::Forbidden, json: json!("Incorrect password".to_string()), }; } println!("Start: {}\nEnd: {}", body.start, body.end); let start = DateTime::parse_from_rfc3339(&body.start) .unwrap() .with_timezone(&Utc); let end = DateTime::parse_from_rfc3339(&body.end) .unwrap() .with_timezone(&Utc); let mongodb_url = format!( "mongodb://{}:{}@{}:{}", env_variables.mongodb_username, env_variables.mongodb_password, env_variables.mongodb_host, env_variables.mongodb_port, ); let client = mongodb::Client::with_uri_str(&mongodb_url).await.unwrap(); let database = client.database("crypto-balance"); let available_snapshots = database::get_snapshots(&database, start, end).await; let needed_timespans = utils::get_timespans_to_retrieve(available_snapshots, start, end); if needed_timespans.is_empty() { let computed_snapshots = database::get_computed_snapshots(&database, start, end).await; let result = serde_json::to_value(&computed_snapshots).unwrap(); return ApiResponse { status: Status::Ok, json: result, }; } let split_by_30_days = utils::split_all_timespans_max_days(&needed_timespans, 30); let snapshots = match requests::get_all_snapshots( &env_variables, ACCOUNT_TYPE, 30, &split_by_30_days, ) .await { Ok(snapshots) => snapshots, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; database::push_snapshots(&database, snapshots).await; let assets = database::get_possessed_assets(&database).await; let split_by_45_days = utils::split_all_timespans_max_days(&needed_timespans, 45); let price_history = match requests::get_all_history( &env_variables, &assets, &body.conversion, &split_by_45_days, ) .await { Ok(price_history) => price_history, Err(e) => { return ApiResponse { status: Status::InternalServerError, json: json!(e.to_string()), } } }; database::push_history(&database, price_history).await; let computed_snapshots = database::get_computed_snapshots(&database, start, end).await; let result = serde_json::to_value(&computed_snapshots).unwrap(); ApiResponse { status: Status::Ok, json: result, } } #[rocket::main] async fn main() { dotenv().ok(); let res = rocket::build() .mount("/", routes![index, auth, api, files]) .launch() .await; res.expect("An error occured while launching the 
rocket.") }
Ok(Environment { app_password, binance_key, binance_secret, nomics_key, mongodb_host, mongodb_port, mongodb_username, mongodb_password, })
call_expression
[ { "content": "pub fn make_aggregate_query(start: DateTime<Utc>, end: DateTime<Utc>) -> Vec<bson::Document> {\n\n vec![\n\n doc! {\n\n \"$match\": {\n\n \"time\": {\n\n \"$gte\": start,\n\n \"$lte\": end\n\n }\n\n }\n\n },\n\n doc! {\n\n \"$sort\": {\n\n \"time\": 1\n\n }\n\n },\n\n doc! {\n\n \"$lookup\": {\n\n \"from\": \"history\",\n\n \"localField\": \"time\",\n\n \"foreignField\": \"time\",\n", "file_path": "src/aggregate.rs", "rank": 0, "score": 158000.05639397242 }, { "content": "pub fn get_uri_escaped_datetime(datetime: DateTime<Utc>) -> String {\n\n let formatted = datetime.to_rfc3339_opts(SecondsFormat::Secs, true);\n\n formatted.replace(\":\", \"%3A\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 118203.96395617942 }, { "content": "pub fn split_timespan_max_days(timespan: &TimeSpan, max_days: i64) -> Vec<TimeSpan> {\n\n if (timespan.end - timespan.start).num_days() < max_days {\n\n return vec![TimeSpan {\n\n start: timespan.start,\n\n end: timespan.end,\n\n }];\n\n }\n\n\n\n let mut timespans: Vec<TimeSpan> = vec![];\n\n\n\n let mut current_start = timespan.start;\n\n let mut current_end = timespan.start + Duration::days(max_days - 1);\n\n\n\n while current_end < timespan.end {\n\n timespans.push(TimeSpan {\n\n start: current_start,\n\n end: current_end,\n\n });\n\n\n\n current_start = current_end + Duration::days(1);\n", "file_path": "src/utils.rs", "rank": 2, "score": 116763.03020513375 }, { "content": "pub fn split_all_timespans_max_days(timespans: &[TimeSpan], max_days: i64) -> Vec<TimeSpan> {\n\n let mut results: Vec<TimeSpan> = vec![];\n\n\n\n for timespan in timespans {\n\n let mut intermediate_results = split_timespan_max_days(timespan, max_days);\n\n results.append(&mut intermediate_results);\n\n }\n\n\n\n results\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 114074.72010590954 }, { "content": "pub fn get_missing_timespans(needed: TimeSpan, available: TimeSpan) -> Vec<TimeSpan> {\n\n assert!(needed.start <= needed.end);\n\n assert!(available.start <= available.end);\n\n\n\n if needed.start >= available.start && needed.end <= available.end {\n\n return vec![];\n\n }\n\n\n\n if needed.start >= available.end && needed.end >= available.end {\n\n let start = if needed.start == available.end {\n\n needed.start + Duration::days(1)\n\n } else {\n\n needed.start\n\n };\n\n\n\n return vec![TimeSpan { start, ..needed }];\n\n }\n\n\n\n if needed.start <= available.start && needed.end <= available.start {\n\n let end = if needed.end == available.start {\n", "file_path": "src/utils.rs", "rank": 4, "score": 112191.69485296324 }, { "content": "pub fn get_mac_sha256(data: &String, secret: &String) -> String {\n\n let mut mac = HmacSha256::new_from_slice(secret.as_bytes()).unwrap();\n\n mac.update(data.as_bytes());\n\n\n\n let hash_message = mac.finalize().into_bytes();\n\n hex::encode(&hash_message)\n\n}\n", "file_path": "src/utils.rs", "rank": 5, "score": 109825.60195761104 }, { "content": "pub fn get_timespans_to_retrieve(\n\n snapshots: Vec<database::Snapshot>,\n\n start: DateTime<Utc>,\n\n end: DateTime<Utc>,\n\n) -> Vec<TimeSpan> {\n\n if snapshots.is_empty() {\n\n println!(\"No available data.\");\n\n return vec![TimeSpan { start, end }];\n\n }\n\n\n\n let database_start: DateTime<Utc> = snapshots.first().unwrap().time;\n\n let database_end: DateTime<Utc> = snapshots.last().unwrap().time;\n\n println!(\n\n \"Database start: {}\\nDatabase end: {}\",\n\n database_start, database_end\n\n );\n\n\n\n let needed = TimeSpan { start, end };\n\n\n\n let 
available = TimeSpan {\n", "file_path": "src/utils.rs", "rank": 6, "score": 103312.70446023237 }, { "content": "fn compute_value(asset_identifier: &str, amount_identifier: &str) -> bson::Document {\n\n doc! {\n\n \"$mergeObjects\": [\n\n {\n\n \"asset\": asset_identifier,\n\n \"amount\": amount_identifier,\n\n },\n\n {\n\n \"$let\": {\n\n \"vars\": {\n\n \"found\": {\n\n \"$first\": {\n\n \"$filter\": {\n\n \"input\": \"$prices\",\n\n \"as\": \"price\",\n\n \"cond\": {\n\n \"$eq\": [\"$$price.asset\", asset_identifier]\n\n }\n\n }\n\n }\n", "file_path": "src/aggregate.rs", "rank": 7, "score": 93364.02799592202 }, { "content": "type HmacSha256 = Hmac<Sha256>;\n\n\n", "file_path": "src/utils.rs", "rank": 8, "score": 60458.56674866275 }, { "content": "pub mod binance;\n\npub mod database;\n\npub mod nomics;", "file_path": "src/model/mod.rs", "rank": 10, "score": 45896.160397677246 }, { "content": "use chrono::{DateTime, Utc};\n\nuse mongodb::bson;\n\nuse mongodb::bson::doc;\n\n\n", "file_path": "src/aggregate.rs", "rank": 12, "score": 24032.186250832958 }, { "content": " \"as\": \"prices\"\n\n }\n\n },\n\n doc! {\n\n \"$project\": {\n\n \"time\": 1,\n\n \"total_asset_of_btc\": compute_value(\"BTC\", \"$total_asset_of_btc\"),\n\n \"balances\": {\n\n \"$map\": {\n\n \"input\": \"$balances\",\n\n \"as\": \"balance\",\n\n \"in\": compute_value(\"$$balance.asset\", \"$$balance.amount\")\n\n }\n\n }\n\n }\n\n },\n\n ]\n\n}\n\n\n", "file_path": "src/aggregate.rs", "rank": 13, "score": 24024.61549195697 }, { "content": " }\n\n },\n\n \"in\": {\n\n \"price\": \"$$found.price\",\n\n \"value\": { \"$multiply\": [amount_identifier, \"$$found.price\"] }\n\n }\n\n }\n\n }\n\n ]\n\n }\n\n}\n", "file_path": "src/aggregate.rs", "rank": 14, "score": 24024.484517151246 }, { "content": "use crate::model::{binance, database, nomics};\n\nuse crate::utils;\n\nuse crate::Environment;\n\nuse crate::TimeSpan;\n\nuse crate::{BINANCE_API_BASE_URL, NOMICS_API_BASE_URL};\n\nuse std::error::Error;\n\n\n\nuse chrono::{DateTime, Duration, TimeZone, Utc};\n\nuse reqwest::Client;\n\nuse std::time::SystemTime;\n\n\n\npub async fn get_all_snapshots(\n\n auth: &Environment,\n\n account_type: &str,\n\n limit: u8,\n\n timespans: &[TimeSpan],\n\n) -> Result<Vec<database::Snapshot>, Box<dyn Error>> {\n\n let mut snapshots = Vec::new();\n\n\n\n for timespan in timespans {\n", "file_path": "src/requests.rs", "rank": 15, "score": 23451.096681232222 }, { "content": " let mut snapshots = Vec::new();\n\n\n\n for timespan in timespans {\n\n let mut intermediate_results =\n\n match get_history(auth, ids, convert, timespan.start, timespan.end).await {\n\n Ok(intermediate_results) => intermediate_results,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n snapshots.append(&mut intermediate_results);\n\n }\n\n\n\n Ok(snapshots)\n\n}\n\n\n\nasync fn get_snapshots(\n\n auth: &Environment,\n\n account_type: &str,\n\n limit: u8,\n\n start: DateTime<Utc>,\n", "file_path": "src/requests.rs", "rank": 16, "score": 23442.749906973295 }, { "content": " let intermediate_results =\n\n get_snapshots(auth, account_type, limit, timespan.start, timespan.end).await;\n\n\n\n let mut intermediate_results = match intermediate_results {\n\n Ok(intermediate_results) => intermediate_results,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n snapshots.append(&mut intermediate_results);\n\n }\n\n\n\n Ok(snapshots)\n\n}\n\n\n\npub async fn get_all_history(\n\n auth: &Environment,\n\n ids: &[String],\n\n convert: &str,\n\n timespans: &[TimeSpan],\n\n) -> 
Result<Vec<database::CurrencyHistory>, Box<dyn Error>> {\n", "file_path": "src/requests.rs", "rank": 17, "score": 23442.69126325969 }, { "content": " convert: &str,\n\n start: DateTime<Utc>,\n\n end: DateTime<Utc>,\n\n) -> Result<Vec<database::CurrencyHistory>, Box<dyn Error>> {\n\n let client = Client::new();\n\n\n\n println!(\"Call Nomics API (start: {}, end: {})\", start, end);\n\n\n\n let params = format!(\n\n \"ids={}&convert={}&start={}&end={}\",\n\n ids.join(\",\"),\n\n convert,\n\n utils::get_uri_escaped_datetime(start),\n\n utils::get_uri_escaped_datetime(end),\n\n );\n\n\n\n let url = format!(\"{}?key={}&{}\", NOMICS_API_BASE_URL, auth.nomics_key, params);\n\n let res = client.get(url).send().await?;\n\n\n\n let json = match res.text().await {\n", "file_path": "src/requests.rs", "rank": 18, "score": 23441.961359993464 }, { "content": " .send()\n\n .await?;\n\n\n\n let json = match res.text().await {\n\n Ok(res) => res,\n\n Err(e) => return Err(Box::new(e)),\n\n };\n\n\n\n let obj = match serde_json::from_str::<binance::RootObject>(&json) {\n\n Ok(obj) => obj,\n\n Err(e) => return Err(Box::new(e)),\n\n };\n\n\n\n let snapshots: Vec<database::Snapshot> = obj\n\n .snapshots\n\n .iter()\n\n .map(|snapshot| database::Snapshot {\n\n time: chrono::Utc.timestamp_millis(snapshot.update_time) + Duration::seconds(1),\n\n balances: snapshot\n\n .data\n", "file_path": "src/requests.rs", "rank": 19, "score": 23441.320704387133 }, { "content": " end: DateTime<Utc>,\n\n) -> Result<Vec<database::Snapshot>, Box<dyn Error>> {\n\n let client = Client::new();\n\n\n\n let now = SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .unwrap()\n\n .as_millis();\n\n\n\n let shifted_start = if start == end {\n\n start - Duration::days(1)\n\n } else {\n\n start - Duration::seconds(1)\n\n };\n\n\n\n let shifted_end = end - Duration::seconds(1);\n\n\n\n println!(\n\n \"Call Binance API (start: {}, end: {})\",\n\n shifted_start, shifted_end\n", "file_path": "src/requests.rs", "rank": 20, "score": 23440.19139354299 }, { "content": " Ok(res) => res,\n\n Err(e) => return Err(Box::new(e)),\n\n };\n\n\n\n let obj = match serde_json::from_str::<Vec<nomics::Sparkline>>(&json) {\n\n Ok(obj) => obj,\n\n Err(e) => return Err(Box::new(e)),\n\n };\n\n\n\n let history: Vec<database::CurrencyHistory> = obj\n\n .iter()\n\n .map(|history| {\n\n history\n\n .timestamps\n\n .iter()\n\n .enumerate()\n\n .map(|(i, timestamp)| database::CurrencyHistory {\n\n asset: history.currency.to_owned(),\n\n time: DateTime::parse_from_rfc3339(timestamp)\n\n .unwrap()\n", "file_path": "src/requests.rs", "rank": 21, "score": 23439.112356019945 }, { "content": " );\n\n\n\n let params = format!(\n\n \"type={}&limit={}&timestamp={}&startTime={}&endTime={}\",\n\n account_type,\n\n limit,\n\n now,\n\n shifted_start.timestamp_millis(),\n\n shifted_end.timestamp_millis(),\n\n );\n\n\n\n let signature = utils::get_mac_sha256(&params, &auth.binance_secret);\n\n\n\n let url = format!(\n\n \"{}?{}&signature={}\",\n\n BINANCE_API_BASE_URL, params, signature\n\n );\n\n let res = client\n\n .get(url)\n\n .header(\"X-MBX-APIKEY\", auth.binance_key.to_owned())\n", "file_path": "src/requests.rs", "rank": 22, "score": 23437.563368993164 }, { "content": " .with_timezone(&Utc),\n\n price: history.prices[i].parse::<f32>().unwrap(),\n\n })\n\n .collect::<Vec<database::CurrencyHistory>>()\n\n })\n\n .flatten()\n\n .collect();\n\n\n\n println!(\"Got {} currencies history.\", history.len());\n\n\n\n Ok(history)\n\n}\n", "file_path": "src/requests.rs", 
"rank": 23, "score": 23436.377991114736 }, { "content": " .balances\n\n .iter()\n\n .filter(|balance| balance.free.parse::<f32>().unwrap() > 0.)\n\n .map(|balance| database::Balance {\n\n asset: balance.asset.to_owned(),\n\n amount: balance.free.parse::<f32>().unwrap(),\n\n })\n\n .collect(),\n\n total_asset_of_btc: snapshot.data.total_asset_of_btc.parse::<f32>().unwrap(),\n\n })\n\n .collect();\n\n\n\n println!(\"Got {} snapshots.\", snapshots.len());\n\n\n\n Ok(snapshots)\n\n}\n\n\n\nasync fn get_history(\n\n auth: &Environment,\n\n ids: &[String],\n", "file_path": "src/requests.rs", "rank": 24, "score": 23435.589795824322 }, { "content": "use crate::model::database;\n\nuse crate::TimeSpan;\n\n\n\nuse chrono::{DateTime, Duration, SecondsFormat, Utc};\n\n\n\nuse hmac::{Hmac, Mac, NewMac};\n\nuse sha2::Sha256;\n\n\n", "file_path": "src/utils.rs", "rank": 25, "score": 23323.62139954478 }, { "content": " current_end = current_start + Duration::days(max_days - 1);\n\n }\n\n\n\n timespans.push(TimeSpan {\n\n start: current_start,\n\n end: timespan.end,\n\n });\n\n\n\n timespans\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 26, "score": 23316.039437298794 }, { "content": " start: database_start,\n\n end: database_end,\n\n };\n\n\n\n let missing = get_missing_timespans(needed, available);\n\n\n\n println!(\"Missing: {:?}\", missing);\n\n\n\n missing\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 27, "score": 23315.148905314923 }, { "content": " needed.end - Duration::days(1)\n\n } else {\n\n needed.end\n\n };\n\n\n\n return vec![TimeSpan { end, ..needed }];\n\n }\n\n\n\n if needed.start <= available.start && needed.end <= available.end {\n\n let end = available.start - Duration::days(1);\n\n return vec![TimeSpan { end, ..needed }];\n\n }\n\n\n\n if needed.start >= available.start && needed.end >= available.end {\n\n let start = available.end + Duration::days(1);\n\n return vec![TimeSpan { start, ..needed }];\n\n }\n\n\n\n if needed.start <= available.start && needed.end >= available.end {\n\n let end = available.start - Duration::days(1);\n\n let start = available.end + Duration::days(1);\n\n return vec![TimeSpan { end, ..needed }, TimeSpan { start, ..needed }];\n\n }\n\n\n\n panic!(\"Unsupported case!\");\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 28, "score": 23313.428891407275 }, { "content": "use bson::serde_helpers::chrono_datetime_as_bson_datetime;\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/* Collection: snapshots */\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Snapshot {\n\n #[serde(with = \"chrono_datetime_as_bson_datetime\")]\n\n pub time: DateTime<Utc>,\n\n pub total_asset_of_btc: f32,\n\n pub balances: Vec<Balance>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Balance {\n\n pub asset: String,\n\n pub amount: f32,\n\n}\n\n\n", "file_path": "src/model/database.rs", "rank": 29, "score": 22693.528860520615 }, { "content": "/* Collection: history */\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct CurrencyHistory {\n\n #[serde(with = \"chrono_datetime_as_bson_datetime\")]\n\n pub time: DateTime<Utc>,\n\n pub asset: String,\n\n pub price: f32,\n\n}\n\n\n\n/* Aggregation results */\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ComputedSnapshot {\n\n #[serde(with = \"chrono_datetime_as_bson_datetime\")]\n\n pub time: DateTime<Utc>,\n\n pub total_asset_of_btc: ComputedBalance,\n\n pub balances: Vec<ComputedBalance>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ComputedBalance {\n\n pub 
asset: String,\n\n pub amount: f32,\n\n pub price: Option<f32>,\n\n pub value: Option<f32>,\n\n}\n", "file_path": "src/model/database.rs", "rank": 30, "score": 22692.081352810084 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Sparkline {\n\n pub currency: String,\n\n pub timestamps: Vec<String>,\n\n pub prices: Vec<String>,\n\n}", "file_path": "src/model/nomics.rs", "rank": 31, "score": 22691.99335891715 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct RootObject {\n\n pub code: i64,\n\n pub msg: String,\n\n #[serde(rename = \"snapshotVos\")]\n\n pub snapshots: Vec<Snapshot>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Snapshot {\n\n #[serde(rename = \"type\")]\n\n pub account_type: String,\n\n #[serde(rename = \"updateTime\")]\n\n pub update_time: i64,\n\n pub data: Data,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "src/model/binance.rs", "rank": 32, "score": 22690.276977097365 }, { "content": "pub struct Data {\n\n #[serde(rename = \"totalAssetOfBtc\")]\n\n pub total_asset_of_btc: String,\n\n pub balances: Vec<Balance>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Balance {\n\n pub asset: String,\n\n pub free: String,\n\n pub locked: String,\n\n}\n", "file_path": "src/model/binance.rs", "rank": 33, "score": 22687.582353059675 }, { "content": "use crate::aggregate::make_aggregate_query;\n\nuse crate::model::database;\n\nuse crate::rocket::futures::StreamExt;\n\n\n\nuse bson::Bson;\n\nuse bson::Document;\n\nuse chrono::{DateTime, Utc};\n\nuse mongodb::bson::doc;\n\nuse mongodb::options::FindOptions;\n\nuse mongodb::Collection;\n\nuse mongodb::Database;\n\nuse std::iter::Iterator;\n\n\n\npub async fn get_snapshots(\n\n database: &Database,\n\n start: DateTime<Utc>,\n\n end: DateTime<Utc>,\n\n) -> Vec<database::Snapshot> {\n\n let collection: Collection<Document> = database.collection(\"snapshots\");\n\n\n", "file_path": "src/database.rs", "rank": 37, "score": 17.972965032584852 }, { "content": " .iter()\n\n .map(|history| bson::ser::to_document(history).unwrap())\n\n .collect();\n\n\n\n let res = collection.insert_many(docs, None).await;\n\n res.expect(\"Could not insert the history.\");\n\n}\n\n\n\npub async fn get_computed_snapshots(\n\n database: &Database,\n\n start: DateTime<Utc>,\n\n end: DateTime<Utc>,\n\n) -> Vec<database::ComputedSnapshot> {\n\n let collection: Collection<Document> = database.collection(\"snapshots\");\n\n\n\n let computed_snapshots: Vec<database::ComputedSnapshot> = collection\n\n .aggregate(make_aggregate_query(start, end), None)\n\n .await\n\n .unwrap()\n\n .collect::<Vec<_>>()\n\n .await\n\n .into_iter()\n\n .flatten()\n\n .map(|document| bson::from_bson(Bson::Document(document)).unwrap())\n\n .collect();\n\n\n\n computed_snapshots\n\n}\n", "file_path": "src/database.rs", "rank": 42, "score": 7.621164688374066 }, { "content": "### Why Binance?\n\n\n\nBecause that's what I use personally! 😅\n\n\n\nMaximum time range for one request: 30 days.\n\n\n\nGo [here](https://binance.zendesk.com/hc/en-us/articles/360002502072-How-to-create-API) to know how to create a Binance API key.\n\n\n\n### Why Nomics?\n\n\n\nIt's the only API I found which can give an history anywhere in time **for free**. 
And the free plan accepts 1 request per second, which is reasonable.\n\n\n\nMaximum time range for one request: 45 days.\n\n\n\nGo [here](https://p.nomics.com/cryptocurrency-bitcoin-api) to know how to create a Nomics API key.\n\n\n\n### How it works?\n\n\n\n- Receives an API request from the frontend\n\n- Gets data stored in the MongoDB database\n\n- Computes the time range of this data\n\n- Computes the needed timespans to fill in the blanks (returns vector of timespans)\n\n- If the vector is empty\n\n - It's up to date\n\n - Aggregates data (wallet snapshots with price history) and returns as JSON\n\n- If the vector contains 1 (before or after) or 2 (before and after) timespans\n\n - Split the timespans in multiple timespans of N days maximum (because of API's time range limit)\n\n - Do as many API requests as timespans\n\n - Upload the results to the database (the time is a primary key)\n\n - Aggregates data (wallet snapshots with price history) and returns as JSON\n\n\n\n## Frontend\n\n\n\n- Built with React\n\n- Charts are made with [Recharts](https://recharts.org/en-US/)\n\n- Built with [esbuild](https://esbuild.github.io/)\n", "file_path": "README.md", "rank": 43, "score": 7.004629981581419 }, { "content": " .collect()\n\n}\n\n\n\npub async fn push_snapshots(database: &Database, snapshots: Vec<database::Snapshot>) {\n\n let collection: Collection<Document> = database.collection(\"snapshots\");\n\n\n\n let docs: Vec<bson::Document> = snapshots\n\n .iter()\n\n .map(|history| bson::ser::to_document(history).unwrap())\n\n .collect();\n\n\n\n let res = collection.insert_many(docs, None).await;\n\n res.expect(\"Could not insert the snapshots.\");\n\n}\n\n\n\npub async fn get_possessed_assets(database: &Database) -> Vec<String> {\n\n let collection: Collection<Document> = database.collection(\"snapshots\");\n\n\n\n let mut assets: Vec<String> = collection\n\n .distinct(\"balances.asset\", None, None)\n", "file_path": "src/database.rs", "rank": 47, "score": 5.330570423693241 }, { "content": " .await\n\n .unwrap()\n\n .iter()\n\n .map(|document| bson::from_bson(document.to_owned()).unwrap())\n\n .collect();\n\n\n\n let bitcoin_asset = \"BTC\".to_string();\n\n\n\n if !assets.contains(&bitcoin_asset) {\n\n assets.push(bitcoin_asset);\n\n }\n\n\n\n assets.sort_unstable();\n\n assets\n\n}\n\n\n\npub async fn push_history(database: &Database, price_history: Vec<database::CurrencyHistory>) {\n\n let collection: Collection<Document> = database.collection(\"history\");\n\n\n\n let docs: Vec<bson::Document> = price_history\n", "file_path": "src/database.rs", "rank": 49, "score": 4.217521003353917 }, { "content": " // Sort with older first.\n\n let find_options = FindOptions::builder().sort(doc! {\"time\": 1}).build();\n\n\n\n collection\n\n .find(\n\n doc! {\n\n \"time\": {\n\n \"$gte\": start,\n\n \"$lte\": end,\n\n }\n\n },\n\n find_options,\n\n )\n\n .await\n\n .unwrap()\n\n .collect::<Vec<_>>()\n\n .await\n\n .into_iter()\n\n .flatten()\n\n .map(|document| bson::from_bson(Bson::Document(document)).unwrap())\n", "file_path": "src/database.rs", "rank": 50, "score": 2.9187549324312165 }, { "content": "# Crypto Balance\n\n\n\nA web application made with Rust and React.\n\n\n\nYou can see an area chart with all your cryptocurrencies in the same chart. 
And a **full pie chart** whereas the one Binance offers groups every little amount of crypto in a single \"Others\" section...\n\n\n\n![Demo of the frontend](demo.gif)\n\n\n\n## Usage\n\n\n\nYou can use this web application with docker-compose.\n\n\n\nEdit the `.env` file at the root of the project with your personal API keys.\n\n\n\nOnce done, simply do the following:\n\n\n\n```bash\n\ndocker-compose build\n\ndocker-compose up -d\n\n```\n\n\n\nThis will start `mongo` (the database), `mongo-express` (a web interface to manage the database) and `crypto-balance`.\n\n\n\nYou can access:\n\n\n\n- The web app via http://127.0.0.1/\n\n- The MongoDB web interface via http://127.0.0.1:8081/\n\n- The database via the port 27017\n\n\n\n**Note:** You might need to build with either `DOCKER_BUILDKIT=1` or `COMPOSE_DOCKER_CLI_BUILD=1` depending on your docker-compose version. Upgrade to at least v2.0.0 to build without any of these flags.\n\n\n\n## Backend\n\n\n\nThe backend (in Rust) gets daily snapshots of your wallets and the price history for each cryptocurrency you have in your wallets.\n\n\n\nUsed APIs:\n\n\n\n- Binance API for the wallets snapshots (with the [Daily Account Snapshot](https://binance-docs.github.io/apidocs/spot/en/#daily-account-snapshot-user_data) endpoint)\n\n- Nomics API for the price history (with the [Currencies Sparkline](https://nomics.com/docs/#operation/getCurrenciesSparkline) endpoint)\n\n\n\nThe Binance API **does not give the fiat amount** corresponding to each cryptocurrency you have, hence the need of the Nomics API to get the price history.\n\n\n\nA MongoDB database is used to store this data as NoSQL. So when we ask for data in a time range, only the missing data is requested to APIs. The database contains **daily** data points and it is normalized to be at midnight (00:00) for each day.\n\n\n\n- Binance API saves snapshots each day at 23:59 (stored in MongoDB as +1 minute, which is the next day)\n\n- Nomics API gives a price history with data points at 00:00 (stored in MongoDB as is)\n\n\n", "file_path": "README.md", "rank": 51, "score": 2.6615320672727893 } ]
Rust
src/tests/unit.rs
asomers/async-weighted-semaphore
2085026ab5f4a5b9b6bc8a724e7cdca75a212c32
use std::sync::{Arc, Barrier, Mutex}; use rand::{thread_rng, Rng, SeedableRng}; use std::sync::atomic::{AtomicIsize, AtomicBool}; use std::sync::atomic::Ordering::Relaxed; use std::task::{Context, Waker, Poll}; use std::future::Future; use std::{mem, thread, fmt}; use futures_test::task::{AwokenCount, new_count_waker}; use std::pin::Pin; use futures_test::std_reexport::panic::catch_unwind; use futures_test::std_reexport::collections::BTreeMap; use rand_xorshift::XorShiftRng; use std::fmt::Debug; use futures_test::futures_core_reexport::core_reexport::fmt::Formatter; use crate::{Semaphore, AcquireFuture, PoisonError, SemaphoreGuard}; struct TestFuture<'a> { waker: Waker, count: AwokenCount, old_count: usize, inner: Pin<Box<AcquireFuture<'a>>>, amount: usize, } impl<'a> Debug for TestFuture<'a> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TF") .field("w", &(self.count.get() != self.old_count)) .field("p", &(self.inner.as_ref().get_ref() as *const AcquireFuture)) .field("i", &self.inner) .finish() } } impl<'a> TestFuture<'a> { fn new(sem: &'a Semaphore, amount: usize) -> Self { let (waker, count) = new_count_waker(); TestFuture { waker, count, old_count: 0, inner: Box::pin(sem.acquire(amount)), amount } } fn count(&self) -> usize { self.count.get() } fn poll(&mut self) -> Option<Result<SemaphoreGuard<'a>, PoisonError>> { match self.inner.as_mut().poll(&mut Context::from_waker(&self.waker)) { Poll::Pending => None, Poll::Ready(x) => Some(x) } } fn poll_if_woken(&mut self) -> Option<Result<SemaphoreGuard<'a>, PoisonError>> { let count = self.count.get(); if self.old_count != count { self.old_count = count; self.poll() } else { None } } fn into_inner(self) -> Pin<Box<AcquireFuture<'a>>> { self.inner } } #[test] fn test_simple() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 1); let g1 = a1.poll().unwrap().unwrap(); let mut a2 = TestFuture::new(&semaphore, 1); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); assert!(a2.poll().is_none()); mem::drop(g1); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_zero_now() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 0); let g1 = a1.poll().unwrap().unwrap(); assert_eq!(a1.count(), 0); mem::drop(g1); } #[test] fn test_zero_pending() { let semaphore = Semaphore::new(0); println!("A {:?}", semaphore); let mut a1 = TestFuture::new(&semaphore, 1); println!("B {:?}", semaphore); assert!(a1.poll().is_none()); println!("C {:?}", semaphore); let mut a2 = TestFuture::new(&semaphore, 0); let _g2 = a2.poll(); assert!(a2.poll().is_none()); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); mem::drop(a1); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_cancel() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 2); assert!(a1.poll().is_none()); let mut a2 = TestFuture::new(&semaphore, 1); assert!(a2.poll().is_none()); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); mem::drop(a1); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_leak() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 2); assert!(a1.poll().is_none()); lazy_static! 
{ static ref SUPPRESS: Mutex<usize> = Mutex::new(0); } unsafe { *SUPPRESS.lock().unwrap() = Box::into_raw(Pin::into_inner_unchecked(a1.into_inner())) as usize; } mem::drop(semaphore); } #[test] fn test_poison_panic() { let semaphore = Semaphore::new(1); assert!(catch_unwind( || { let _guard = TestFuture::new(&semaphore, 1).poll().unwrap().unwrap(); panic!("Expected panic"); } ).is_err()); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_new() { let semaphore = Semaphore::new(usize::MAX); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_immediate() { let semaphore = Semaphore::new(0); semaphore.release(usize::MAX); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_add() { let semaphore = Semaphore::new(0); semaphore.release(Semaphore::MAX_AVAILABLE / 2); semaphore.release(Semaphore::MAX_AVAILABLE / 2 + 2); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_concurrent() { let semaphore = Semaphore::new(0); let mut future = TestFuture::new(&semaphore, 2); assert!(future.poll().is_none()); semaphore.release(usize::MAX); future.poll_if_woken().expect("done").err().expect("AcquireError"); } #[test] fn test_sequential() { let semaphore = Semaphore::new(0); let mut time = 0; let mut available = 0usize; let mut futures = BTreeMap::<usize, TestFuture>::new(); let mut rng = XorShiftRng::seed_from_u64(954360855); for _ in 0..100000 { if rng.gen_bool(0.1) && futures.len() < 5 { let amount = rng.gen_range(0, 10); let mut fut = TestFuture::new(&semaphore, amount); if let Some(guard) = fut.poll() { guard.unwrap().forget(); available = available.checked_sub(amount).unwrap(); } else { futures.insert(time, fut); } time += 1; } else if rng.gen_bool(0.1) { let mut blocked = false; let mut ready = vec![]; for (time, fut) in futures.iter_mut() { if rng.gen_bool(0.5) { if let Some(guard) = fut.poll_if_woken() { assert!(!blocked); guard.unwrap().forget(); available = available.checked_sub(fut.amount).unwrap(); ready.push(*time); } else { blocked = true; } } } for time in ready { futures.remove(&time); } } else if rng.gen_bool(0.1) && available < 30 { let amount = rng.gen_range(0, 10); available = available.checked_add(amount).unwrap(); semaphore.release(amount); } } } #[test] fn test_parallel() { for i in 0..1000 { println!("iteration {:?}", i); test_parallel_impl(); } } fn test_parallel_impl() { let threads = 10; let semaphore = Arc::new(Semaphore::new(0)); let resource = Arc::new(AtomicIsize::new(0)); let barrier = Arc::new(Barrier::new(threads)); let poisoned = Arc::new(AtomicBool::new(false)); let pending_max = Arc::new(AtomicIsize::new(-1)); (0..threads).map(|index| thread::Builder::new().name(format!("test_parallel_impl_{}", index)).spawn({ let semaphore = semaphore.clone(); let resource = resource.clone(); let barrier = barrier.clone(); let poisoned = poisoned.clone(); let pending_max = pending_max.clone(); move || { let mut time = 0; let mut futures = BTreeMap::<usize, TestFuture>::new(); let on_guard = |guard: Result<SemaphoreGuard, PoisonError>| { match guard { Err(PoisonError) => {} Ok(guard) => { let amount = guard.forget() as isize; resource.fetch_sub(amount, Relaxed).checked_sub(amount).unwrap(); } } }; for _ in 0.. 
{ if thread_rng().gen_bool(0.1) { if futures.len() < 5 { let amount = thread_rng().gen_range(0, 10); let mut fut = TestFuture::new(&semaphore, amount); match fut.poll() { None => { futures.insert(time, fut); } Some(guard) => on_guard(guard), } time += 1; } } if thread_rng().gen_bool(0.001) { if barrier.wait().is_leader() { pending_max.store(-1, Relaxed); } let was_poisoned = poisoned.load(Relaxed); let mut ready = vec![]; for (time, fut) in futures.iter_mut() { if let Some(guard) = fut.poll_if_woken() { on_guard(guard); ready.push(*time); } } for time in ready { futures.remove(&time); } barrier.wait(); print!("{:?} ", futures.len()); if let Some(front) = futures.values_mut().next() { pending_max.fetch_max(front.amount as isize, Relaxed); } let leader = barrier.wait(); let pending_amount = pending_max.load(Relaxed); if pending_amount >= 0 && pending_amount <= resource.load(Relaxed) { for (time, fut) in futures.iter_mut() { println!("{:?} {:?}", time, fut); } if leader.is_leader() { println!("{:?}", semaphore); panic!("Should have acquired. {:?} of {:?}", pending_amount, resource.load(Relaxed)); } } if barrier.wait().is_leader() { println!(); } if was_poisoned { return; } } if thread_rng().gen_bool(0.1) { let mut ready = vec![]; for (time, fut) in futures.iter_mut().rev() { if thread_rng().gen_bool(0.5) { let guard = if time & 1 == 1 && thread_rng().gen_bool(0.5) { fut.poll() } else { fut.poll_if_woken() }; if let Some(guard) = guard { on_guard(guard); ready.push(*time); } else if !ready.is_empty() { println!("{:?}", semaphore); for (time, fut) in futures.iter_mut() { println!("{:?} {:?}", time, fut); } panic!(); } } } for time in ready { futures.remove(&time); } } if thread_rng().gen_bool(0.1) { let mut cancelled = vec![]; for (time, _) in futures.iter_mut().rev() { if time & 2 == 2 && thread_rng().gen_bool(0.5) { cancelled.push(*time); } } for time in cancelled { futures.remove(&time); } } if thread_rng().gen_bool(0.1) && resource.load(Relaxed) < 20 { let amount = thread_rng().gen_range(0, 20); resource.fetch_add(amount as isize, Relaxed); semaphore.release(amount); } if thread_rng().gen_bool(0.1) { if let Ok(guard) = semaphore.try_acquire(thread_rng().gen_range(0, 10)) { on_guard(Ok(guard)); } } if thread_rng().gen_bool(0.0001) { poisoned.store(true, Relaxed); semaphore.poison(); } } } }).unwrap()).collect::<Vec<_>>().into_iter().for_each(|x| x.join().unwrap()); }
use std::sync::{Arc, Barrier, Mutex}; use rand::{thread_rng, Rng, SeedableRng}; use std::sync::atomic::{AtomicIsize, AtomicBool}; use std::sync::atomic::Ordering::Relaxed; use std::task::{Context, Waker, Poll}; use std::future::Future; use std::{mem, thread, fmt}; use futures_test::task::{AwokenCount, new_count_waker}; use std::pin::Pin; use futures_test::std_reexport::panic::catch_unwind; use futures_test::std_reexport::collections::BTreeMap; use rand_xorshift::XorShiftRng; use std::fmt::Debug; use futures_test::futures_core_reexport::core_reexport::fmt::Formatter; use crate::{Semaphore, AcquireFuture, PoisonError, SemaphoreGuard}; struct TestFuture<'a> { waker: Waker, count: AwokenCount, old_count: usize, inner: Pin<Box<AcquireFuture<'a>>>, amount: usize, } impl<'a> Debug for TestFuture<'a> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TF") .field("w", &(self.count.get() != self.old_count)) .field("p", &(self.inner.as_ref().get_ref() as *const AcquireFuture)) .field("i", &self.inner) .finish() } } impl<'a> TestFuture<'a> { fn new(sem: &'a Semaphore, amount: usize) -> Self { let (waker, count) = new_count_waker(); TestFuture { waker, count, old_count: 0, inner: Box::pin(sem.acquire(amount)), amount } } fn count(&self) -> usize { self.count.get() } fn poll(&mut self) -> Option<Result<SemaphoreGuard<'a>, PoisonError>> { match self.inner.as_mut().poll(&mut Context::from_waker(&self.waker)) { Poll::Pending => None, Poll::Ready(x) => Some(x) } } fn poll_if_woken(&mut self) -> Option<Result<SemaphoreGuard<'a>, PoisonError>> { let count = self.count.get(); if self.old_count != count { self.old_count = count; self.poll() } else { None } } fn into_inner(self) -> Pin<Box<AcquireFuture<'a>>> { self.inner } } #[test] fn test_simple() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 1); let g1 = a1.poll().unwrap().unwrap(); let mut a2 = TestFuture::new(&semaphore, 1); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); assert!(a2.poll().is_none()); mem::drop(g1); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_zero_now() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 0); let g1 = a1.poll().unwrap().unwrap(); assert_eq!(a1.count(), 0); mem::drop(g1); } #[test] fn test_zero_pending() { let semaphore = Semaphore::new(0); println!("A {:?}", semaphore); let mut a1 = TestFuture::new(&semaphore, 1); println!("B {:?}", semaphore); assert!(a1.poll().is_none()); println!("C {:?}", semaphore); let mut a2 = TestFuture::new(&semaphore, 0); let _g2 = a2.poll(); assert!(a2.poll().is_none()); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); mem::drop(a1); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_cancel() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 2); assert!(a1.poll().is_none()); let mut a2 = TestFuture::new(&semaphore, 1); assert!(a2.poll().is_none()); assert_eq!(a1.count(), 0); assert_eq!(a2.count(), 0); mem::drop(a1); assert_eq!(a2.count(), 1); assert!(a2.poll().is_some()); } #[test] fn test_leak() { let semaphore = Semaphore::new(1); let mut a1 = TestFuture::new(&semaphore, 2); assert!(a1.poll().is_none()); lazy_static! 
{ static ref SUPPRESS: Mutex<usize> = Mutex::new(0); } unsafe { *SUPPRESS.lock().unwrap() = Box::into_raw(Pin::into_inner_unchecked(a1.into_inner())) as usize; } mem::drop(semaphore); } #[test] fn test_poison_panic() { let semaphore = Semaphore::new(1); assert!(catch_unwind( || { let _guard = TestFuture::new(&semaphore, 1).poll().unwrap().unwrap(); panic!("Expected panic"); } ).is_err()); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_new() { let semaphore = Semaphore::new(usize::MAX); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_immediate() { let semaphore = Semaphore::new(0); semaphore.release(usize::MAX); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_add() { let semaphore = Semaphore::new(0); semaphore.release(Semaphore::MAX_AVAILABLE / 2); semaphore.release(Semaphore::MAX_AVAILABLE / 2 + 2); TestFuture::new(&semaphore, 2).poll().unwrap().err().unwrap(); } #[test] fn test_poison_release_concurrent() { let semaphore = Semaphore::new(0); let mut future = TestFuture::new(&semaphore, 2); assert!(future.poll().is_none()); semaphore.release(usize::MAX); future.poll_if_woken().expect("done").err().expect("AcquireError"); } #[test] fn test_sequential() { let semaphore = Semaphore::new(0); let mut time = 0; let mut available = 0usize; let mut futures = BTreeMap::<usi
#[test] fn test_parallel() { for i in 0..1000 { println!("iteration {:?}", i); test_parallel_impl(); } } fn test_parallel_impl() { let threads = 10; let semaphore = Arc::new(Semaphore::new(0)); let resource = Arc::new(AtomicIsize::new(0)); let barrier = Arc::new(Barrier::new(threads)); let poisoned = Arc::new(AtomicBool::new(false)); let pending_max = Arc::new(AtomicIsize::new(-1)); (0..threads).map(|index| thread::Builder::new().name(format!("test_parallel_impl_{}", index)).spawn({ let semaphore = semaphore.clone(); let resource = resource.clone(); let barrier = barrier.clone(); let poisoned = poisoned.clone(); let pending_max = pending_max.clone(); move || { let mut time = 0; let mut futures = BTreeMap::<usize, TestFuture>::new(); let on_guard = |guard: Result<SemaphoreGuard, PoisonError>| { match guard { Err(PoisonError) => {} Ok(guard) => { let amount = guard.forget() as isize; resource.fetch_sub(amount, Relaxed).checked_sub(amount).unwrap(); } } }; for _ in 0.. { if thread_rng().gen_bool(0.1) { if futures.len() < 5 { let amount = thread_rng().gen_range(0, 10); let mut fut = TestFuture::new(&semaphore, amount); match fut.poll() { None => { futures.insert(time, fut); } Some(guard) => on_guard(guard), } time += 1; } } if thread_rng().gen_bool(0.001) { if barrier.wait().is_leader() { pending_max.store(-1, Relaxed); } let was_poisoned = poisoned.load(Relaxed); let mut ready = vec![]; for (time, fut) in futures.iter_mut() { if let Some(guard) = fut.poll_if_woken() { on_guard(guard); ready.push(*time); } } for time in ready { futures.remove(&time); } barrier.wait(); print!("{:?} ", futures.len()); if let Some(front) = futures.values_mut().next() { pending_max.fetch_max(front.amount as isize, Relaxed); } let leader = barrier.wait(); let pending_amount = pending_max.load(Relaxed); if pending_amount >= 0 && pending_amount <= resource.load(Relaxed) { for (time, fut) in futures.iter_mut() { println!("{:?} {:?}", time, fut); } if leader.is_leader() { println!("{:?}", semaphore); panic!("Should have acquired. {:?} of {:?}", pending_amount, resource.load(Relaxed)); } } if barrier.wait().is_leader() { println!(); } if was_poisoned { return; } } if thread_rng().gen_bool(0.1) { let mut ready = vec![]; for (time, fut) in futures.iter_mut().rev() { if thread_rng().gen_bool(0.5) { let guard = if time & 1 == 1 && thread_rng().gen_bool(0.5) { fut.poll() } else { fut.poll_if_woken() }; if let Some(guard) = guard { on_guard(guard); ready.push(*time); } else if !ready.is_empty() { println!("{:?}", semaphore); for (time, fut) in futures.iter_mut() { println!("{:?} {:?}", time, fut); } panic!(); } } } for time in ready { futures.remove(&time); } } if thread_rng().gen_bool(0.1) { let mut cancelled = vec![]; for (time, _) in futures.iter_mut().rev() { if time & 2 == 2 && thread_rng().gen_bool(0.5) { cancelled.push(*time); } } for time in cancelled { futures.remove(&time); } } if thread_rng().gen_bool(0.1) && resource.load(Relaxed) < 20 { let amount = thread_rng().gen_range(0, 20); resource.fetch_add(amount as isize, Relaxed); semaphore.release(amount); } if thread_rng().gen_bool(0.1) { if let Ok(guard) = semaphore.try_acquire(thread_rng().gen_range(0, 10)) { on_guard(Ok(guard)); } } if thread_rng().gen_bool(0.0001) { poisoned.store(true, Relaxed); semaphore.poison(); } } } }).unwrap()).collect::<Vec<_>>().into_iter().for_each(|x| x.join().unwrap()); }
ze, TestFuture>::new(); let mut rng = XorShiftRng::seed_from_u64(954360855); for _ in 0..100000 { if rng.gen_bool(0.1) && futures.len() < 5 { let amount = rng.gen_range(0, 10); let mut fut = TestFuture::new(&semaphore, amount); if let Some(guard) = fut.poll() { guard.unwrap().forget(); available = available.checked_sub(amount).unwrap(); } else { futures.insert(time, fut); } time += 1; } else if rng.gen_bool(0.1) { let mut blocked = false; let mut ready = vec![]; for (time, fut) in futures.iter_mut() { if rng.gen_bool(0.5) { if let Some(guard) = fut.poll_if_woken() { assert!(!blocked); guard.unwrap().forget(); available = available.checked_sub(fut.amount).unwrap(); ready.push(*time); } else { blocked = true; } } } for time in ready { futures.remove(&time); } } else if rng.gen_bool(0.1) && available < 30 { let amount = rng.gen_range(0, 10); available = available.checked_add(amount).unwrap(); semaphore.release(amount); } } }
function_block-function_prefixed
[ { "content": "struct SemMutex(Semaphore, UnsafeCell<usize>);\n\n\n\nunsafe impl Send for SemMutex {}\n\n\n\nunsafe impl Sync for SemMutex {}\n\n\n\nimpl Default for SemMutex {\n\n fn default() -> Self {\n\n SemMutex(Semaphore::new(1), UnsafeCell::new(0))\n\n }\n\n}\n\n\n\nimpl AtomicCounter for SemMutex {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n let _guard = x.0.acquire(1).await.unwrap();\n\n unsafe { *x.1.get() += 1; }\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 0, "score": 117835.99228394657 }, { "content": "#[bench]\n\nfn run_futures_mutex(bencher: &mut Bencher) {\n\n run_impl::<FuturesMutex<usize>>(bencher);\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 1, "score": 107570.93198252961 }, { "content": "#[test]\n\nfn test() {\n\n let lock = Arc::new(RwLock::new(Vec::new()));\n\n let threads = 10;\n\n let iters = 500;\n\n (0..threads).map(|thread| {\n\n let lock = lock.clone();\n\n thread::spawn(move || {\n\n block_on(async {\n\n for i in (thread..iters * threads + thread).step_by(threads) {\n\n while lock.read().await.unwrap().len() != i {}\n\n lock.write().await.unwrap().push(i)\n\n }\n\n })\n\n })\n\n }).collect::<Vec<_>>().into_iter().for_each(|x| x.join().unwrap());\n\n let vec = Arc::try_unwrap(lock).unwrap().into_inner().unwrap();\n\n assert_eq!(vec, (0..threads*iters).collect::<Vec<_>>());\n\n}", "file_path": "src/tests/lock.rs", "rank": 3, "score": 88567.28810744936 }, { "content": "#[bench]\n\nfn run_sem_mutex(bencher: &mut Bencher) {\n\n run_impl::<SemMutex>(bencher);\n\n}", "file_path": "benches/mutex.rs", "rank": 4, "score": 88060.33189558965 }, { "content": "#[bench]\n\nfn run_sync_mutex(bencher: &mut Bencher) {\n\n run_impl::<SyncMutex<usize>>(bencher);\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 5, "score": 88060.33189558965 }, { "content": "#[bench]\n\nfn run_tokio_mutex(bencher: &mut Bencher) {\n\n run_impl::<TokioMutex<usize>>(bencher);\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 6, "score": 88060.33189558965 }, { "content": "#[bench]\n\nfn run_async_std_mutex(bencher: &mut Bencher) {\n\n run_impl::<AsyncStdMutex<usize>>(bencher);\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 10, "score": 85364.60933135831 }, { "content": "#[bench]\n\nfn run_atomic(bencher: &mut Bencher) {\n\n run_impl::<AtomicUsize>(bencher);\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 13, "score": 82652.63402115341 }, { "content": "struct ReaderInner(Pipe);\n\n\n", "file_path": "src/tests/pipe.rs", "rank": 15, "score": 79686.47005718287 }, { "content": "struct WriterInner(Pipe);\n\n\n\n#[derive(Clone)]\n\npub struct Reader {\n\n inner: Arc<ReaderInner>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Writer {\n\n inner: Arc<WriterInner>\n\n}\n\n\n", "file_path": "src/tests/pipe.rs", "rank": 16, "score": 79686.47005718287 }, { "content": "fn run_impl<T: AtomicCounter>(bencher: &mut Bencher) {\n\n let items = (0..ITEMS).map(|_| Arc::new(T::default())).collect::<Vec<_>>();\n\n let pool = ThreadPool::builder().pool_size(PARALLELISM).create().unwrap();\n\n bencher.iter(|| {\n\n block_on(join_all((0..CONCURRENCY).map(|_| {\n\n let items = items.clone();\n\n pool.spawn_with_handle(T::do_loop(items)).unwrap()\n\n }).collect::<Vec<_>>().into_iter()));\n\n });\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 17, "score": 70477.56168731803 }, { "content": "#[test]\n\nfn test_pipe() {\n\n for _i in 0..100 {\n\n test_pipe_impl();\n\n }\n\n}\n\n\n", 
"file_path": "src/tests/pipe.rs", "rank": 21, "score": 66354.41608448338 }, { "content": "#[derive(Clone)]\n\nstruct Pipe {\n\n ready: Arc<Semaphore>,\n\n free: Arc<Semaphore>,\n\n buffer: Arc<Mutex<VecDeque<u8>>>,\n\n}\n\n\n", "file_path": "src/tests/pipe.rs", "rank": 22, "score": 65929.35957167023 }, { "content": "fn pipe(capacity: usize) -> (Writer, Reader) {\n\n let ready = Arc::new(Semaphore::new(0));\n\n let free = Arc::new(Semaphore::new(capacity));\n\n let buffer = Arc::new(Mutex::new(VecDeque::with_capacity(capacity)));\n\n let pipe = Pipe { ready, free, buffer };\n\n let reader = Arc::new(ReaderInner(pipe.clone()));\n\n let writer = Arc::new(WriterInner(pipe.clone()));\n\n (Writer { inner: writer }, Reader { inner: reader })\n\n}\n\n\n\nimpl Reader {\n\n async fn read_exact(&self, buf: &mut [u8]) -> usize {\n\n assert!(buf.len() < self.inner.0.buffer.lock().await.capacity());\n\n let total = if let Ok(guard) = self.inner.0.ready.acquire(buf.len()).await {\n\n guard.forget();\n\n let mut lock = self.inner.0.buffer.lock().await;\n\n assert!(buf.len() <= lock.len());\n\n for b in buf.iter_mut() {\n\n *b = lock.pop_front().unwrap();\n\n }\n", "file_path": "src/tests/pipe.rs", "rank": 23, "score": 65460.02394655033 }, { "content": "fn test_pipe_impl() {\n\n let threads = 10;\n\n let iters = 1000;\n\n let (w, r) = pipe(20);\n\n for _ in 0..threads {\n\n let w = w.clone();\n\n thread::spawn(move || block_on(async {\n\n for _ in 0..iters {\n\n let n = thread_rng().gen_range(0, 1000);\n\n w.write_all(format!(\"{}\\n\", n).as_ref()).await.unwrap();\n\n }\n\n }));\n\n }\n\n mem::drop(w);\n\n block_on(async {\n\n let mut result = Vec::new();\n\n loop {\n\n let mut buf = vec![0u8; thread_rng().gen_range(0, 10)];\n\n let n = r.read_exact(&mut buf).await;\n\n result.extend_from_slice(&buf[0..n]);\n", "file_path": "src/tests/pipe.rs", "rank": 26, "score": 64075.3884601653 }, { "content": "fn run_impl(bencher: &mut Bencher) {\n\n let pool = ThreadPool::builder().pool_size(PARALLELISM).create().unwrap();\n\n bencher.iter(|| {\n\n let items = (0..ITEMS)\n\n .map(|i| Arc::new(Semaphore::new(if i == 0 { TOKENS } else { 0 })))\n\n .collect::<Vec<_>>();\n\n block_on(join_all((0..ITEMS).map(|index| {\n\n let items = items.clone();\n\n pool.spawn_with_handle(async move {\n\n let mut rng = XorShiftRng::from_entropy();\n\n for _ in 0..ITERS {\n\n let amount = rng.gen_range(1usize, 1 << MAX_ACQUIRE + 1).trailing_zeros() as usize;\n\n match items[index].acquire(amount).await {\n\n Ok(guard) => guard.forget(),\n\n Err(PoisonError) => break,\n\n };\n\n items[(index + 1) % ITEMS].release(amount);\n\n }\n\n items[(index + 1) % ITEMS].poison();\n\n }).unwrap()\n\n })));\n\n });\n\n}\n\n\n", "file_path": "benches/ring.rs", "rank": 27, "score": 62483.13681927817 }, { "content": "#[bench]\n\nfn run_weighted(bencher: &mut Bencher) {\n\n run_impl(bencher);\n\n}", "file_path": "benches/ring.rs", "rank": 28, "score": 62483.13681927817 }, { "content": "#[test]\n\nfn test_readme_deps() {\n\n version_sync::assert_markdown_deps_updated!(\"README.md\");\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 31, "score": 58523.9709353066 }, { "content": "#[test]\n\nfn test_html_root_url() {\n\n version_sync::assert_html_root_url_updated!(\"src/lib.rs\");\n\n}", "file_path": "src/lib.rs", "rank": 32, "score": 56193.70940578151 }, { "content": "trait AtomicCounter: Default + 'static + Send + Sync {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()>;\n\n}\n\n\n\nimpl AtomicCounter for AtomicUsize {\n\n fn 
do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n x.fetch_add(1, SeqCst);\n\n }\n\n })\n\n }\n\n}\n\n\n\nimpl AtomicCounter for SyncMutex<usize> {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n *x.lock().unwrap() += 1;\n\n }\n", "file_path": "benches/mutex.rs", "rank": 33, "score": 50416.710819529704 }, { "content": "fn get_loop<'a, T>(v: &'a Vec<Arc<T>>) -> impl Iterator<Item=&'a T> {\n\n (0..ITERS).map(move |i| &*v[i % ITEMS])\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 34, "score": 36709.80773635972 }, { "content": "#![feature(test)]\n\nextern crate test;\n\n\n\nuse test::Bencher;\n\nuse std::sync::{Mutex as SyncMutex, Arc};\n\nuse async_std::sync::Mutex as AsyncStdMutex;\n\nuse futures_locks::Mutex as FuturesMutex;\n\nuse tokio::sync::Mutex as TokioMutex;\n\nuse futures::executor::block_on;\n\nuse async_std::task::spawn;\n\nuse futures_test::std_reexport::sync::atomic::AtomicUsize;\n\nuse futures_test::std_reexport::sync::atomic::Ordering::SeqCst;\n\nuse async_weighted_semaphore::Semaphore;\n\nuse std::cell::UnsafeCell;\n\nuse std::thread;\n\nuse futures::future::BoxFuture;\n\nuse futures::executor::ThreadPool;\n\nuse futures::task::SpawnExt;\n\nuse futures::future::join_all;\n\n\n\nconst PARALLELISM: usize = 8;\n\nconst CONCURRENCY: usize = 100;\n\nconst ITEMS: usize = 2;\n\nconst ITERS: usize = 1000;\n\n\n", "file_path": "benches/mutex.rs", "rank": 35, "score": 27142.420816567716 }, { "content": " })\n\n }\n\n}\n\n\n\nimpl AtomicCounter for AsyncStdMutex<usize> {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n *x.lock().await += 1;\n\n }\n\n })\n\n }\n\n}\n\n\n\nimpl AtomicCounter for TokioMutex<usize> {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n *x.lock().await += 1;\n\n }\n", "file_path": "benches/mutex.rs", "rank": 36, "score": 27134.1614175233 }, { "content": " })\n\n }\n\n}\n\n\n\nimpl AtomicCounter for FuturesMutex<usize> {\n\n fn do_loop<'a>(items: Vec<Arc<Self>>) -> BoxFuture<'static, ()> {\n\n Box::pin(async move {\n\n for x in get_loop(&items) {\n\n *x.lock().await += 1;\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "benches/mutex.rs", "rank": 37, "score": 27133.9348086802 }, { "content": "unsafe impl Send for Semaphore {}\n\n\n\nimpl UnwindSafe for Semaphore {}\n\n\n\nimpl RefUnwindSafe for Semaphore {}\n\n\n\nimpl Debug for Semaphore {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self.acquire.load(Relaxed) {\n\n Available(available) => write!(f, \"Semaphore::Ready({:?})\", available)?,\n\n Queued(_) => match self.release.load(Relaxed) {\n\n Unlocked(available) => write!(f, \"Semaphore::Blocked({:?})\", available)?,\n\n _ => write!(f, \"Semaphore::Unknown\")?,\n\n },\n\n };\n\n Ok(())\n\n }\n\n}\n\n\n\n\n", "file_path": "src/semaphore.rs", "rank": 38, "score": 27054.09376314834 }, { "content": "\n\n /// Wait until there are no older pending calls to [acquire](#method.acquire) and at least `amount` permits available.\n\n /// Then consume the requested permits and return a [`SemaphoreGuard`].\n\n /// # Errors\n\n /// Returns [`PoisonError`] is the semaphore is poisoned.\n\n /// # Examples\n\n /// ```\n\n /// # use futures::executor::block_on;\n\n /// # use std::future::Future;\n\n /// use 
async_weighted_semaphore::Semaphore;\n\n /// async fn limit_concurrency(semaphore: &Semaphore, future: impl Future<Output=()>) {\n\n /// let guard = semaphore.acquire(1).await.unwrap();\n\n /// future.await\n\n /// }\n\n /// ```\n\n pub fn acquire(&self, amount: usize) -> AcquireFuture {\n\n AcquireFuture(UnsafeCell::new(Waiter {\n\n semaphore: self,\n\n step: UnsafeCell::new(AcquireStep::Entering),\n\n waker: unsafe { AtomicWaker::new() },\n", "file_path": "src/semaphore.rs", "rank": 39, "score": 27053.389786818774 }, { "content": " /// }\n\n /// ```\n\n pub fn acquire_arc(self: &Arc<Self>, amount: usize) -> AcquireFutureArc {\n\n AcquireFutureArc {\n\n arc: self.clone(),\n\n inner: unsafe { mem::transmute::<AcquireFuture, AcquireFuture>(self.acquire(amount)) },\n\n }\n\n }\n\n\n\n\n\n /// Like [try_acquire](#method.try_acquire), but takes an [`Arc`] `<Semaphore>`, and returns a guard that is `'static`,\n\n /// [`Send`] and [`Sync`].\n\n /// # Examples\n\n /// ```\n\n /// # use async_weighted_semaphore::{Semaphore, TryAcquireError, SemaphoreGuardArc};\n\n /// # use std::sync::Arc;\n\n /// use async_channel::{Sender, TrySendError};\n\n /// // Limit size of a producer-consumer queue\n\n /// async fn try_send<T>(semaphore: &Arc<Semaphore>,\n\n /// sender: &Sender<(SemaphoreGuardArc, T)>,\n", "file_path": "src/semaphore.rs", "rank": 40, "score": 27052.378616180587 }, { "content": "use std::{fmt, mem};\n\nuse std::panic::{RefUnwindSafe, UnwindSafe};\n\nuse std::sync::atomic::Ordering::{Relaxed, Acquire};\n\nuse crate::state::ReleaseState::Unlocked;\n\nuse crate::state::AcquireState::{Available, Queued};\n\nuse std::fmt::{Debug, Formatter};\n\nuse crate::state::{AcquireStep, Waiter, Permits, AcquireState, ReleaseState};\n\nuse std::cell::UnsafeCell;\n\nuse crate::{AcquireFuture, TryAcquireError, SemaphoreGuard, AcquireFutureArc, SemaphoreGuardArc};\n\nuse std::marker::{PhantomPinned, PhantomData};\n\nuse crate::waker::AtomicWaker;\n\nuse std::ptr::null;\n\nuse std::sync::Arc;\n\nuse crate::atomic::Atomic;\n\nuse std::mem::size_of;\n\nuse crate::release::ReleaseAction;\n\n#[allow(unused_imports)] // used by docs\n\nuse crate::errors::PoisonError;\n\n\n\n/// An async weighted semaphore. 
See [crate documentation](index.html) for usage.\n", "file_path": "src/semaphore.rs", "rank": 41, "score": 27050.606479471728 }, { "content": " /// }\n\n /// }\n\n /// ```\n\n pub fn try_acquire(&self, amount: usize) -> Result<SemaphoreGuard, TryAcquireError> {\n\n let mut current = self.acquire.load(Acquire);\n\n loop {\n\n match current {\n\n Queued(_) => return Err(TryAcquireError::WouldBlock),\n\n Available(available) => {\n\n let available = available.into_usize().ok_or(TryAcquireError::Poisoned)?;\n\n if available < amount {\n\n return Err(TryAcquireError::WouldBlock);\n\n }\n\n if self.acquire.cmpxchg_weak_acqrel(&mut current, Available(Permits::new(available - amount))) {\n\n return Ok(SemaphoreGuard::new(self, amount));\n\n }\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/semaphore.rs", "rank": 42, "score": 27049.83569003354 }, { "content": " /// }\n\n /// }\n\n /// ```\n\n pub const MAX_AVAILABLE: usize = (1 << (size_of::<usize>() * 8 - 3)) - 1;\n\n\n\n /// Create a new semaphore with an initial number of permits.\n\n /// # Examples\n\n /// ```\n\n /// use async_weighted_semaphore::Semaphore;\n\n /// let semaphore = Semaphore::new(1024);\n\n /// ```\n\n pub fn new(initial: usize) -> Self {\n\n Semaphore {\n\n acquire: Atomic::new(Available(Permits::new(initial))),\n\n release: Atomic::new(Unlocked(Permits::new(0))),\n\n front: UnsafeCell::new(null()),\n\n middle: UnsafeCell::new(null()),\n\n next_cancel: Atomic::new(null()),\n\n }\n\n }\n", "file_path": "src/semaphore.rs", "rank": 43, "score": 27048.795372347042 }, { "content": " amount,\n\n next: UnsafeCell::new(null()),\n\n prev: UnsafeCell::new(null()),\n\n next_cancel: UnsafeCell::new(null()),\n\n }), PhantomData, PhantomPinned)\n\n }\n\n\n\n /// Like [acquire](#method.acquire), but fails if the call would block.\n\n /// # Errors\n\n /// * Returns [`TryAcquireError::Poisoned`] is the semaphore is poisoned.\n\n /// * Returns [`TryAcquireError::WouldBlock`] if a call to `acquire` would have blocked. 
This can\n\n /// occur if there are insufficient available permits or if there is another pending call to acquire.\n\n /// # Examples\n\n /// ```\n\n /// # use futures::executor::block_on;\n\n /// # use std::future::Future;\n\n /// use async_weighted_semaphore::Semaphore;\n\n /// async fn run_if_safe(semaphore: &Semaphore, future: impl Future<Output=()>) {\n\n /// if semaphore.try_acquire(1).is_ok() {\n\n /// future.await\n", "file_path": "src/semaphore.rs", "rank": 44, "score": 27048.326276114076 }, { "content": " ReleaseAction { sem: self, releasable: Permits::new(amount) }.release();\n\n }\n\n }\n\n\n\n /// Poison the semaphore, causing all pending and future calls to `acquire` to fail immediately.\n\n /// This can be used to unblock pending acquires when the guarded operation would fail anyway.\n\n /// # Examples\n\n /// ```\n\n /// # use async_weighted_semaphore::{Semaphore, TryAcquireError};\n\n /// # use std::sync::Arc;\n\n /// # use async_std::sync::Mutex;\n\n /// use async_channel::{Receiver, RecvError};\n\n /// async fn consume(semaphore: &Semaphore, receiver: Receiver<usize>){\n\n /// while let Ok(x) = receiver.recv().await {\n\n /// println!(\"{:?}\", x);\n\n /// semaphore.release(1);\n\n /// }\n\n /// // There will be no more calls to recv, so unblock all senders.\n\n /// semaphore.poison();\n\n /// }\n\n /// ```\n\n pub fn poison(&self) {\n\n unsafe {\n\n ReleaseAction { sem: self, releasable: Permits::poison() }.release();\n\n }\n\n }\n\n}\n", "file_path": "src/semaphore.rs", "rank": 45, "score": 27047.361032794728 }, { "content": "\n\n /// Return `amount` permits to the semaphore. This will eventually wake any calls to [acquire](#method.acquire)\n\n /// that can succeed with the additional permits. Calling `release` often makes sense after calling\n\n /// [`SemaphoreGuard::forget`] or when using the semaphore to signal the number of elements that\n\n /// are available for processing.\n\n /// # Examples\n\n /// ```\n\n /// # use async_weighted_semaphore::{Semaphore, TryAcquireError};\n\n /// use async_channel::{Receiver, RecvError};\n\n /// // Limit size of a producer-consumer queue\n\n /// async fn recv<T>(semaphore: &Semaphore, recv: &Receiver<T>) -> Result<T, RecvError>{\n\n /// let result = recv.recv().await?;\n\n /// // Note that this only guards elements in the queue, not those being processed after the\n\n /// // queue.\n\n /// semaphore.release(1);\n\n /// Ok(result)\n\n /// }\n\n /// ```\n\n pub fn release(&self, amount: usize) {\n\n unsafe {\n", "file_path": "src/semaphore.rs", "rank": 46, "score": 27046.25271239278 }, { "content": "impl Semaphore {\n\n /// The maximum number of permits that can be made available. This is slightly smaller than\n\n /// [`usize::MAX`]. If the number of available permits exceeds this number, it may poison the\n\n /// semaphore.\n\n /// # Examples\n\n /// ```\n\n /// # use async_weighted_semaphore::{Semaphore, SemaphoreGuard};\n\n /// struct ReadWriteLock(Semaphore);\n\n /// impl ReadWriteLock {\n\n /// fn new() -> Self {\n\n /// ReadWriteLock(Semaphore::new(Semaphore::MAX_AVAILABLE))\n\n /// }\n\n /// // Acquire one permit, allowing up to MAX_AVAILABLE concurrent readers.\n\n /// async fn read(&self) -> SemaphoreGuard<'_> {\n\n /// self.0.acquire(1).await.unwrap()\n\n /// }\n\n /// // The writer acquires all the permits, prevent any concurrent writers or readers. 
The\n\n /// // first-in-first-out priority policy prevents writer starvation.\n\n /// async fn write(&self) -> SemaphoreGuard<'_> {\n\n /// self.0.acquire(Semaphore::MAX_AVAILABLE).await.unwrap()\n", "file_path": "src/semaphore.rs", "rank": 47, "score": 27044.79374714544 }, { "content": " /// message: T\n\n /// ) -> Result<(), TrySendError<T>>{\n\n /// match semaphore.try_acquire_arc(1) {\n\n /// Err(TryAcquireError::WouldBlock) => Err(TrySendError::Full(message)),\n\n /// // A semaphore can be poisoned to prevent deadlock when a channel closes.\n\n /// Err(TryAcquireError::Poisoned) => Err(TrySendError::Closed(message)),\n\n /// Ok(guard) => match sender.try_send((guard, message)) {\n\n /// Err(TrySendError::Closed((guard, message))) => Err(TrySendError::Closed(message)),\n\n /// Err(TrySendError::Full((guard, message))) => Err(TrySendError::Full(message)),\n\n /// Ok(()) => Ok(())\n\n /// }\n\n /// }\n\n /// }\n\n /// ```\n\n pub fn try_acquire_arc(self: &Arc<Self>, amount: usize) -> Result<SemaphoreGuardArc, TryAcquireError> {\n\n let guard = self.try_acquire(amount)?;\n\n let result = SemaphoreGuardArc::new(self.clone(), amount);\n\n guard.forget();\n\n Ok(result)\n\n }\n", "file_path": "src/semaphore.rs", "rank": 48, "score": 27043.222183914215 }, { "content": "\n\n /// Like [acquire](#method.acquire), but takes an [`Arc`] `<Semaphore>` and returns a guard that is `'static`, [`Send`] and [`Sync`].\n\n /// # Examples\n\n /// ```\n\n /// # use async_weighted_semaphore::{Semaphore, PoisonError, SemaphoreGuardArc};\n\n /// # use std::sync::Arc;\n\n /// use async_channel::{Sender, SendError};\n\n /// // Limit size of a producer-consumer queue\n\n /// async fn send<T>(semaphore: &Arc<Semaphore>,\n\n /// sender: &Sender<(SemaphoreGuardArc, T)>,\n\n /// message: T\n\n /// ) -> Result<(), SendError<T>>{\n\n /// match semaphore.acquire_arc(1).await {\n\n /// // A semaphore can be poisoned to prevent deadlock when a channel closes.\n\n /// Err(PoisonError) => Err(SendError(message)),\n\n /// Ok(guard) => match sender.send((guard, message)).await{\n\n /// Err(SendError((guard, message))) => Err(SendError(message)),\n\n /// Ok(()) => Ok(())\n\n /// }\n\n /// }\n", "file_path": "src/semaphore.rs", "rank": 49, "score": 27043.21194900267 }, { "content": "// This implementation encodes state (the available counter, acquire queue, and cancel queue) into\n\n// multiple atomic variables and linked lists. Concurrent acquires (and concurrent cancels) synchronize\n\n// by pushing onto a stack with an atomic swap. Releases synchronize with other operations by attempting\n\n// to acquire a lock. If the lock is successfully acquired, the release can proceed. 
Otherwise\n\n// the lock is marked dirty to indicate that there is additional work for the lock owner to do.\n\npub struct Semaphore {\n\n // The number of available permits or the back of the queue (without next edges).\n\n pub(crate) acquire: Atomic<AcquireState>,\n\n // A number of releasable permits, and the state of the current release lock.\n\n pub(crate) release: Atomic<ReleaseState>,\n\n // The front of the queue (with next edges).\n\n pub(crate) front: UnsafeCell<*const Waiter>,\n\n // The last node swapped from AcquireState (with next edges).\n\n pub(crate) middle: UnsafeCell<*const Waiter>,\n\n // A stack of nodes that are cancelling.\n\n pub(crate) next_cancel: Atomic<*const Waiter>,\n\n}\n\n\n\nunsafe impl Sync for Semaphore {}\n\n\n", "file_path": "src/semaphore.rs", "rank": 50, "score": 27039.84117364435 }, { "content": " f.debug_struct(\"AW\")\n\n .field(\"s\", &self.state.load(SeqCst))\n\n .field(\"d\", &self.data.load(SeqCst))\n\n .field(\"t\", &self.vtable.load(SeqCst))\n\n .field(\"t\", unsafe { &*self.thread.get() })\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::waker::{WakerResult, AtomicWaker};\n\n use std::future::Future;\n\n use futures::task::{Context, Poll};\n\n use std::pin::Pin;\n\n\n\n use futures::pin_mut;\n\n use futures::poll;\n\n\n\n\n", "file_path": "src/waker.rs", "rank": 51, "score": 26835.47527746393 }, { "content": " use futures::executor::block_on;\n\n use std::thread;\n\n use futures_test::std_reexport::sync::mpsc::sync_channel;\n\n\n\n struct Tester {\n\n waiter: AtomicWaker,\n\n }\n\n\n\n unsafe impl Send for Tester {}\n\n\n\n unsafe impl Sync for Tester {}\n\n\n\n impl Unpin for Tester {}\n\n\n\n impl Future for Tester {\n\n type Output = bool;\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<bool> {\n\n unsafe {\n\n self.waiter.poll(cx)\n\n }\n", "file_path": "src/waker.rs", "rank": 52, "score": 26833.636299903887 }, { "content": " unsafe {\n\n let tester = Tester { waiter: AtomicWaker::new() };\n\n pin_mut!(tester);\n\n assert_eq!(Poll::Pending, poll!(&mut tester));\n\n\n\n match tester.waiter.start_cancel() {\n\n WakerResult::Cancelling => {\n\n AtomicWaker::accept_cancel(&tester.as_mut().waiter);\n\n tester.waiter.wait_cancel();\n\n }\n\n WakerResult::Finished { .. 
} => panic!(),\n\n }\n\n }\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_finish_cancel() {\n\n block_on(async {\n\n unsafe {\n", "file_path": "src/waker.rs", "rank": 53, "score": 26832.426558412128 }, { "content": " let tester = Tester { waiter: AtomicWaker::new() };\n\n pin_mut!(tester);\n\n assert_eq!(Poll::Pending, poll!(&mut tester));\n\n assert_eq!(WakerResult::Finished { poisoned: true },\n\n AtomicWaker::finish(&tester.waiter, true));\n\n match tester.waiter.start_cancel() {\n\n WakerResult::Cancelling => panic!(),\n\n WakerResult::Finished { poisoned } => { assert!(poisoned) }\n\n }\n\n }\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_cancel_finish() {\n\n block_on(async {\n\n unsafe {\n\n let tester = Tester { waiter: AtomicWaker::new() };\n\n pin_mut!(tester);\n\n assert_eq!(Poll::Pending, poll!(&mut tester));\n", "file_path": "src/waker.rs", "rank": 54, "score": 26831.05430984205 }, { "content": "use std::cell::UnsafeCell;\n\nuse std::task::{Waker, Poll, Context, RawWakerVTable};\n\n\n\nuse std::sync::atomic::Ordering::{Acquire, SeqCst, Relaxed};\n\nuse crate::waker::State::{Cancelled, Cancelling};\n\nuse crate::atomic::{Atomic, Packable};\n\nuse std::{mem, thread, fmt};\n\n\n\nuse std::thread::{Thread, panicking};\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::ptr::null;\n\nuse crate::waker::State::{Pending, Storing, Finished, Loading};\n\n\n\n#[derive(Copy, Clone, Eq, PartialOrd, PartialEq, Ord, Debug)]\n", "file_path": "src/waker.rs", "rank": 55, "score": 26830.383755790626 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn test_finish() {\n\n block_on(async {\n\n unsafe {\n\n let tester = Tester { waiter: AtomicWaker::new() };\n\n pin_mut!(tester);\n\n assert_eq!(Poll::Pending, poll!(&mut tester));\n\n assert_eq!(WakerResult::Finished { poisoned: false },\n\n AtomicWaker::finish(&tester.waiter as *const AtomicWaker, false));\n\n assert_eq!(Poll::Ready(false), poll!(&mut tester));\n\n }\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_cancel() {\n\n block_on(async {\n", "file_path": "src/waker.rs", "rank": 56, "score": 26829.080641061864 }, { "content": " // Store a new waker and return the poisoned bit if finished.\n\n #[must_use]\n\n pub unsafe fn poll(&self, context: &mut Context) -> Poll<bool> {\n\n let mut waker = Some(context.waker().clone());\n\n let mut current = self.state.load(Acquire);\n\n loop {\n\n match current {\n\n Pending | Loading => {\n\n if self.state.cmpxchg_weak_acqrel(&mut current, Storing) {\n\n current = Storing;\n\n break;\n\n }\n\n }\n\n Finished { poisoned } => return Poll::Ready(poisoned),\n\n _ => unreachable!()\n\n };\n\n }\n\n let (data, vtable) =\n\n mem::transmute_copy::<_, (*const (), *const RawWakerVTable)>(waker.as_ref().unwrap());\n\n let old_data = self.data.load(Relaxed);\n", "file_path": "src/waker.rs", "rank": 57, "score": 26828.869261061907 }, { "content": "}\n\n\n\nimpl Drop for AtomicWaker {\n\n fn drop(&mut self) {\n\n match self.state.load(Relaxed) {\n\n Pending | Cancelled => unsafe {\n\n let data = self.data.load(Relaxed);\n\n let vtable = self.vtable.load(Relaxed);\n\n if vtable != null() {\n\n mem::drop(mem::transmute::<_, Waker>((data, vtable)));\n\n }\n\n }\n\n Finished { .. 
} => {}\n\n _ => if !panicking() { unreachable!() }\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for AtomicWaker {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n", "file_path": "src/waker.rs", "rank": 58, "score": 26828.221677188827 }, { "content": " match tester.waiter.start_cancel() {\n\n WakerResult::Cancelling => {\n\n AtomicWaker::accept_cancel(&tester.waiter);\n\n tester.waiter.wait_cancel();\n\n }\n\n WakerResult::Finished { .. } => panic!(),\n\n }\n\n }\n\n });\n\n }\n\n\n\n fn test_race(cancel: bool) {\n\n unsafe {\n\n let iters = 10000;\n\n let mut testers = (0..iters).map(|_| Tester { waiter: AtomicWaker::new() }).collect::<Vec<_>>();\n\n let (send, recv) = sync_channel(0);\n\n let h1 = thread::spawn(move || block_on(async {\n\n let mut results = vec![];\n\n for i in 0..iters {\n\n let mut tester = &mut testers[i];\n", "file_path": "src/waker.rs", "rank": 59, "score": 26828.20354795446 }, { "content": "impl Packable for State {\n\n unsafe fn encode(val: Self) -> usize {\n\n mem::transmute::<_, u8>(val) as usize\n\n }\n\n\n\n unsafe fn decode(val: usize) -> Self {\n\n mem::transmute(val as u8)\n\n }\n\n}\n\n\n\nimpl AtomicWaker {\n\n pub unsafe fn new() -> Self {\n\n AtomicWaker {\n\n state: Atomic::new(Pending),\n\n vtable: Atomic::new(null()),\n\n data: Atomic::new(null()),\n\n thread: UnsafeCell::new(None),\n\n }\n\n }\n\n\n", "file_path": "src/waker.rs", "rank": 60, "score": 26827.60804405663 }, { "content": " }\n\n }\n\n Finished { poisoned } => {\n\n return WakerResult::Finished { poisoned };\n\n }\n\n _ => unreachable!(\"{:?}\", current)\n\n }\n\n }\n\n }\n\n\n\n // Wait for the producer thread to accept cancellation\n\n pub unsafe fn wait_cancel(&self) {\n\n loop {\n\n match self.state.load(Acquire) {\n\n Cancelling => thread::park(),\n\n Cancelled => break,\n\n _ => panic!(),\n\n }\n\n }\n\n }\n", "file_path": "src/waker.rs", "rank": 61, "score": 26827.27697424529 }, { "content": " data: Atomic<*const ()>,\n\n state: Atomic<State>,\n\n // The thread waiting for acceptance of cancellation\n\n thread: UnsafeCell<Option<Thread>>,\n\n}\n\n\n\n// The result of an attempt to finish or cancel.\n\n#[derive(Copy, Clone, Eq, PartialOrd, PartialEq, Ord, Debug)]\n\npub enum WakerResult {\n\n // The future has been cancelled. 
The producer should accept_cancel.\n\n // The future thread should wait_cancel.\n\n Cancelling,\n\n // The future completed.\n\n Finished { poisoned: bool },\n\n}\n\n\n\nunsafe impl Send for AtomicWaker {}\n\n\n\nunsafe impl Sync for AtomicWaker {}\n\n\n", "file_path": "src/waker.rs", "rank": 62, "score": 26827.234112367303 }, { "content": " assert_eq!(Poll::Pending, poll!(&mut tester));\n\n send.send(&*(&tester.waiter as *const AtomicWaker)).unwrap();\n\n let result = if cancel {\n\n match tester.waiter.start_cancel() {\n\n WakerResult::Cancelling => {\n\n tester.waiter.wait_cancel();\n\n None\n\n }\n\n WakerResult::Finished { poisoned } => Some(poisoned),\n\n }\n\n } else {\n\n Some(tester.await)\n\n };\n\n results.push(result);\n\n }\n\n results\n\n }));\n\n let h2 = thread::spawn(move || block_on(async {\n\n let mut results = vec![];\n\n for i in 0..iters {\n", "file_path": "src/waker.rs", "rank": 63, "score": 26827.232267246927 }, { "content": " }\n\n\n\n // Signal that the future is finished.\n\n pub unsafe fn finish(this: *const Self, poisoned: bool) -> WakerResult {\n\n let mut current = (*this).state.load(Acquire);\n\n loop {\n\n match current {\n\n Pending => {\n\n if (*this).state.cmpxchg_weak_acqrel(&mut current, Loading) {\n\n current = Loading;\n\n }\n\n }\n\n Loading => {\n\n let data = (*this).data.load(Relaxed);\n\n let vtable = (*this).vtable.load(Relaxed);\n\n if (*this).state.cmpxchg_weak_acqrel(&mut current, Finished { poisoned }) {\n\n let waker = mem::transmute::<_, Waker>((data, vtable));\n\n waker.wake();\n\n return WakerResult::Finished { poisoned };\n\n }\n", "file_path": "src/waker.rs", "rank": 64, "score": 26827.064709723905 }, { "content": " }\n\n Storing => {\n\n if (*this).state.cmpxchg_weak_acqrel(&mut current, Finished { poisoned }) {\n\n return WakerResult::Finished { poisoned };\n\n }\n\n }\n\n Cancelling => return WakerResult::Cancelling,\n\n _ => unreachable!()\n\n }\n\n }\n\n }\n\n\n\n pub unsafe fn start_cancel(&self) -> WakerResult {\n\n *self.thread.get() = Some(thread::current());\n\n let mut current = self.state.load(Acquire);\n\n loop {\n\n match current {\n\n Pending | Loading => {\n\n if self.state.cmpxchg_weak_acqrel(&mut current, Cancelling) {\n\n return WakerResult::Cancelling;\n", "file_path": "src/waker.rs", "rank": 65, "score": 26827.025676561854 }, { "content": "\n\n // Accept cancellation on the producer thread, notifying the future thread.\n\n pub unsafe fn accept_cancel(this: *const Self) {\n\n let mut current = (*this).state.load(Acquire);\n\n loop {\n\n match current {\n\n Cancelling => {\n\n let thread = (*(*this).thread.get()).take().unwrap();\n\n if (*this).state.cmpxchg_weak_acqrel(&mut current, Cancelled) {\n\n thread.unpark();\n\n break;\n\n } else {\n\n *(*this).thread.get() = Some(thread);\n\n continue;\n\n }\n\n }\n\n _ => unreachable!(\"{:?}\", current)\n\n }\n\n }\n\n }\n", "file_path": "src/waker.rs", "rank": 66, "score": 26826.067950007717 }, { "content": " let waiter = recv.recv().unwrap();\n\n let result =\n\n match AtomicWaker::finish(waiter, i % 2 == 0) {\n\n WakerResult::Cancelling => {\n\n AtomicWaker::accept_cancel(waiter);\n\n WakerResult::Cancelling\n\n }\n\n WakerResult::Finished { poisoned } =>\n\n WakerResult::Finished { poisoned }\n\n };\n\n results.push(result);\n\n }\n\n results\n\n }));\n\n let r1 = h1.join().unwrap();\n\n let r2 = h2.join().unwrap();\n\n for (send, recv) in r1.into_iter().zip(r2.into_iter()) {\n\n match (cancel, send, recv) {\n\n (_, Some(o), WakerResult::Finished { poisoned: i }) if i 
== o => {}\n\n (true, None, WakerResult::Cancelling) => {}\n", "file_path": "src/waker.rs", "rank": 67, "score": 26822.59766370551 }, { "content": " let old_vtable = self.vtable.load(Relaxed);\n\n self.data.store(data, Relaxed);\n\n self.vtable.store(vtable, Relaxed);\n\n if old_vtable != null() {\n\n mem::drop(mem::transmute::<_, Waker>((old_data, old_vtable)));\n\n }\n\n loop {\n\n match current {\n\n Storing => {\n\n if self.state.cmpxchg_weak_acqrel(&mut current, Pending) {\n\n mem::forget(waker.take());\n\n return Poll::Pending;\n\n }\n\n }\n\n Finished { poisoned } => {\n\n return Poll::Ready(poisoned);\n\n }\n\n _ => unreachable!(\"{:?}\", current)\n\n }\n\n }\n", "file_path": "src/waker.rs", "rank": 68, "score": 26822.455867146633 }, { "content": " _ => panic!(\"Unexpected outcome {:?}\", (cancel, send, recv))\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_poll_race() {\n\n test_race(false);\n\n }\n\n\n\n #[test]\n\n fn test_cancel_race() {\n\n test_race(true);\n\n }\n\n}\n", "file_path": "src/waker.rs", "rank": 69, "score": 26821.1812577683 }, { "content": "#[derive(Copy, Clone, Eq, PartialOrd, PartialEq, Ord, Debug)]\n\nenum State {\n\n Pending,\n\n // A poll is in progress, storing a new waker.\n\n Storing,\n\n // A finish is in progress, loading the waker.\n\n Loading,\n\n // A finish has succeeded, and will call wake if necessary.\n\n Finished { poisoned: bool },\n\n // start_cancel has been called.\n\n Cancelling,\n\n // accept_cancel has been called.\n\n Cancelled,\n\n}\n\n\n\n// A primitive for synchronizing between two threads:\n\n// Future: a thread that polls the a future and may decide to cancel (drop) it.\n\n// Producer: one that marks the future as finished or accepts cancellation.\n\npub struct AtomicWaker {\n\n vtable: Atomic<*const RawWakerVTable>,\n\n // Split the waker among two atomics. 
These only need to be atomic to prevent data races.\n", "file_path": "src/waker.rs", "rank": 70, "score": 25085.990954612473 }, { "content": "use crate::{Semaphore, PoisonError};\n\nuse std::sync::Arc;\n\nuse futures_test::std_reexport::collections::VecDeque;\n\nuse async_std::sync::Mutex;\n\nuse std::io::{BufReader, BufRead};\n\nuse std::{mem, thread};\n\nuse futures::executor::block_on;\n\nuse rand::{thread_rng, Rng};\n\n\n\n\n\n\n\n#[derive(Clone)]\n", "file_path": "src/tests/pipe.rs", "rank": 79, "score": 24021.312540449435 }, { "content": "use crate::{Semaphore, PoisonError, SemaphoreGuard};\n\nuse std::cell::UnsafeCell;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::thread;\n\nuse std::sync::Arc;\n\nuse futures::executor::block_on;\n\n\n\n\n\n#[derive(Debug)]\n\npub struct RwLock<T: ?Sized> {\n\n semaphore: Semaphore,\n\n inner: UnsafeCell<T>,\n\n}\n\n\n\npub struct RwLockReadGuard<'a, T: ?Sized> {\n\n _guard: SemaphoreGuard<'a>,\n\n value: *const T,\n\n}\n\n\n\npub struct RwLockWriteGuard<'a, T: ?Sized> {\n", "file_path": "src/tests/lock.rs", "rank": 80, "score": 24020.570759163114 }, { "content": " _guard: SemaphoreGuard<'a>,\n\n value: *mut T,\n\n}\n\n\n\nimpl<T: ?Sized> RwLock<T> {\n\n fn new(inner: T) -> Self where T: Sized {\n\n RwLock {\n\n semaphore: Semaphore::new(Semaphore::MAX_AVAILABLE),\n\n inner: UnsafeCell::new(inner),\n\n }\n\n }\n\n async fn read(&self) -> Result<RwLockReadGuard<'_, T>, PoisonError> {\n\n Ok(RwLockReadGuard { _guard: self.semaphore.acquire(1).await?, value: self.inner.get() })\n\n }\n\n async fn write(&self) -> Result<RwLockWriteGuard<'_, T>, PoisonError> {\n\n Ok(RwLockWriteGuard { _guard: self.semaphore.acquire(Semaphore::MAX_AVAILABLE).await?, value: self.inner.get() })\n\n }\n\n fn into_inner(self) -> Result<T, PoisonError> where T: Sized {\n\n match self.semaphore.try_acquire(0) {\n\n Ok(_) => Ok(self.inner.into_inner()),\n", "file_path": "src/tests/lock.rs", "rank": 82, "score": 24017.30324150991 }, { "content": " fn deref(&self) -> &Self::Target {\n\n unsafe { &*self.value }\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> Deref for RwLockWriteGuard<'a, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n unsafe { &*self.value }\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> DerefMut for RwLockWriteGuard<'a, T> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n unsafe { &mut *self.value }\n\n }\n\n}\n\n\n\n\n\n#[test]\n", "file_path": "src/tests/lock.rs", "rank": 83, "score": 24015.216474704637 }, { "content": " if n != buf.len() {\n\n break;\n\n }\n\n }\n\n let mut lines = 0;\n\n for line in BufReader::new(&mut result.as_slice()).lines() {\n\n line.unwrap().parse::<usize>().unwrap();\n\n lines += 1;\n\n }\n\n assert_eq!(threads * iters, lines);\n\n });\n\n}", "file_path": "src/tests/pipe.rs", "rank": 86, "score": 24011.398737788382 }, { "content": " mem::drop(lock);\n\n self.inner.0.ready.release(buf.len());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Drop for ReaderInner {\n\n fn drop(&mut self) {\n\n self.0.free.poison();\n\n }\n\n}\n\n\n\nimpl Drop for WriterInner {\n\n fn drop(&mut self) {\n\n self.0.ready.poison();\n\n }\n\n}\n\n\n", "file_path": "src/tests/pipe.rs", "rank": 87, "score": 24010.176401552213 }, { "content": " buf.len()\n\n } else {\n\n let mut lock = self.inner.0.buffer.lock().await;\n\n let length = lock.len().min(buf.len());\n\n for (i, b) in lock.drain(..length).enumerate() {\n\n buf[i] = b;\n\n }\n\n length\n\n };\n\n self.inner.0.free.release(total);\n\n total\n\n }\n\n}\n\n\n\nimpl Writer {\n\n async fn write_all(&self, 
buf: &[u8]) -> Result<(), PoisonError> {\n\n assert!(buf.len() < self.inner.0.buffer.lock().await.capacity());\n\n self.inner.0.free.acquire(buf.len()).await?.forget();\n\n let mut lock = self.inner.0.buffer.lock().await;\n\n lock.extend(buf.iter().cloned());\n", "file_path": "src/tests/pipe.rs", "rank": 88, "score": 24009.331195305032 }, { "content": " Err(_) => Err(PoisonError),\n\n }\n\n }\n\n}\n\n\n\nunsafe impl<T: ?Sized + Send> Send for RwLock<T> {}\n\n\n\nunsafe impl<T: ?Sized + Sync + Send> Sync for RwLock<T> {}\n\n\n\nunsafe impl<'a, T: ?Sized + Sync> Send for RwLockReadGuard<'a, T> {}\n\n\n\nunsafe impl<'a, T: ?Sized + Sync> Sync for RwLockReadGuard<'a, T> {}\n\n\n\nunsafe impl<'a, T: ?Sized + Sync + Send> Send for RwLockWriteGuard<'a, T> {}\n\n\n\nunsafe impl<'a, T: ?Sized + Sync + Send> Sync for RwLockWriteGuard<'a, T> {}\n\n\n\nimpl<'a, T: ?Sized> Deref for RwLockReadGuard<'a, T> {\n\n type Target = T;\n\n\n", "file_path": "src/tests/lock.rs", "rank": 89, "score": 24006.71589428367 }, { "content": "mod unit;\n\nmod pipe;\n\nmod lock;\n", "file_path": "src/tests/mod.rs", "rank": 90, "score": 24003.529597530236 }, { "content": "# async-weighted-semaphore\n\n[![Build](https://travis-ci.com/nathdobson/async-weighted-semaphore.svg?branch=master)](https://travis-ci.com/github/nathdobson/async-weighted-semaphore)\n\n[![License](https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg)](\n\nhttps://github.com/nathdobson/async-weighted-semaphore)\n\n[![Cargo](https://img.shields.io/crates/v/async-weighted-semaphore.svg)](\n\nhttps://crates.io/crates/async-weighted-semaphore)\n\n[![Documentation](https://docs.rs/async-weighted-semaphore/badge.svg)](\n\nhttps://docs.rs/async-weighted-semaphore)\n\n\n\nAn async semaphore where any number of permits can be acquired or released in one call.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0\n\n ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license\n\n ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n", "file_path": "README.md", "rank": 91, "score": 14800.016795005908 }, { "content": "impl Debug for AcquireFutureArc {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.debug_tuple(\"AcquireFutureArc\").field(&unsafe { self.inner.waiter() }.amount).finish()\n\n }\n\n}\n\n\n\nimpl<'a> AcquireFuture<'a> {\n\n unsafe fn waiter(&self) -> &Waiter {\n\n &*self.0.get()\n\n }\n\n // Try to acquire or add to queue.\n\n unsafe fn poll_enter(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<SemaphoreGuard<'a>, PoisonError>> {\n\n let acquire = &(*self.waiter().semaphore).acquire;\n\n let mut current = acquire.load(Acquire);\n\n loop {\n\n let (available, back) = match current {\n\n Queued(back) => (0, back),\n\n Available(available) => {\n\n let available = match available.into_usize() {\n\n None => {\n", "file_path": "src/acquire.rs", "rank": 92, "score": 30.077490934641194 }, { "content": "/// A [`Future`] returned by [`Semaphore::acquire_arc`] that produces a [`SemaphoreGuardArc`].\n\npub struct AcquireFutureArc {\n\n pub(crate) arc: Arc<Semaphore>,\n\n pub(crate) inner: AcquireFuture<'static>,\n\n}\n\n\n\nunsafe impl<'a> Sync for AcquireFuture<'a> {}\n\n\n\nunsafe impl<'a> Send for AcquireFuture<'a> 
{}\n\n\n\nimpl<'a> UnwindSafe for AcquireFuture<'a> {}\n\n\n\nimpl<'a> RefUnwindSafe for AcquireFuture<'a> {}\n\n\n\nimpl<'a> Debug for AcquireFuture<'a> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.debug_tuple(\"AcquireFuture\").field(&unsafe { self.waiter() }.amount).finish()\n\n }\n\n}\n\n\n", "file_path": "src/acquire.rs", "rank": 93, "score": 22.002653795416553 }, { "content": " }\n\n pub fn poison() -> Self {\n\n Permits(usize::max_value())\n\n }\n\n pub fn into_usize(self) -> Option<usize> {\n\n if self.0 <= Semaphore::MAX_AVAILABLE {\n\n Some(self.0)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for Permits {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n if self.0 <= Semaphore::MAX_AVAILABLE {\n\n write!(f, \"{:?}\", self.0)\n\n } else {\n\n write!(f, \"poison\")\n\n }\n", "file_path": "src/state.rs", "rank": 94, "score": 21.523271239726352 }, { "content": "use crate::state::{Waiter, AcquireStep, Permits};\n\nuse std::cell::UnsafeCell;\n\nuse std::marker::{PhantomPinned, PhantomData};\n\nuse crate::{Semaphore, SemaphoreGuard, SemaphoreGuardArc};\n\nuse std::sync::Arc;\n\nuse std::panic::{UnwindSafe, RefUnwindSafe};\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::{fmt};\n\nuse std::task::{Context, Poll};\n\nuse crate::state::AcquireState::{Available, Queued};\n\nuse std::ptr::null;\n\nuse std::pin::Pin;\n\nuse std::future::Future;\n\nuse crate::waker::{WakerResult};\n\nuse crate::errors::PoisonError;\n\nuse std::sync::atomic::Ordering::Acquire;\n\n\n\n/// A [`Future`] returned by [`Semaphore::acquire`] that produces a [`SemaphoreGuard`].\n\npub struct AcquireFuture<'a>(pub(crate) UnsafeCell<Waiter>, pub(crate) PhantomData<&'a Semaphore>, pub(crate) PhantomPinned);\n\n\n", "file_path": "src/acquire.rs", "rank": 95, "score": 20.290775078997033 }, { "content": "#![feature(test)]\n\nextern crate test;\n\n\n\nuse test::Bencher;\n\nuse async_weighted_semaphore::{Semaphore, PoisonError};\n\nuse std::sync::Arc;\n\nuse futures::executor::{ThreadPool, block_on};\n\nuse futures::task::SpawnExt;\n\nuse rand::{Rng, SeedableRng};\n\nuse rand_xorshift::XorShiftRng;\n\nuse futures::future::join_all;\n\n\n\nconst PARALLELISM: usize = 8;\n\nconst ITEMS: usize = 10;\n\nconst ITERS: usize = 10000;\n\nconst MAX_ACQUIRE: usize = 5;\n\nconst TOKENS: usize = (MAX_ACQUIRE - 1) * ITEMS + 1;\n\n\n", "file_path": "benches/ring.rs", "rank": 96, "score": 20.10443204995533 }, { "content": "\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n unsafe {\n\n let this = self.get_unchecked_mut();\n\n match Pin::new_unchecked(&mut this.inner).poll(cx) {\n\n Poll::Ready(guard) => {\n\n let result =\n\n SemaphoreGuardArc::new(this.arc.clone(), guard?.forget());\n\n Poll::Ready(Ok(result))\n\n }\n\n Poll::Pending => Poll::Pending,\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Drop for AcquireFuture<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n match *self.waiter().step.get() {\n", "file_path": "src/acquire.rs", "rank": 97, "score": 19.688032170619092 }, { "content": " (*self.waiter().semaphore).release(available);\n\n return Poll::Pending;\n\n }\n\n }\n\n\n\n unsafe fn poll_waiting(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<SemaphoreGuard<'a>, PoisonError>> {\n\n match self.waiter().waker.poll(cx) {\n\n Poll::Pending => Poll::Pending,\n\n Poll::Ready(poisoned) => {\n\n *self.waiter().step.get() = AcquireStep::Done;\n\n if poisoned {\n\n Poll::Ready(Err(PoisonError))\n\n } else {\n\n 
Poll::Ready(Ok(SemaphoreGuard::new(&*self.waiter().semaphore, self.waiter().amount)))\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/acquire.rs", "rank": 98, "score": 19.475100558070963 }, { "content": "impl<'a> Future for AcquireFuture<'a> {\n\n type Output = Result<SemaphoreGuard<'a>, PoisonError>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n unsafe {\n\n match *(*self.0.get()).step.get() {\n\n AcquireStep::Entering => {\n\n self.poll_enter(cx)\n\n }\n\n AcquireStep::Waiting => {\n\n self.poll_waiting(cx)\n\n }\n\n AcquireStep::Done => panic!(\"Polling completed future.\")\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Future for AcquireFutureArc {\n\n type Output = Result<SemaphoreGuardArc, PoisonError>;\n", "file_path": "src/acquire.rs", "rank": 99, "score": 19.051491558666093 } ]
Rust
src/indexer/mod.rs
sinhrks/brassfibre
86028eaf7271b2cd560ef064969ef1cbf47396d4
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::hash::Hash;
use std::iter::FromIterator;
use std::slice;
use std::vec;
use nullvec::prelude::dev::algos::Indexing;
use traits::{Slicer, IndexerIndex, Append};
mod convert;
mod formatting;
mod indexing;
mod ops;
mod sort;
#[derive(Clone)]
pub struct Indexer<U: Clone + Hash> {
    pub values: Vec<U>,
    htable: RefCell<HashMap<U, usize>>,
}
impl<U> Indexer<U>
where
    U: Clone + Eq + Hash,
{
    pub fn from_len(len: usize) -> Indexer<usize> {
        (0..len).collect()
    }
    pub fn new(values: Vec<U>) -> Self {
        Indexer {
            values: values,
            htable: RefCell::new(HashMap::new()),
        }
    }
}
impl<U> Slicer for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Scalar = U;
    fn len(&self) -> usize {
        self.values.len()
    }
    fn iloc(&self, location: &usize) -> Self::Scalar {
        self.values[*location].clone()
    }
    unsafe fn iloc_unchecked(&self, location: &usize) -> Self::Scalar {
        self.values.get_unchecked(*location).clone()
    }
    fn ilocs(&self, locations: &[usize]) -> Self {
        let new_values = Indexing::reindex(&self.values, locations);
        Indexer::new(new_values)
    }
    unsafe fn ilocs_unchecked(&self, locations: &[usize]) -> Self {
        let new_values = Indexing::reindex_unchecked(&self.values, locations);
        Indexer::new(new_values)
    }
    fn ilocs_forced(&self, _locations: &[usize]) -> Self {
        unimplemented!()
    }
    fn blocs(&self, flags: &[bool]) -> Self {
        let new_values: Vec<U> = Indexing::blocs(&self.values, flags);
        Indexer::new(new_values)
    }
}
impl<U> IndexerIndex for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Key = U;
    fn contains(&self, label: &U) -> bool {
        self.init_state();
        self.htable.borrow().contains_key(label)
    }
    fn push(&mut self, label: U) {
        let loc = self.len();
        let mut htable = self.htable.borrow_mut();
        match htable.entry(label.clone()) {
            Entry::Occupied(_) => panic!("duplicates are not allowed"),
            Entry::Vacant(e) => e.insert(loc),
        };
        self.values.push(label);
    }
    fn get_loc(&self, label: &U) -> usize {
        self.init_state();
        *self.htable.borrow().get(label).unwrap()
    }
    fn get_locs(&self, labels: &[U]) -> Vec<usize> {
        labels.iter().map(|label| self.get_loc(label)).collect()
    }
    fn init_state(&self) {
        let mut htable = self.htable.borrow_mut();
        if htable.len() != 0 {
            return;
        }
        for (loc, label) in self.values.iter().enumerate() {
            match htable.entry(label.clone()) {
                Entry::Occupied(_) => panic!("duplicates are not allowed"),
                Entry::Vacant(e) => e.insert(loc),
            };
        }
    }
}
impl<'a, T> Append<'a> for Indexer<T>
where
    T: Clone + Eq + Hash,
{
    fn append(&self, other: &Self) -> Self {
        let mut new_values: Vec<T> = self.values.clone();
        new_values.append(&mut other.values.clone());
        Indexer::new(new_values)
    }
}
impl<U> PartialEq for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    fn eq(&self, other: &Indexer<U>) -> bool {
        self.values == other.values
    }
}
impl<U> IntoIterator for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Item = U;
    type IntoIter = vec::IntoIter<U>;
    fn into_iter(self) -> Self::IntoIter {
        self.values.into_iter()
    }
}
impl<U> Indexer<U>
where
    U: Clone + Eq + Hash,
{
    pub fn iter(&self) -> slice::Iter<U> {
        self.values.iter()
    }
}
impl<U> FromIterator<U> for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = U>,
    {
        let values: Vec<U> = iter.into_iter().collect();
        Indexer::new(values)
    }
}
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::hash::Hash;
use std::iter::FromIterator;
use std::slice;
use std::vec;
use nullvec::prelude::dev::algos::Indexing;
use traits::{Slicer, IndexerIndex, Append};
mod convert;
mod formatting;
mod indexing;
mod ops;
mod sort;
#[derive(Clone)]
pub struct Indexer<U: Clone + Hash> {
    pub values: Vec<U>,
    htable: RefCell<HashMap<U, usize>>,
}
impl<U> Indexer<U>
where
    U: Clone + Eq + Hash,
{
    pub fn from_len(len: usize) -> Indexer<usize> {
        (0..len).collect()
    }
    pub fn new(values: Vec<U>) -> Self {
        Indexer {
            values: values,
            htable: RefCell::new(HashMap::new()),
        }
    }
}
impl<U> Slicer for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Scalar = U;
    fn len(&self) -> usize {
        self.values.len()
    }
    fn iloc(&self, location: &usize) -> Self::Scalar {
        self.values[*location].clone()
    }
    unsafe fn iloc_unchecked(&self, location: &usize) -> Self::Scalar {
        self.values.get_unchecked(*location).clone()
    }
    fn ilocs(&self, locations: &[usize]) -> Self {
        let new_values = Indexing::reindex(&self.values, locations);
        Indexer::new(new_values)
    }
    unsafe fn ilocs_unchecked(&self, locations: &[usize]) -> Self {
        let new_values = Indexing::reindex_unchecked(&self.values, locations);
        Indexer::new(new_values)
    }
    fn ilocs_forced(&self, _locations: &[usize]) -> Self {
        unimplemented!()
    }
    fn blocs(&self, flags: &[bool]) -> Self {
        let new_values: Vec<U> = Indexing::blocs(&self.values, flags);
        Indexer::new(new_values)
    }
}
impl<U> IndexerIndex for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Key = U;
    fn contains(&self, label: &U) -> bool {
        self.init_state();
        self.htable.borrow().contains_key(label)
    }
    fn push(&mut self, label: U) {
        let loc = self.len();
        let mut htable = self.htable.borrow_mut();
        match htable.entry(label.clone()) {
            Entry::Occupied(_) => panic!("duplicates are not allowed"),
            Entry::Vacant(e) => e.insert(loc),
        };
        self.values.push(label);
    }
    fn get_loc(&self, label: &U) -> usize {
        self.init_state();
        *self.htable.borrow().get(label).unwrap()
    }
    fn get_locs(&self, labels: &[U]) -> Vec<usize> {
        labels.iter().map(|label| self.get_loc(label)).collect()
    }
    fn init_state(&self) {
        let mut htable = self.htable.borrow_mut();
        if htable.len() != 0 {
            return;
        }
        for (loc, label) in self.values.iter().enumerate() {
            match htable.entry(label.clone()) {
                Entry::Occupied(_) => panic!("duplicates are not allowed"),
                Entry::Vacant(e) => e.insert(loc),
            };
        }
    }
}
impl<'a, T> Append<'a> for Indexer<T>
where
    T: Clone + Eq + Hash,
{
    fn append(&self, other: &Self) -> Self {
        let mut new_values: Vec<T> = self.values.clone();
        new_values.append(&mut other.values.clone());
        Indexer::new(new_values)
    }
}
impl<U> PartialEq for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    fn eq(&self, other: &Indexer<U>) -> bool {
        self.values == other.values
    }
}
impl<U> IntoIterator for Indexer<U>
where
    U: Clone + Eq + Hash,
{
    type Item = U;
    type IntoIter = vec::IntoIter<U>;
    fn into_iter(self) -> Self::IntoIter {
        self.values.into_iter()
    }
}
impl<U> Indexer<U>
where
    U: Clone + Eq + Hash,
{
    pub fn iter(&self) -> slice::Iter<U> {
        self.values.iter()
    }
}
impl<U> FromIterator<U> for Indexer<U>
where
    U: Clone + Eq + Hash,
{
}
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = U>,
    {
        let values: Vec<U> = iter.into_iter().collect();
        Indexer::new(values)
    }
function_block-full_function
[ { "content": "/// Indexing methods for Indexer\n\npub trait IndexerIndex: Slicer {\n\n type Key;\n\n\n\n fn contains(&self, label: &Self::Key) -> bool;\n\n fn push(&mut self, label: Self::Key);\n\n fn get_loc(&self, label: &Self::Key) -> usize;\n\n fn get_locs(&self, labels: &[Self::Key]) -> Vec<usize>;\n\n\n\n // temp\n\n fn init_state(&self);\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 0, "score": 157291.8134599632 }, { "content": "/// Get max number of characters in a vector of String\n\nfn get_width(values: &[String]) -> usize {\n\n values.iter().map(|x| x.len()).max().unwrap()\n\n}\n\n\n", "file_path": "src/formatting.rs", "rank": 1, "score": 124450.86924839573 }, { "content": "#[test]\n\nfn test_index_append() {\n\n let index1: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n let index2: Indexer<i64> = Indexer::new(vec![4, 5, 6]);\n\n let res = index1.append(&index2);\n\n\n\n let exp: Indexer<i64> = Indexer::new(vec![1, 2, 3, 4, 5, 6]);\n\n assert_eq!(res, exp)\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 2, "score": 116513.63204324586 }, { "content": "/// Convert passed values to Vec of equally padded String\n\npub fn pad_string_vector<T: ToString>(values: &[T]) -> Vec<String> {\n\n let strs = to_string_vector(values);\n\n let pad = get_width(&strs);\n\n strs.iter().map(|x| pad_str(x, pad)).collect()\n\n}\n\n\n", "file_path": "src/formatting.rs", "rank": 3, "score": 113888.41398575073 }, { "content": "#[test]\n\nfn test_index_loc_str() {\n\n let values: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n let idx = Indexer::<&str>::new(values);\n\n\n\n assert_eq!(idx.get_loc(&\"B\"), 1);\n\n assert_eq!(idx.get_loc(&\"C\"), 2);\n\n\n\n assert_eq!(idx.get_locs(&vec![\"B\", \"C\"]), vec![1, 2]);\n\n assert_eq!(idx.get_locs(&vec![\"A\", \"C\"]), vec![0, 2]);\n\n\n\n assert_eq!(idx.contains(&\"C\"), true);\n\n assert_eq!(idx.contains(&\"X\"), false);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 4, "score": 112794.00298848879 }, { "content": "#[test]\n\nfn test_index_loc_int64() {\n\n let values: Vec<i64> = vec![1, 2, 3];\n\n let idx = Indexer::<i64>::new(values);\n\n\n\n assert_eq!(idx.get_loc(&1), 0);\n\n assert_eq!(idx.get_loc(&3), 2);\n\n\n\n assert_eq!(idx.get_locs(&vec![1, 3]), vec![0, 2]);\n\n assert_eq!(idx.get_locs(&vec![3, 2]), vec![2, 1]);\n\n\n\n assert_eq!(idx.contains(&1), true);\n\n assert_eq!(idx.contains(&5), false);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 5, "score": 112794.00298848879 }, { "content": "#[test]\n\nfn test_clone() {\n\n let i = Scalar::i64(1);\n\n assert_eq!(i, i.clone());\n\n}\n", "file_path": "tests/scalar.rs", "rank": 6, "score": 112290.25309379966 }, { "content": "/// Convert passed values and header to Vec of equally padded String\n\npub fn pad_string_vector_with_header<T: ToString>(values: &[T], header: String) -> Vec<String> {\n\n let mut strs = to_string_vector(values);\n\n strs.insert(0, header);\n\n pad_string_vector(&strs)\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n #[test]\n\n fn test_to_string_vector_int() {\n\n let values: Vec<i64> = vec![1, 2, 3, 4, 5];\n\n let s = super::to_string_vector(&values);\n\n\n\n let exp_values: Vec<&str> = vec![\"1\", \"2\", \"3\", \"4\", \"5\"];\n\n assert_eq!(&s, &exp_values);\n\n assert_eq!(&super::get_width(&s), &1);\n\n\n\n let values: Vec<i64> = vec![10, 200, 30, 4, 50];\n", "file_path": "src/formatting.rs", "rank": 7, "score": 102691.81124462563 }, { "content": "/// Concatenate along row\n\npub trait Append<'s>: Sized {\n\n fn append<'o>(&'s self, other: &'o Self) -> 
Self;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 8, "score": 97282.89918835193 }, { "content": "fn default_columns(n: usize) -> Vec<String> {\n\n let columns: Vec<usize> = (0..n).collect();\n\n columns.into_iter().map(|x| x.to_string()).collect()\n\n}\n\n\n\nimpl<'a, I, C> DataFrame<'a, 'a, 'a, I, C>\n\nwhere\n\n I: Clone + Eq + Hash,\n\n C: Clone + Eq + Hash,\n\n{\n\n pub fn read_csv<R: Read>(\n\n mut reader: csv::Reader<R>,\n\n ) -> Result<DataFrame<'a, 'a, 'a, usize, String>, csv::Error> {\n\n\n\n // headers read 1st row regardless of has_headers property. Need to clone to avoid double\n\n // borrow in this function\n\n let header = reader.headers()?.clone();\n\n let columns: Vec<String> = if reader.has_headers() {\n\n header.iter().map(|s| s.to_string()).collect()\n\n } else {\n", "file_path": "src/io/mod.rs", "rank": 9, "score": 90595.34363922616 }, { "content": "fn pad_str(s: &str, pad: usize) -> String {\n\n let len = s.len();\n\n // ToDo: fix, can use dynamic formatter in format! macro?\n\n if len < pad {\n\n let mut new = \"\".to_string();\n\n for _ in 0..(pad - len) {\n\n new.push(' ');\n\n }\n\n new.push_str(s);\n\n new\n\n } else {\n\n s.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/formatting.rs", "rank": 10, "score": 90393.06191062622 }, { "content": "#[test]\n\nfn test_dataframe_format() {\n\n let values = vec![\n\n array![1, 2, 3, 4, 5],\n\n array![6.1, 7.1, 8.1, 9.1, 10.1],\n\n array![11, 12, 13, 14, 15],\n\n ];\n\n let df = DataFrame::from_vec(values, vec![10, 20, 30, 40, 50], vec![\"X\", \"YYY\", \"ZZ\"]);\n\n\n\n // better way?\n\n let mut buf = Vec::new();\n\n let _ = write!(&mut buf, \"{}\", df);\n\n let res = str::from_utf8(&buf).unwrap();\n\n assert_eq!(res, \"DataFrame(columns=[\\\"X\\\", \\\"YYY\\\", \\\"ZZ\\\"])\");\n\n}\n\n\n", "file_path": "tests/format.rs", "rank": 11, "score": 85776.07296302333 }, { "content": "#[test]\n\nfn test_series_format() {\n\n let s = Series::new(vec![1, 10, 100], vec![\"XX\", \"Y\", \"ZZ\"]);\n\n\n\n // better way?\n\n let mut buf = Vec::new();\n\n let _ = write!(&mut buf, \"{}\", s);\n\n assert_eq!(&buf, b\"Series([1, 10, 100])\");\n\n}\n\n\n", "file_path": "tests/format.rs", "rank": 12, "score": 85776.07296302333 }, { "content": "#[test]\n\nfn test_index_iter() {\n\n let index: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n let mut it = index.iter();\n\n assert_eq!(it.next(), Some(&1));\n\n assert_eq!(it.next(), Some(&2));\n\n assert_eq!(it.next(), Some(&3));\n\n assert_eq!(it.next(), None);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 13, "score": 85001.64562710834 }, { "content": "#[test]\n\nfn test_index_from_iter() {\n\n let index: Indexer<i64> = (3..6).collect();\n\n let exp: Indexer<i64> = Indexer::new(vec![3, 4, 5]);\n\n assert_eq!(index, exp);\n\n}\n", "file_path": "tests/indexer.rs", "rank": 14, "score": 85001.64562710834 }, { "content": "#[test]\n\nfn test_index_push() {\n\n let values: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n let mut idx = Indexer::<&str>::new(values);\n\n\n\n let exp_index: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n assert_eq!(idx.values, exp_index);\n\n assert_eq!(idx.len(), 3);\n\n assert_eq!(idx.get_loc(&\"C\"), 2);\n\n\n\n idx.push(\"D\");\n\n assert_eq!(idx.len(), 4);\n\n assert_eq!(idx.get_loc(&\"C\"), 2);\n\n assert_eq!(idx.get_loc(&\"D\"), 3);\n\n\n\n idx.push(\"E\");\n\n assert_eq!(idx.len(), 5);\n\n assert_eq!(idx.get_loc(&\"D\"), 3);\n\n assert_eq!(idx.get_loc(&\"E\"), 4);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 15, "score": 85001.64562710834 }, { 
"content": "#[test]\n\nfn test_index_into_iter() {\n\n let index: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n let mut it = index.into_iter();\n\n assert_eq!(it.next(), Some(1));\n\n assert_eq!(it.next(), Some(2));\n\n assert_eq!(it.next(), Some(3));\n\n assert_eq!(it.next(), None);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 16, "score": 85001.64562710834 }, { "content": "/// Convert each element in a vector to String\n\nfn to_string_vector<T: ToString>(values: &[T]) -> Vec<String> {\n\n values.iter().map(|x| x.to_string()).collect()\n\n}\n\n\n", "file_path": "src/formatting.rs", "rank": 17, "score": 83736.98946383604 }, { "content": "#[test]\n\nfn test_series_format_debug() {\n\n let s = Series::new(vec![1, 10, 100], vec![\"XX\", \"Y\", \"ZZ\"]);\n\n\n\n // better way?\n\n let mut buf = Vec::new();\n\n let _ = write!(&mut buf, \"{:?}\", s);\n\n assert_eq!(\n\n &buf,\n\n b\"XX 1\n\n Y 10\n\nZZ 100\"\n\n );\n\n}\n\n\n", "file_path": "tests/format.rs", "rank": 18, "score": 83346.90997452577 }, { "content": "#[test]\n\nfn test_dataframe_format_debug() {\n\n let values = vec![\n\n array![1, 2, 3, 4, 5],\n\n array![6.1, 7.1, 8.1, 9.1, 10.1],\n\n array![11, 12, 13, 14, 15],\n\n ];\n\n let df = DataFrame::from_vec(values, vec![10, 20, 30, 40, 50], vec![\"X\", \"YYY\", \"ZZ\"]);\n\n\n\n // better way?\n\n let mut buf = Vec::new();\n\n let _ = write!(&mut buf, \"{:?}\", df);\n\n let res = str::from_utf8(&buf).unwrap();\n\n assert_eq!(\n\n res,\n\n \" X YYY ZZ\n\n10 1 6.1 11\n\n20 2 7.1 12\n\n30 3 8.1 13\n\n40 4 9.1 14\n\n50 5 10.1 15\"\n\n );\n\n}\n", "file_path": "tests/format.rs", "rank": 19, "score": 83346.90997452577 }, { "content": "#[test]\n\nfn test_index_iloc_ilocs() {\n\n let values: Vec<i64> = vec![1, 2, 3];\n\n let idx = Indexer::<i64>::new(values);\n\n\n\n assert_eq!(idx.iloc(&0), 1);\n\n assert_eq!(idx.iloc(&2), 3);\n\n\n\n assert_eq!(idx.ilocs(&[2, 0]), Indexer::<i64>::new(vec![3, 1]));\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 20, "score": 82594.31152828998 }, { "content": "#[test]\n\nfn test_index_creation_string() {\n\n let values: Vec<String> = vec![\"A\".to_string(), \"B\".to_string(), \"C\".to_string()];\n\n let idx = Indexer::<String>::new(values);\n\n\n\n let exp_index: Vec<String> = vec![\"A\".to_string(), \"B\".to_string(), \"C\".to_string()];\n\n assert_eq!(idx.values, exp_index);\n\n assert_eq!(idx.len(), 3);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 21, "score": 82594.31152828998 }, { "content": "#[test]\n\nfn test_index_creation_str() {\n\n let values: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n let idx = Indexer::<&str>::new(values);\n\n\n\n let exp_index: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n assert_eq!(idx.values, exp_index);\n\n assert_eq!(idx.len(), 3);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 22, "score": 82594.31152828998 }, { "content": "#[test]\n\nfn test_index_creation_int64() {\n\n let values: Vec<i64> = vec![1, 2, 3];\n\n let idx = Indexer::<i64>::new(values);\n\n\n\n let exp_index: Vec<i64> = vec![1, 2, 3];\n\n assert_eq!(idx.values, exp_index);\n\n assert_eq!(idx.len(), 3);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 23, "score": 82594.31152828998 }, { "content": "#[test]\n\nfn test_index_creation_from_len() {\n\n let idx: Indexer<usize> = Indexer::<usize>::from_len(3);\n\n assert_eq!(idx.values, vec![0, 1, 2]);\n\n assert_eq!(idx.len(), 3);\n\n\n\n let idx: Indexer<usize> = Indexer::<usize>::from_len(0);\n\n assert_eq!(idx.values, vec![]);\n\n assert_eq!(idx.len(), 0);\n\n}\n\n\n", 
"file_path": "tests/indexer.rs", "rank": 24, "score": 82594.31152828998 }, { "content": "fn main() {\n\n // cargo build --example indexer\n\n // ./target/debug/examples/indexer\n\n\n\n let values: Vec<i64> = vec![1, 2, 3, 4, 5];\n\n let indexer: Indexer<i64> = Indexer::new(values);\n\n // println!(\"{:}\", &s);\n\n println!(\"{:?}\", &indexer);\n\n println!(\"{:?}\", &indexer[2]);\n\n\n\n println!(\"{:?}\", &(indexer + 1));\n\n}\n", "file_path": "examples/indexer.rs", "rank": 25, "score": 81479.08507896558 }, { "content": "#[test]\n\nfn test_eq() {\n\n let iarr1 = array![1i64, 2, 3];\n\n let iarr2 = array![2i64, 3, 4];\n\n let iarr3 = array![1i64, 2, 3, 4, 5];\n\n let iarr4 = array![1i64, 2, 3];\n\n assert_eq!(iarr1, iarr1);\n\n assert_eq!(iarr1 == iarr2, false);\n\n assert_eq!(iarr1 == iarr3, false);\n\n assert_eq!(iarr1, iarr4);\n\n\n\n let farr1 = array![1.0f64, 2., 3.];\n\n let farr2 = array![2.0f64, 3., 4.];\n\n let farr3 = array![1.0f64, 2., 3., 4., 5.];\n\n let farr4 = array![1.0f64, 2., 3.];\n\n assert_eq!(farr1, farr1);\n\n assert_eq!(farr1 == farr2, false);\n\n assert_eq!(farr1 == farr3, false);\n\n assert_eq!(farr1, farr4);\n\n\n\n // different types\n\n assert_eq!(iarr1 == farr1, false);\n\n assert_eq!(iarr2 == farr2, false);\n\n assert_eq!(iarr3 == farr3, false);\n\n assert_eq!(iarr4 == farr4, false);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 26, "score": 79271.89581355159 }, { "content": "#[test]\n\nfn test_clone() {\n\n let iarr1 = array![1i64, 2, 3];\n\n assert_eq!(iarr1, iarr1.clone());\n\n}\n\n\n\n\n", "file_path": "tests/array.rs", "rank": 27, "score": 79255.63667595378 }, { "content": "#[test]\n\nfn test_creation() {\n\n let i = Scalar::i64(1);\n\n assert_eq!(i.dtype(), \"i64\");\n\n}\n\n\n", "file_path": "tests/scalar.rs", "rank": 28, "score": 79240.42752200202 }, { "content": "#[test]\n\nfn test_append() {\n\n let iarr1 = array![1i64, 2, 3];\n\n assert_eq!(iarr1.dtype(), \"i64\");\n\n\n\n let iarr2 = array![1i64, 2, 3];\n\n assert_eq!(iarr2.dtype(), \"i64\");\n\n\n\n let res = iarr1.append(&iarr2);\n\n assert_eq!(res.dtype(), \"i64\");\n\n\n\n let exp = array![1i64, 2, 3, 1, 2, 3];\n\n assert_eq!(res, exp);\n\n}\n", "file_path": "tests/array.rs", "rank": 29, "score": 79181.3898005327 }, { "content": "#[test]\n\nfn test_copy() {\n\n let values: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n let idx = Indexer::<&str>::new(values);\n\n\n\n // copy Indexer\n\n let copied = idx.clone();\n\n let exp_values: Vec<&str> = vec![\"A\", \"B\", \"C\"];\n\n assert_eq!(&copied.values, &exp_values);\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 30, "score": 78383.1821271088 }, { "content": "#[test]\n\nfn test_reindex() {\n\n let idx = Indexer::<&str>::new(vec![\"A\", \"B\", \"C\"]);\n\n\n\n let res = idx.reindex(&vec![1, 0, 2]);\n\n assert_eq!(res, Indexer::new(vec![\"B\", \"A\", \"C\"]));\n\n\n\n let res = idx.reindex(&vec![1, 0, 2]);\n\n assert_eq!(res, Indexer::new(vec![\"B\", \"A\", \"C\"]));\n\n}\n\n\n", "file_path": "tests/indexer.rs", "rank": 31, "score": 78383.1821271088 }, { "content": "#[test]\n\nfn test_equals() {\n\n let idx = Indexer::<&str>::new(vec![\"A\", \"B\", \"C\"]);\n\n\n\n let other = Indexer::<&str>::new(vec![\"A\", \"B\"]);\n\n assert_eq!(idx == other, false);\n\n\n\n let other = Indexer::<&str>::new(vec![\"A\", \"B\", \"X\"]);\n\n assert_eq!(idx == other, false);\n\n\n\n let other = Indexer::<&str>::new(vec![\"A\", \"B\", \"C\"]);\n\n assert_eq!(idx == other, true);\n\n assert_eq!(idx, other);\n\n}\n\n\n", "file_path": "tests/indexer.rs", 
"rank": 32, "score": 78383.1821271088 }, { "content": "/// Indexing methods for Index(Row)\n\npub trait RowIndex<'s>: Sized {\n\n // 's: lifetime of myself\n\n\n\n type Key;\n\n type Row;\n\n\n\n fn len(&'s self) -> usize;\n\n\n\n fn head(&'s self, n: usize) -> Self {\n\n let indexer: Vec<usize> = (0..n).collect();\n\n self.ilocs(&indexer)\n\n }\n\n\n\n fn tail(&'s self, n: usize) -> Self {\n\n let len = self.len();\n\n let indexer: Vec<usize> = ((len - n)..len).collect();\n\n self.ilocs(&indexer)\n\n }\n\n\n\n fn reindex<'l>(&'s self, labels: &'l [Self::Key]) -> Self;\n", "file_path": "src/traits.rs", "rank": 33, "score": 76410.16528266536 }, { "content": "/// Indexing methods for Columns\n\npub trait ColIndex<'s>: Sized {\n\n // 's: lifetime of myself\n\n\n\n type Key;\n\n type Column;\n\n\n\n /// Get column using label\n\n fn get<'l>(&'s self, label: &'l Self::Key) -> Self::Column;\n\n\n\n /// Get column using given index\n\n fn iget<'l>(&'s self, label: &'l usize) -> Self::Column;\n\n\n\n /// Slice columns using labels\n\n fn gets<'l>(&'s self, labels: &'l [Self::Key]) -> Self;\n\n\n\n /// Slice columns given indices\n\n fn igets<'l>(&'s self, locations: &'l [usize]) -> Self;\n\n\n\n // ToDo: Add .insert\n\n // ToDo bgets\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 34, "score": 76409.58082950533 }, { "content": "#[test]\n\nfn test_series_append() {\n\n let values: Vec<f64> = vec![1., 2., 3., 4., 5.];\n\n let index: Vec<i64> = vec![10, 20, 30, 40, 50];\n\n\n\n let s1 = Series::new(values, index);\n\n\n\n let values: Vec<f64> = vec![11., 12., 13., 14., 15.];\n\n let index: Vec<i64> = vec![110, 120, 130, 140, 150];\n\n\n\n let s2 = Series::new(values, index);\n\n\n\n let res = s1.append(&s2);\n\n let exp: Series<f64, i64> = Series::new(\n\n vec![1., 2., 3., 4., 5., 11., 12., 13., 14., 15.],\n\n vec![10, 20, 30, 40, 50, 110, 120, 130, 140, 150],\n\n );\n\n assert_eq!(res, exp);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 35, "score": 76324.87699873961 }, { "content": "pub trait JoinOp<T> {\n\n fn inner(left: &[T], right: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>);\n\n fn left(left: &[T], right: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>);\n\n fn right(left: &[T], right: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>);\n\n fn outer(left: &[T], right: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>);\n\n\n\n fn keep_first(keep: &[T], other: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>);\n\n}\n\n\n\nimpl<T> JoinOp<T> for HashJoin\n\nwhere\n\n T: Clone + Hash + Eq,\n\n{\n\n fn inner(left: &[T], right: &[T]) -> (Vec<T>, Vec<usize>, Vec<usize>) {\n\n\n\n let exp_capacity = cmp::min(left.len(), right.len());\n\n\n\n let mut indexer: Vec<T> = Vec::with_capacity(exp_capacity);\n\n let mut lindexer: Vec<usize> = Vec::with_capacity(exp_capacity);\n\n let mut rindexer: Vec<usize> = Vec::with_capacity(exp_capacity);\n", "file_path": "src/algos/join.rs", "rank": 36, "score": 75944.44124600218 }, { "content": "#[test]\n\nfn test_series_slice_locs() {\n\n let values: Vec<f64> = vec![1., 2., 3., 4., 5.];\n\n let index: Vec<i64> = vec![10, 20, 30, 40, 50];\n\n\n\n let s = Series::new(values, index);\n\n\n\n // test internal constructions\n\n let exp_values: Cow<Vec<f64>> = Cow::Owned(vec![1., 2., 3., 4., 5.]);\n\n let exp_index: Cow<Indexer<i64>> = Cow::Owned(Indexer::new(vec![10, 20, 30, 40, 50]));\n\n assert_eq!(s.values, exp_values);\n\n assert_eq!(s.index, exp_index);\n\n\n\n // test label slice\n\n let res = s.locs(&vec![20, 30, 50]);\n\n let exp: Series<f64, i64> = Series::new(vec![2., 3., 5.], vec![20, 
30, 50]);\n\n assert_eq!(res, exp);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 37, "score": 73738.05390204594 }, { "content": "#[test]\n\nfn test_frame_slice_locs() {\n\n let values = vec![\n\n array![1i64, 2, 3, 4, 5],\n\n array![6.0f64, 7., 8., 9., 10.],\n\n array![11i64, 12, 13, 14, 15],\n\n ];\n\n let df = DataFrame::from_vec(\n\n values,\n\n vec![\"A\", \"BB\", \"CC\", \"D\", \"EEE\"],\n\n vec![\"X\", \"YYY\", \"ZZ\"],\n\n );\n\n assert_eq!(df.len(), 5);\n\n\n\n let res = df.locs(&vec![\"A\", \"D\", \"CC\"]);\n\n let exp_values = vec![\n\n array![1i64, 4, 3],\n\n array![6.0f64, 9., 8.],\n\n array![11i64, 14, 13],\n\n ];\n\n let exp = DataFrame::from_vec(exp_values, vec![\"A\", \"D\", \"CC\"], vec![\"X\", \"YYY\", \"ZZ\"]);\n\n assert_eq!(res.values, exp.values);\n\n assert_eq!(res.index, exp.index);\n\n assert_eq!(res.columns, exp.columns);\n\n}\n\n\n", "file_path": "tests/frame.rs", "rank": 38, "score": 73738.05390204594 }, { "content": "#[test]\n\nfn test_dataframe_append() {\n\n let values1 = vec![array![1, 2, 3], array![4.1, 5.1, 6.1], array![1, 2, 3]];\n\n let df1 = DataFrame::from_vec(values1, vec![\"A\", \"B\", \"C\"], vec![\"X\", \"Y\", \"Z\"]);\n\n\n\n let values2 = vec![array![4, 5], array![7.1, 8.1], array![4, 5]];\n\n let df2 = DataFrame::from_vec(values2, vec![\"D\", \"E\"], vec![\"X\", \"Y\", \"Z\"]);\n\n\n\n let res = df1.append(&df2);\n\n\n\n let exp_values = vec![\n\n array![1, 2, 3, 4, 5],\n\n array![4.1, 5.1, 6.1, 7.1, 8.1],\n\n array![1, 2, 3, 4, 5],\n\n ];\n\n let exp = DataFrame::from_vec(\n\n exp_values,\n\n vec![\"A\", \"B\", \"C\", \"D\", \"E\"],\n\n vec![\"X\", \"Y\", \"Z\"],\n\n );\n\n assert_eq!(res, exp);\n\n}\n\n\n", "file_path": "tests/frame_reshape.rs", "rank": 39, "score": 73711.15576053833 }, { "content": "#[test]\n\nfn test_series_creation_from_index() {\n\n let values: Vec<f64> = vec![1., 2., 3.];\n\n let index: Vec<i64> = vec![5, 6, 7];\n\n\n\n let s = Series::<f64, i64>::new(values, index);\n\n\n\n let exp_values: Cow<Vec<f64>> = Cow::Owned(vec![1., 2., 3.]);\n\n let exp_index: Cow<Indexer<i64>> = Cow::Owned(Indexer::new(vec![5, 6, 7]));\n\n assert_eq!(s.values, exp_values);\n\n assert_eq!(s.index, exp_index);\n\n\n\n assert_eq!(s.len(), 3);\n\n assert_eq!(s.index.len(), 3);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 40, "score": 72980.79237928068 }, { "content": "#[test]\n\nfn test_series_creation_from_into_index() {\n\n let values: Vec<f64> = vec![1., 2., 3.];\n\n let index: Indexer<i64> = Indexer::new(vec![5, 6, 7]);\n\n\n\n let s = Series::<f64, i64>::new(values, index);\n\n\n\n let exp_values: Cow<Vec<f64>> = Cow::Owned(vec![1., 2., 3.]);\n\n let exp_index: Cow<Indexer<i64>> = Cow::Owned(Indexer::new(vec![5, 6, 7]));\n\n assert_eq!(s.values, exp_values);\n\n assert_eq!(s.index, exp_index);\n\n\n\n assert_eq!(s.len(), 3);\n\n assert_eq!(s.index.len(), 3);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 41, "score": 72980.79237928068 }, { "content": "#[test]\n\nfn test_series_reindex_by_index() {\n\n let s: Series<&str, &str> = Series::new(vec![\"a\", \"b\", \"c\", \"d\"], vec![\"A\", \"B\", \"C\", \"D\"]);\n\n let res = s.reindex_by_index(&vec![1, 3, 0]);\n\n let exp: Series<&str, &str> = Series::new(vec![\"b\", \"d\", \"a\"], vec![\"B\", \"D\", \"A\"]);\n\n assert_eq!(res, exp);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 42, "score": 72980.79237928068 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_frame_slice_locs_panic() {\n\n let values = vec![\n\n array![1i64, 2, 3, 4, 5],\n\n array![6.0f64, 7., 
8., 9., 10.],\n\n array![11i64, 12, 13, 14, 15],\n\n ];\n\n let df = DataFrame::from_vec(\n\n values,\n\n vec![\"A\", \"BB\", \"CC\", \"D\", \"EEE\"],\n\n vec![\"X\", \"YYY\", \"ZZ\"],\n\n );\n\n\n\n df.locs(&vec![\"A\", \"D\", \"X\"]);\n\n}\n\n\n", "file_path": "tests/frame.rs", "rank": 43, "score": 71336.33519097991 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_series_reindex_by_index_panic() {\n\n let s: Series<&str, &str> = Series::new(vec![\"a\", \"b\", \"c\", \"d\"], vec![\"A\", \"B\", \"C\", \"D\"]);\n\n s.reindex_by_index(&vec![1, 3, 10]);\n\n}\n\n\n", "file_path": "tests/series.rs", "rank": 44, "score": 70609.94377453244 }, { "content": "#[test]\n\nfn test_series_value_counts_str() {\n\n let values: Vec<&str> = vec![\"a\", \"bb\", \"bb\", \"c\", \"a\", \"a\"];\n\n let s = Series::<&str, usize>::from_vec(values);\n\n\n\n let d = s.value_counts();\n\n let exp: Series<usize, &str> = Series::new(vec![3, 2, 1], vec![\"a\", \"bb\", \"c\"]);\n\n assert_eq!(d, exp);\n\n}\n", "file_path": "tests/series_aggregation.rs", "rank": 45, "score": 69110.35904498925 }, { "content": "#[test]\n\nfn test_series_value_counts_int() {\n\n let values: Vec<i64> = vec![1, 1, 3, 4, 2, 1, 1, 2, 3, 3];\n\n let s = Series::<i64, usize>::from_vec(values);\n\n\n\n let d = s.value_counts();\n\n let exp: Series<usize, i64> = Series::new(vec![4, 3, 2, 1], vec![1, 3, 2, 4]);\n\n assert_eq!(d, exp);\n\n}\n\n\n", "file_path": "tests/series_aggregation.rs", "rank": 46, "score": 69110.35904498925 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_dataframe_append_different_columns() {\n\n // different columns\n\n let values1 = vec![array![1, 2, 3], array![4.1, 5.1, 6.1], array![1, 2, 3]];\n\n let df1 = DataFrame::from_vec(values1, vec![\"A\", \"B\", \"C\"], vec![\"X\", \"Y\", \"Z\"]);\n\n\n\n let values2 = vec![array![4, 5], array![7.1, 8.1], array![4, 5]];\n\n let df2 = DataFrame::from_vec(values2, vec![\"D\", \"E\"], vec![\"XX\", \"Y\", \"Z\"]);\n\n\n\n df1.append(&df2);\n\n}\n\n\n", "file_path": "tests/frame_reshape.rs", "rank": 47, "score": 69097.96996054036 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_dataframe_append_different_dtype() {\n\n // different columns\n\n let values1 = vec![array![1, 2, 3], array![4.1, 5.1, 6.1], array![1, 2, 3]];\n\n let df1 = DataFrame::from_vec(values1, vec![\"A\", \"B\", \"C\"], vec![\"X\", \"Y\", \"Z\"]);\n\n\n\n let values2 = vec![array![4, 5], array![7.1, 8.1], array![4.1, 5.1]];\n\n let df2 = DataFrame::from_vec(values2, vec![\"D\", \"E\"], vec![\"X\", \"Y\", \"Z\"]);\n\n\n\n df1.append(&df2);\n\n}\n\n\n", "file_path": "tests/frame_reshape.rs", "rank": 48, "score": 69097.96996054036 }, { "content": "use std::hash::Hash;\n\n\n\nuse super::Indexer;\n\nuse nullvec::prelude::dev::algos::Sorter;\n\n\n\n//**********************************************\n\n//*Soat\n\n//**********************************************\n\n\n\nimpl<U> Indexer<U>\n\nwhere\n\n U: Clone + Eq + Hash + Ord,\n\n{\n\n pub fn argsort(&self) -> (Vec<usize>, Self) {\n\n let (indexer, sorted) = Sorter::argsort(&self.values);\n\n let sorted = Indexer::new(sorted);\n\n (indexer, sorted)\n\n }\n\n\n\n pub fn sort(&self) -> Self {\n", "file_path": "src/indexer/sort.rs", "rank": 49, "score": 68378.31397301417 }, { "content": " Indexer::new(Sorter::sort(&self.values))\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::super::Indexer;\n\n\n\n #[test]\n\n fn test_index_argsort_int() {\n\n let idx = Indexer::new(vec![5, 4, 3, 2, 1]);\n\n let (indexer, sorted) = idx.argsort();\n\n 
assert_eq!(indexer, vec![4, 3, 2, 1, 0]);\n\n assert_eq!(sorted, Indexer::new(vec![1, 2, 3, 4, 5]));\n\n }\n\n\n\n #[test]\n\n fn test_index_sort_int() {\n", "file_path": "src/indexer/sort.rs", "rank": 50, "score": 68361.20121356468 }, { "content": " let idx = Indexer::new(vec![5, 4, 3, 2, 1]);\n\n let sorted = idx.sort();\n\n assert_eq!(sorted, Indexer::new(vec![1, 2, 3, 4, 5]));\n\n }\n\n\n\n #[test]\n\n fn test_index_argsort_str() {\n\n let idx = Indexer::new(vec![\"d\", \"b\", \"a\", \"c\"]);\n\n let (indexer, sorted) = idx.argsort();\n\n assert_eq!(indexer, vec![2, 1, 3, 0]);\n\n assert_eq!(sorted, Indexer::new(vec![\"a\", \"b\", \"c\", \"d\"]));\n\n }\n\n\n\n #[test]\n\n fn test_index_sort_str() {\n\n let idx = Indexer::new(vec![\"d\", \"b\", \"a\", \"c\"]);\n\n let sorted = idx.sort();\n\n assert_eq!(sorted, Indexer::new(vec![\"a\", \"b\", \"c\", \"d\"]));\n\n }\n\n}\n", "file_path": "src/indexer/sort.rs", "rank": 51, "score": 68355.1590755059 }, { "content": "use std::borrow::Cow;\n\nuse std::hash::Hash;\n\n\n\nuse super::Indexer;\n\n\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n/// From / Into\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n\n\nimpl<T: Clone + Eq + Hash> From<Vec<T>> for Indexer<T> {\n\n fn from(values: Vec<T>) -> Self {\n\n Indexer::new(values)\n\n }\n\n}\n\n\n\nimpl<T: Clone + Hash> Into<Vec<T>> for Indexer<T> {\n\n fn into(self) -> Vec<T> {\n\n self.values\n\n }\n\n}\n", "file_path": "src/indexer/convert.rs", "rank": 52, "score": 68319.80805874459 }, { "content": "\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n/// Clone on Write\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n\n\nimpl<'a, T: Clone + Hash> Into<Cow<'a, Indexer<T>>> for Indexer<T> {\n\n fn into(self) -> Cow<'a, Self> {\n\n Cow::Owned(self)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::super::Indexer;\n\n\n\n #[test]\n\n fn test_i64_vec_to_indexer() {\n\n let exp: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n\n", "file_path": "src/indexer/convert.rs", "rank": 53, "score": 68314.0404092785 }, { "content": " let idx: Indexer<i64> = vec![1, 2, 3].into();\n\n assert_eq!(idx, exp);\n\n\n\n let idx: Indexer<i64> = Indexer::from(vec![1, 2, 3]);\n\n assert_eq!(idx, exp);\n\n }\n\n\n\n #[test]\n\n fn test_i64_indexer_to_vec() {\n\n let exp: Vec<i64> = vec![1, 2, 3];\n\n\n\n let idx: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n let conv: Vec<i64> = idx.into();\n\n assert_eq!(conv, exp);\n\n\n\n // let idx: Indexer<i64> = Indexer::new(vec![1, 2, 3]);\n\n // let conv: Vec<i64> = Vec::from(idx.into());\n\n // assert_eq!(conv, exp);\n\n //\n\n }\n", "file_path": "src/indexer/convert.rs", "rank": 54, "score": 68303.77169829237 }, { "content": "\n\n #[test]\n\n fn test_str_vec_to_indexer() {\n\n let exp: Indexer<&str> = Indexer::new(vec![\"a\", \"b\", \"c\"]);\n\n\n\n let idx: Indexer<&str> = vec![\"a\", \"b\", \"c\"].into();\n\n assert_eq!(idx, exp);\n\n\n\n let idx: Indexer<&str> = Indexer::from(vec![\"a\", \"b\", \"c\"]);\n\n assert_eq!(idx, exp);\n\n }\n\n\n\n #[test]\n\n fn test_str_indexer_to_vec() {\n\n let exp: Vec<&str> = vec![\"a\", \"b\", \"c\"];\n\n\n\n let idx: Indexer<&str> = Indexer::new(vec![\"a\", \"b\", \"c\"]);\n\n let conv: Vec<&str> = idx.into();\n\n assert_eq!(conv, exp);\n\n\n", "file_path": "src/indexer/convert.rs", "rank": 55, "score": 68303.64731766502 }, { "content": " // let idx: Indexer<&str> = 
Indexer::new(vec![\"a\", \"b\", \"c\"]);\n\n // let conv: Vec<&str> = Vec::from(idx.into());\n\n // assert_eq!(conv, exp);\n\n //\n\n }\n\n\n\n #[test]\n\n fn test_string_vec_to_indexer() {\n\n let exp: Indexer<String> =\n\n Indexer::new(vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()]);\n\n\n\n let idx: Indexer<String> = vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()].into();\n\n assert_eq!(idx, exp);\n\n\n\n let idx: Indexer<String> =\n\n Indexer::from(vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()]);\n\n assert_eq!(idx, exp);\n\n }\n\n\n\n #[test]\n", "file_path": "src/indexer/convert.rs", "rank": 56, "score": 68303.49768667895 }, { "content": " fn test_string_indexer_to_vec() {\n\n let exp: Vec<String> = vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()];\n\n\n\n let idx: Indexer<String> =\n\n Indexer::new(vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()]);\n\n let conv: Vec<String> = idx.into();\n\n assert_eq!(conv, exp);\n\n\n\n }\n\n}\n", "file_path": "src/indexer/convert.rs", "rank": 57, "score": 68302.2684425825 }, { "content": "use std::fmt;\n\nuse std::hash::Hash;\n\n\n\nuse super::Indexer;\n\n\n\n// Formatting\n\n\n\nimpl<U> fmt::Display for Indexer<U>\n\nwhere\n\n U: Clone + Eq + Hash + fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n return write!(f, \"Index({:?})\", &self.values);\n\n }\n\n}\n\n\n\nimpl<U> fmt::Debug for Indexer<U>\n\nwhere\n\n U: Clone + Eq + Hash + fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n return write!(f, \"Index({:?})\", &self.values);\n\n }\n\n}\n", "file_path": "src/indexer/formatting.rs", "rank": 58, "score": 68169.9845212978 }, { "content": "pub fn union<T>(a: &[T], b: &[T]) -> Vec<T>\n\nwhere\n\n T: Clone + Hash + Eq,\n\n{\n\n // Use HashMap to keep the order\n\n\n\n // Clone for result Vec\n\n let mut res: Vec<T> = a.to_vec();\n\n\n\n let set = CowCollections::to_hashset(a);\n\n\n\n for key in b.iter() {\n\n if !set.contains(&Cow::Borrowed(key)) {\n\n // do not clone if no need to insert\n\n res.push((*key).clone());\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "src/algos/set.rs", "rank": 59, "score": 68004.14272212569 }, { "content": "use std::hash::Hash;\n\nuse std::ops::{Add, Mul, Sub, Div, Rem, BitAnd, BitOr, BitXor};\n\n\n\nuse super::Indexer;\n\nuse nullvec::prelude::dev::algos::Elemwise;\n\n\n\nmacro_rules! 
define_numeric_op {\n\n ($t:ident, $m:ident) => {\n\n\n\n // Broadcast\n\n impl<U, O> $t<U> for Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: U) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::broadcast_oo(self.values, _rhs, |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n", "file_path": "src/indexer/ops.rs", "rank": 60, "score": 67499.89458388012 }, { "content": "\n\n impl<'a, U, O> $t<&'a U> for Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: &U) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::broadcast_or(self.values, _rhs, |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n\n\n\n impl<'b, U, O> $t<U> for &'b Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n // can't use self as impl is for reference?\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: U) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::broadcast_ro(&self.values, _rhs, |x, y| x.$m(y));\n", "file_path": "src/indexer/ops.rs", "rank": 61, "score": 67499.29751781044 }, { "content": " }\n\n\n\n impl<'b, U, O> $t<Indexer<U>> for &'b Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: Indexer<U>) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::elemwise_ro(&self.values,\n\n _rhs.values,\n\n |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n\n\n\n impl<'a, 'b, U, O> $t<&'a Indexer<U>> for &'b Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n", "file_path": "src/indexer/ops.rs", "rank": 62, "score": 67497.92647918838 }, { "content": " Indexer::new(new_values)\n\n }\n\n }\n\n\n\n impl<'a, 'b, U, O> $t<&'a U> for &'b Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: &U) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::broadcast_rr(&self.values, _rhs, |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n\n\n\n // Element-wise\n\n impl<U, O> $t<Indexer<U>> for Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n", "file_path": "src/indexer/ops.rs", "rank": 63, "score": 67497.6649964894 }, { "content": " type Output = Indexer<O>;\n\n fn $m(self, _rhs: Self) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::elemwise_oo(self.values,\n\n _rhs.values,\n\n |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n\n\n\n impl<'a, U, O> $t<&'a Indexer<U>> for Indexer<U>\n\n where U: Clone + Eq + Hash + $t<Output=O>,\n\n O: Clone + Eq + Hash {\n\n\n\n type Output = Indexer<O>;\n\n fn $m(self, _rhs: &Self) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::elemwise_or(self.values,\n\n &_rhs.values,\n\n |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n", "file_path": "src/indexer/ops.rs", "rank": 64, "score": 67496.82814377139 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n\n\n use super::super::Indexer;\n\n\n\n #[test]\n\n fn test_index_ops_i64_broadcast() {\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n // idx moves by ops\n\n assert_eq!(&(idx + 3).values, &vec![4, 5, 6]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n assert_eq!(&(idx * 2).values, &vec![2, 4, 6]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 
3]);\n\n assert_eq!(&(idx - 3).values, &vec![-2, -1, 0]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n assert_eq!(&(idx / 2).values, &vec![0, 1, 1]);\n\n\n", "file_path": "src/indexer/ops.rs", "rank": 65, "score": 67490.50944122113 }, { "content": " fn $m(self, _rhs: &Indexer<U>) -> Self::Output {\n\n let new_values: Vec<O> = Elemwise::elemwise_rr(&self.values,\n\n &_rhs.values,\n\n |x, y| x.$m(y));\n\n Indexer::new(new_values)\n\n }\n\n }\n\n }\n\n}\n\n\n\ndefine_numeric_op!(Add, add);\n\ndefine_numeric_op!(Mul, mul);\n\ndefine_numeric_op!(Sub, sub);\n\ndefine_numeric_op!(Div, div);\n\ndefine_numeric_op!(Rem, rem);\n\ndefine_numeric_op!(BitAnd, bitand);\n\ndefine_numeric_op!(BitOr, bitor);\n\ndefine_numeric_op!(BitXor, bitxor);\n\n\n\n\n", "file_path": "src/indexer/ops.rs", "rank": 66, "score": 67486.95095419092 }, { "content": " let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n assert_eq!(&(idx % 2).values, &vec![1, 0, 1]);\n\n }\n\n\n\n #[test]\n\n fn test_index_ops_i64_broadcast_refs() {\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n assert_eq!(&(&idx + 3).values, &vec![4, 5, 6]);\n\n assert_eq!(&(&idx + &3).values, &vec![4, 5, 6]);\n\n assert_eq!(&(idx + &3).values, &vec![4, 5, 6]);\n\n }\n\n\n\n // ToDo\n\n // #[test]\n\n // fn test_index_ops_str_broadcast() {\n\n // let idx = Indexer::<String>::new(vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()]);\n\n // idx moves by ops\n\n // let exp = Indexer::<String>::new(vec![\"ax\".to_string(), \"bx\".to_string(), \"cx\".to_string()]);\n\n // assert_eq!(idx + \"x\".to_string(), exp);\n\n // }\n", "file_path": "src/indexer/ops.rs", "rank": 67, "score": 67486.36718821581 }, { "content": " //\n\n\n\n #[test]\n\n fn test_index_ops_i64_elemwise() {\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n // idx moves by ops\n\n assert_eq!(&(idx + r).values, &vec![2, 5, 5]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n assert_eq!(&(idx * r).values, &vec![1, 6, 6]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n assert_eq!(&(idx - r).values, &vec![0, -1, 1]);\n\n\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n assert_eq!(&(idx / r).values, &vec![1, 0, 1]);\n", "file_path": "src/indexer/ops.rs", "rank": 68, "score": 67486.21206993534 }, { "content": "\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n assert_eq!(&(idx % r).values, &vec![0, 2, 1]);\n\n }\n\n\n\n #[test]\n\n fn test_index_ops_i64_elemwise_refs() {\n\n let idx = Indexer::<i64>::new(vec![1, 2, 3]);\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n\n\n assert_eq!(&(&idx + r).values, &vec![2, 5, 5]);\n\n\n\n let r = Indexer::<i64>::new(vec![1, 3, 2]);\n\n assert_eq!(&(&idx + &r).values, &vec![2, 5, 5]);\n\n assert_eq!(&(idx + &r).values, &vec![2, 5, 5]);\n\n }\n\n}\n", "file_path": "src/indexer/ops.rs", "rank": 69, "score": 67485.94311253112 }, { "content": "#[test]\n\nfn test_read_csv_error_different_items() {\n\n let data = \"A,B,C\n\n1,7,1.1\n\n1,3\n\n1,1,4.5\";\n\n let rdr = csv::ReaderBuilder::new().has_headers(true).from_reader(\n\n data.as_bytes(),\n\n );\n\n let res = DataFrame::<usize, String>::read_csv(rdr);\n\n assert!(res.is_err())\n\n}\n\n\n", "file_path": "tests/io_csv.rs", "rank": 80, "score": 67125.64892204807 }, { "content": "pub trait Description<'s>\n\n : 
BasicAggregation<'s> + NumericAggregation<'s> + ComparisonAggregation<'s> {\n\n type Described;\n\n\n\n fn describe(&'s self) -> Self::Described;\n\n}\n", "file_path": "src/traits.rs", "rank": 81, "score": 51731.70511955398 }, { "content": "/// Join by index\n\npub trait Join: Sized {\n\n fn join_inner(&self, other: &Self) -> Self;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 82, "score": 50278.756690106675 }, { "content": "pub trait NumericAggregation<'s> {\n\n // result which is coerced (to f64 or its container)\n\n type Coerced;\n\n\n\n fn mean(&'s self) -> Self::Coerced;\n\n fn var(&'s self) -> Self::Coerced;\n\n fn unbiased_var(&'s self) -> Self::Coerced;\n\n fn std(&'s self) -> Self::Coerced;\n\n fn unbiased_std(&'s self) -> Self::Coerced;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 83, "score": 50274.68715438919 }, { "content": "pub trait BasicAggregation<'s> {\n\n // result which can keep current dtype\n\n type Kept;\n\n // result for count (to usize or its container)\n\n type Counted;\n\n\n\n fn sum(&'s self) -> Self::Kept;\n\n fn count(&'s self) -> Self::Counted;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 84, "score": 50274.68715438919 }, { "content": "pub trait ComparisonAggregation<'s> {\n\n type Kept;\n\n\n\n fn min(&'s self) -> Self::Kept;\n\n fn max(&'s self) -> Self::Kept;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 85, "score": 50274.68715438919 }, { "content": "pub trait Grouper<T> {\n\n // ToDo: Implement efficient multimap\n\n fn groupby(key: &[T]) -> HashGrouper<T>;\n\n fn get(&self, key: &T) -> Option<&Vec<usize>>;\n\n fn keys(&self) -> Vec<T>;\n\n fn len(&self) -> usize;\n\n}\n\n\n\nimpl<T> Grouper<T> for HashGrouper<T>\n\nwhere\n\n T: Clone + Hash + Eq,\n\n{\n\n fn groupby(key: &[T]) -> HashGrouper<T> {\n\n\n\n let mut map: HashMap<T, Vec<usize>> = HashMap::new();\n\n\n\n for (i, k) in key.iter().enumerate() {\n\n let e = map.entry(k.clone()).or_insert_with(Vec::<usize>::new);\n\n e.push(i);\n\n }\n", "file_path": "src/algos/grouper.rs", "rank": 86, "score": 48941.50958291056 }, { "content": "pub trait Apply<'s, R> {\n\n // R: Type function returns, dummy to avoid unconstrained lifetime parameter\n\n\n\n type In;\n\n type FOut;\n\n type Out;\n\n\n\n fn apply<'f>(&'s self, func: &'f Fn(&Self::In) -> Self::FOut) -> Self::Out;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 87, "score": 48296.33545878603 }, { "content": "/// Concatenate along columns\n\npub trait Concatenation<'s>: Sized {\n\n fn concat<'o>(&'s self, other: &'o Self) -> Self;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 88, "score": 48296.33545878603 }, { "content": "fn main() {\n\n // cargo build --example series\n\n // ./target/debug/examples/series\n\n\n\n println!(\"** Creation **\");\n\n let values: Vec<i64> = vec![1, 2, 3, 4, 3];\n\n let index: Vec<i64> = vec![10, 20, 30, 40, 50];\n\n let s = Series::<i64, i64>::new(values, index);\n\n\n\n println!(\"{:?}\\n\", &s);\n\n\n\n println!(\"** Selection **\");\n\n println!(\"by multiple index labels\\n{:?}\\n\", &s.locs(&[10, 40, 50]));\n\n println!(\"by multiple index locations\\n{:?}\\n\", &s.ilocs(&[2, 3, 4]));\n\n\n\n println!(\"** Calculation **\");\n\n println!(\"elemwise\\n{:?}\\n\", &(&s + 1));\n\n println!(\"sum\\n{:?}\\n\", &s.sum());\n\n println!(\"describe\\n{:?}\\n\", &s.describe());\n\n println!(\"histogram\\n{:?}\\n\", s.value_counts());\n\n\n\n println!(\"** Group By **\");\n\n let sg = s.groupby(&[1, 1, 1, 2, 2]);\n\n // println!(\"{:?}\", sg.grouper);\n\n println!(\"{:?}\\n\", 
sg.get_group(&1));\n\n println!(\"{:?}\\n\", sg.sum());\n\n}\n", "file_path": "examples/series.rs", "rank": 89, "score": 47734.42391682647 }, { "content": "fn main() {\n\n // cargo build --example frame\n\n // ./target/debug/examples/frame\n\n\n\n println!(\"** Creation **\");\n\n let values = vec![\n\n array![1, 2, 3, 4, 5],\n\n array![6.1, 7.1, 8.1, 9.1, 10.1],\n\n array![11, 12, 13, 14, 15],\n\n ];\n\n let df = DataFrame::from_vec(values, vec![10, 20, 30, 40, 50], vec![\"X\", \"YYY\", \"ZZ\"]);\n\n println!(\"{:?}\\n\", &df);\n\n\n\n println!(\"** Selection **\");\n\n\n\n println!(\"by multiple index labels\\n{:?}\\n\", &df.locs(&[20, 30, 40]));\n\n println!(\"by multiple index locations\\n{:?}\\n\", &df.ilocs(&[0, 2, 1]));\n\n\n\n println!(\"** GroupBy **\");\n\n let dg = df.groupby(&[\"A\", \"A\", \"B\", \"A\", \"B\"]);\n\n println!(\"get group\\n{:?}\\n\", &dg.get_group(&\"A\"));\n\n\n\n println!(\"** Reshaping **\");\n\n let values2 = vec![array![1.1, 2.1, 3.1], array![6, 7, 8]];\n\n let df2 = DataFrame::from_vec(values2, vec![20, 30, 40], vec![\"X2\", \"Y2\"]);\n\n let j = df.join_inner(&df2);\n\n println!(\"inner join\\n{:?}\\n\", &j);\n\n}\n", "file_path": "examples/frame.rs", "rank": 90, "score": 47734.42391682647 }, { "content": "pub trait NanMinMax<A> {\n\n fn nanmin(&self, n: A) -> A;\n\n fn nanmax(&self, n: A) -> A;\n\n fn nanmin_value() -> A;\n\n fn nanmax_value() -> A;\n\n}\n\n\n\nmacro_rules! define_int_stats {\n\n ($t:ident) => {\n\n impl NanMinMax<$t> for $t {\n\n fn nanmin(&self, n: $t) -> $t {\n\n cmp::min(*self, n)\n\n }\n\n fn nanmax(&self, n: $t) -> $t {\n\n cmp::max(*self, n)\n\n }\n\n fn nanmin_value() -> $t {\n\n $t::min_value()\n\n }\n\n fn nanmax_value() -> $t {\n", "file_path": "src/algos/computation.rs", "rank": 91, "score": 47717.02717719646 }, { "content": "#[test]\n\nfn test_creation() {\n\n let iarr = array![1i64, 2, 3];\n\n assert_eq!(iarr.dtype(), \"i64\");\n\n assert_eq!(iarr.len(), 3);\n\n\n\n let farr = array![1.1f64, 2.1, 3.1, 4.1];\n\n assert_eq!(farr.dtype(), \"f64\");\n\n assert_eq!(farr.len(), 4);\n\n\n\n let barr = array![true, false, true];\n\n assert_eq!(barr.dtype(), \"bool\");\n\n assert_eq!(barr.len(), 3);\n\n\n\n let sarr = array![\"a\".to_string(), \"b\".to_string()];\n\n assert_eq!(sarr.dtype(), \"str\");\n\n assert_eq!(sarr.len(), 2);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 92, "score": 46205.811104156135 }, { "content": "#[test]\n\nfn test_blocs() {\n\n let iarr = array![1i64, 2, 3];\n\n assert_eq!(iarr.dtype(), \"i64\");\n\n let ires = iarr.blocs(&vec![true, false, true]);\n\n assert_eq!(ires, array![1i64, 3]);\n\n\n\n let farr = array![1.1f64, 2.1, 3.1];\n\n assert_eq!(farr.dtype(), \"f64\");\n\n let fres = farr.blocs(&vec![true, false, true]);\n\n assert_eq!(fres, array![1.1f64, 3.1]);\n\n\n\n let barr = array![true, false, true];\n\n let bres = barr.blocs(&vec![true, false, true]);\n\n assert_eq!(bres, array![true, true]);\n\n\n\n let sarr = array![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()];\n\n let sres = sarr.blocs(&vec![true, false, true]);\n\n assert_eq!(sres, array![\"a\".to_string(), \"c\".to_string()]);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 93, "score": 46205.811104156135 }, { "content": "#[test]\n\nfn test_ilocs() {\n\n let iarr = array![1i64, 2, 3, 4, 5];\n\n assert_eq!(iarr.dtype(), \"i64\");\n\n assert_eq!(iarr.iloc(&2), Scalar::i64(3));\n\n let ires = iarr.ilocs(&vec![1, 4, 0]);\n\n assert_eq!(ires, array![2i64, 5, 1]);\n\n\n\n let farr = array![1.1f64, 2.1, 3.1, 4.1, 5.1];\n\n 
assert_eq!(farr.dtype(), \"f64\");\n\n assert_eq!(farr.iloc(&2), Scalar::f64(3.1));\n\n let fres = farr.ilocs(&vec![1, 4, 0]);\n\n assert_eq!(fres, array![2.1f64, 5.1, 1.1]);\n\n\n\n let barr = array![true, false, true, true];\n\n assert_eq!(barr.iloc(&2), Scalar::bool(true));\n\n let bres = barr.ilocs(&vec![1, 2]);\n\n assert_eq!(bres, array![false, true]);\n\n\n\n let sarr = array![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()];\n\n assert_eq!(sarr.iloc(&2), Scalar::String(\"c\".to_string()));\n\n let sres = sarr.ilocs(&vec![2, 0]);\n\n assert_eq!(sres, array![\"c\".to_string(), \"a\".to_string()]);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 94, "score": 46205.811104156135 }, { "content": "#[test]\n\nfn test_astype() {\n\n // let iarr = array![1i64, 2, 3];\n\n // assert_eq!(iarr.astype::<f64>(), array![1.0f64, 2., 3.]);\n\n // assert_eq!(iarr.astype::<i64>(), array![1i64, 2, 3]);\n\n\n\n // let farr = array![1.1f64, 2.1, 3.1];\n\n // assert_eq!(farr.astype::<f64>(), array![1.1f64, 2.1, 3.1]);\n\n // assert_eq!(farr.astype::<i64>(), array![1i64, 2, 3]);\n\n}\n\n\n\n\n", "file_path": "tests/array.rs", "rank": 95, "score": 46205.811104156135 }, { "content": "#[test]\n\nfn test_container() {\n\n let iarr: Array = vec![1i32, 2, 3].into();\n\n let farr: Array = vec![1.1f64, 2.1, 3.1].into();\n\n let barr: Array = vec![true, false, true].into();\n\n let sarr: Array = vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()].into();\n\n assert_eq!(iarr.dtype(), \"i32\");\n\n assert_eq!(farr.dtype(), \"f64\");\n\n assert_eq!(barr.dtype(), \"bool\");\n\n assert_eq!(sarr.dtype(), \"str\");\n\n\n\n let container: Vec<Array> = vec![iarr, farr, barr, sarr];\n\n assert_eq!(container.len(), 4);\n\n let dtypes: Vec<String> = container.iter().map(|ref x| x.dtype()).collect();\n\n assert_eq!(dtypes, vec![\"i32\", \"f64\", \"bool\", \"str\"]);\n\n\n\n let i64arr: Array = vec![1i64, 2, 3].into();\n\n assert_eq!(i64arr.dtype(), \"i64\");\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 96, "score": 46205.811104156135 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_ilocs_panic() {\n\n let iarr = array![1i64, 2, 3, 4, 5];\n\n iarr.ilocs(&vec![1, 5, 0]);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 97, "score": 44812.89058260212 }, { "content": "#[test]\n\nfn test_dtype_property() {\n\n let iarr = array![1i64, 2, 3];\n\n assert_eq!(iarr.dtype(), \"i64\");\n\n assert_eq!(iarr.is_numeric(), true);\n\n\n\n let farr = array![1.1f64, 2.1, 3.1, 4.1];\n\n assert_eq!(farr.dtype(), \"f64\");\n\n assert_eq!(farr.is_numeric(), true);\n\n\n\n let barr = array![true, false, true];\n\n assert_eq!(barr.dtype(), \"bool\");\n\n assert_eq!(barr.is_numeric(), false);\n\n\n\n let sarr = array![\"a\".to_string(), \"b\".to_string()];\n\n assert_eq!(sarr.dtype(), \"str\");\n\n assert_eq!(sarr.is_numeric(), false);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 98, "score": 44812.89058260212 }, { "content": "#[test]\n\nfn test_creation_macros() {\n\n let iarr = array![1i32, 2, 3];\n\n assert_eq!(iarr.dtype(), \"i32\");\n\n assert_eq!(iarr.len(), 3);\n\n\n\n let i64arr = array![1i64, 2, 3];\n\n assert_eq!(i64arr.dtype(), \"i64\");\n\n assert_eq!(i64arr.len(), 3);\n\n\n\n let farr = array![1.1f64, 2.1, 3.1, 4.1];\n\n assert_eq!(farr.dtype(), \"f64\");\n\n assert_eq!(farr.len(), 4);\n\n\n\n let barr = array![true, false, true];\n\n assert_eq!(barr.dtype(), \"bool\");\n\n assert_eq!(barr.len(), 3);\n\n\n\n let str_arr = array![\"A\".to_string(), \"B\".to_string(), \"C\".to_string()];\n\n 
assert_eq!(str_arr.dtype(), \"str\");\n\n assert_eq!(str_arr.len(), 3);\n\n\n\n let string_arr = array![\"A\".to_string(), \"B\".to_string(), \"C\".to_string()];\n\n assert_eq!(string_arr.dtype(), \"str\");\n\n assert_eq!(string_arr.len(), 3);\n\n}\n\n\n", "file_path": "tests/array.rs", "rank": 99, "score": 44812.89058260212 } ]
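The `define_numeric_op!` macro shown in the `src/indexer/ops.rs` snippets above generates `Add`/`Sub`/`Mul`/... impls for every owned/borrowed combination of `Indexer` and its operand, which is why both `idx + 3` and `&idx + &other` compile in the accompanying tests. A minimal sketch of the difference, using only constructors and comparisons visible in these snippets; the import path is an assumption, since the crate root is not shown here:

// Hypothetical import path; the snippets above only show the Indexer type itself.
use brassfibre::Indexer;

fn main() {
    let idx = Indexer::<i64>::new(vec![1, 2, 3]);
    let other = Indexer::<i64>::new(vec![1, 3, 2]);

    // Borrowed operands: neither indexer is consumed (impls on &Indexer).
    assert_eq!(&idx + 3, Indexer::new(vec![4, 5, 6]));
    assert_eq!(&idx + &other, Indexer::new(vec![2, 5, 5]));

    // Owned operand: this call moves `idx`, so it cannot be used afterwards.
    assert_eq!(idx + 1, Indexer::new(vec![2, 3, 4]));
}

The four impl blocks per operator exist so that both temporaries and long-lived indexers work without explicit clones.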
Rust
src/overlay.rs
penumbra-zone/jellyfish-merkle
0f216e2dcd3219c58f1bc4a584c373416cbef5cd
use std::collections::HashMap;

use anyhow::Result;
use tracing::instrument;

use crate::{
    storage::{TreeReader, TreeWriter},
    JellyfishMerkleTree, KeyHash, MissingRootError, OwnedValue, RootHash, Version,
};

pub struct WriteOverlay<R> {
    reader: R,
    overlay: HashMap<KeyHash, OwnedValue>,
    version: Version,
}

impl<R> WriteOverlay<R> {
    pub const PRE_GENESIS_VERSION: Version = u64::MAX;
}

impl<R> WriteOverlay<R>
where
    R: TreeReader + Sync,
{
    pub fn new(reader: R, version: Version) -> Self {
        Self {
            reader,
            version,
            overlay: Default::default(),
        }
    }

    fn tree(&self) -> JellyfishMerkleTree<'_, R> {
        JellyfishMerkleTree::new(&self.reader)
    }

    #[instrument(name = "WriteOverlay::get", skip(self, key))]
    pub async fn get(&self, key: KeyHash) -> Result<Option<OwnedValue>> {
        if let Some(value) = self.overlay.get(&key) {
            tracing::trace!(?key, value = ?hex::encode(&value), "read from cache");
            Ok(Some(value.clone()))
        } else {
            match self.tree().get(key, self.version).await {
                Ok(Some(value)) => {
                    tracing::trace!(version = ?self.version, ?key, value = ?hex::encode(&value), "read from tree");
                    Ok(Some(value))
                }
                Ok(None) => {
                    tracing::trace!(version = ?self.version, ?key, "key not found in tree");
                    Ok(None)
                }
                Err(e) if e.downcast_ref::<MissingRootError>().is_some() => {
                    tracing::trace!(version = ?self.version, "no data available at this version");
                    Ok(None)
                }
                Err(e) => Err(e),
            }
        }
    }

    #[instrument(name = "WriteOverlay::get_with_proof", skip(self, key))]
    pub async fn get_with_proof(
        &self,
        key: Vec<u8>,
    ) -> Result<(OwnedValue, ics23::ExistenceProof)> {
        if self.overlay.contains_key(&key.clone().into()) {
            return Err(anyhow::anyhow!("key is not yet committed to tree"));
        }
        let proof = self.tree().get_with_ics23_proof(key, self.version).await?;
        Ok((proof.value.clone(), proof))
    }

    #[instrument(name = "WriteOverlay::put", skip(self, key, value))]
    pub fn put(&mut self, key: KeyHash, value: OwnedValue) {
        tracing::trace!(?key, value = ?hex::encode(&value));
        *self.overlay.entry(key).or_default() = value;
    }

    #[instrument(name = "WriteOverlay::commit", skip(self, writer))]
    pub async fn commit<W>(&mut self, mut writer: W) -> Result<(RootHash, Version)>
    where
        W: TreeWriter + Sync,
    {
        let overlay = std::mem::replace(&mut self.overlay, Default::default());
        let new_version = self.version.wrapping_add(1);
        tracing::trace!(old_version = ?self.version, new_version, ?overlay);

        let (root_hash, batch) = self
            .tree()
            .put_value_set(overlay.into_iter().collect(), new_version)
            .await?;

        writer.write_node_batch(&batch.node_batch).await?;
        tracing::trace!(?root_hash, "wrote node batch to backing store");

        self.version = new_version;

        Ok((root_hash, new_version))
    }
}
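Since the `file_code` above defines the full `WriteOverlay` API (`new`, `get`, `put`, `commit`, and the `PRE_GENESIS_VERSION` sentinel), a short usage sketch may help. It assumes the crate's backing stores (such as the `MockTreeStore` used in the test snippets below) implement both `TreeReader` and `TreeWriter`, and that `WriteOverlay` is re-exported at the crate root; neither assumption is confirmed by this row:

use anyhow::Result;

// Assumed re-export paths; only the definitions themselves appear in this row.
use crate::storage::{TreeReader, TreeWriter};
use crate::{KeyHash, WriteOverlay};

// Sketch: stage a write in the overlay, read it back, then commit it as one version.
async fn put_and_commit<R, W>(reader: R, writer: W) -> Result<()>
where
    R: TreeReader + Sync,
    W: TreeWriter + Sync,
{
    // Start from the pre-genesis sentinel so the first commit lands at version 0
    // (PRE_GENESIS_VERSION is u64::MAX and commit bumps it with wrapping_add(1)).
    let mut overlay = WriteOverlay::new(reader, WriteOverlay::<R>::PRE_GENESIS_VERSION);

    let key = KeyHash([1u8; 32]);
    overlay.put(key, b"example value".to_vec());

    // Reads check the in-memory overlay first, then fall back to the committed tree.
    assert_eq!(overlay.get(key).await?, Some(b"example value".to_vec()));

    // Flush the staged writes into the tree and persist the node batch via the writer.
    let (root_hash, version) = overlay.commit(writer).await?;
    tracing::info!(?root_hash, version, "committed overlay");
    Ok(())
}

Note that `commit` takes the writer by value and advances the stored version with `wrapping_add`, which is what makes the `PRE_GENESIS_VERSION` sentinel roll over to version 0 on the first commit.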
use std::collections::HashMap;

use anyhow::Result;
use tracing::instrument;

use crate::{
    storage::{TreeReader, TreeWriter},
    JellyfishMerkleTree, KeyHash, MissingRootError, OwnedValue, RootHash, Version,
};

pub struct WriteOverlay<R> {
    reader: R,
    overlay: HashMap<KeyHash, OwnedValue>,
    version: Version,
}

impl<R> WriteOverlay<R> {
    pub const PRE_GENESIS_VERSION: Version = u64::MAX;
}

impl<R> WriteOverlay<R>
where
    R: TreeReader + Sync,
{
    pub fn new(reader: R, version: Version) -> Self {
        Self {
            reader,
            version,
            overlay: Default::default(),
        }
    }

    fn tree(&self) -> JellyfishMerkleTree<'_, R> {
        JellyfishMerkleTree::new(&self.reader)
    }

    #[instrument(name = "WriteOverlay::get", skip(self, key))]
    pub async fn get(&self, key: KeyHash) -> Result<Option<OwnedValue>> {
        if let Some(value) = self.overlay.get(&key) {
            tracing::trace!(?key, value = ?hex::encode(&value), "read from cache");
            Ok(Some(value.clone()))
        } else {
        }
    }

    #[instrument(name = "WriteOverlay::get_with_proof", skip(self, key))]
    pub async fn get_with_proof(
        &self,
        key: Vec<u8>,
    ) -> Result<(OwnedValue, ics23::ExistenceProof)> {
        if self.overlay.contains_key(&key.clone().into()) {
            return Err(anyhow::anyhow!("key is not yet committed to tree"));
        }
        let proof = self.tree().get_with_ics23_proof(key, self.version).await?;
        Ok((proof.value.clone(), proof))
    }

    #[instrument(name = "WriteOverlay::put", skip(self, key, value))]
    pub fn put(&mut self, key: KeyHash, value: OwnedValue) {
        tracing::trace!(?key, value = ?hex::encode(&value));
        *self.overlay.entry(key).or_default() = value;
    }

    #[instrument(name = "WriteOverlay::commit", skip(self, writer))]
    pub async fn commit<W>(&mut self, mut writer: W) -> Result<(RootHash, Version)>
    where
        W: TreeWriter + Sync,
    {
        let overlay = std::mem::replace(&mut self.overlay, Default::default());
        let new_version = self.version.wrapping_add(1);
        tracing::trace!(old_version = ?self.version, new_version, ?overlay);

        let (root_hash, batch) = self
            .tree()
            .put_value_set(overlay.into_iter().collect(), new_version)
            .await?;

        writer.write_node_batch(&batch.node_batch).await?;
        tracing::trace!(?root_hash, "wrote node batch to backing store");

        self.version = new_version;

        Ok((root_hash, new_version))
    }
}
            match self.tree().get(key, self.version).await {
                Ok(Some(value)) => {
                    tracing::trace!(version = ?self.version, ?key, value = ?hex::encode(&value), "read from tree");
                    Ok(Some(value))
                }
                Ok(None) => {
                    tracing::trace!(version = ?self.version, ?key, "key not found in tree");
                    Ok(None)
                }
                Err(e) if e.downcast_ref::<MissingRootError>().is_some() => {
                    tracing::trace!(version = ?self.version, "no data available at this version");
                    Ok(None)
                }
                Err(e) => Err(e),
            }
if_condition
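The `middle` above (an `if_condition`-strategy completion) turns one specific failure, `MissingRootError`, into `Ok(None)` by downcasting the `anyhow::Error`, and propagates every other error unchanged. A self-contained sketch of that pattern outside the tree code, with a stand-in error type defined locally for illustration only:

use anyhow::Result;

// Local stand-in for the crate's MissingRootError, defined here only for the sketch.
#[derive(Debug)]
struct MissingRootError;

impl std::fmt::Display for MissingRootError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "missing root")
    }
}

impl std::error::Error for MissingRootError {}

fn lookup(version: u64) -> Result<Option<u64>> {
    // Pretend nothing has ever been committed, so the root for `version` is missing.
    let _ = version;
    Err(MissingRootError.into())
}

fn get_or_none(version: u64) -> Result<Option<u64>> {
    match lookup(version) {
        Ok(value) => Ok(value),
        // The one error that means "no data at this version" becomes a normal miss...
        Err(e) if e.downcast_ref::<MissingRootError>().is_some() => Ok(None),
        // ...while any other storage failure is propagated unchanged.
        Err(e) => Err(e),
    }
}

fn main() -> Result<()> {
    assert_eq!(get_or_none(0)?, None);
    Ok(())
}

This is why a fresh tree with no committed root behaves like an empty map in `WriteOverlay::get` rather than returning an error.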
[ { "content": "/// Computes the key immediately after `key`.\n\npub fn plus_one(key: KeyHash) -> KeyHash {\n\n assert_ne!(key, KeyHash([0xff; 32]));\n\n\n\n let mut buf = key.0;\n\n for i in (0..32).rev() {\n\n if buf[i] == 255 {\n\n buf[i] = 0;\n\n } else {\n\n buf[i] += 1;\n\n break;\n\n }\n\n }\n\n KeyHash(buf)\n\n}\n\n\n\n/// Initializes a DB with a set of key-value pairs by inserting one key at each version.\n\npub async fn init_mock_db(kvs: &HashMap<KeyHash, OwnedValue>) -> (MockTreeStore, Version) {\n\n assert!(!kvs.is_empty());\n\n\n\n let db = MockTreeStore::default();\n", "file_path": "src/tests/helper.rs", "rank": 0, "score": 152167.96640051677 }, { "content": "fn random_leaf_with_key(next_version: Version) -> (Node, NodeKey) {\n\n let key: [u8; 32] = OsRng.gen();\n\n let value: [u8; 32] = OsRng.gen();\n\n let key_hash: KeyHash = key.as_ref().into();\n\n let node = Node::new_leaf(key_hash, value.to_vec());\n\n let node_key = NodeKey::new(next_version, NibblePath::new(key_hash.0.to_vec()));\n\n (node, node_key)\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_get_node() {\n\n let next_version = 0;\n\n let db = MockTreeStore::default();\n\n let cache = TreeCache::new(&db, next_version).await.unwrap();\n\n\n\n let (node, node_key) = random_leaf_with_key(next_version);\n\n db.put_node(node_key.clone(), node.clone()).await.unwrap();\n\n\n\n assert_eq!(cache.get_node(&node_key).await.unwrap(), node);\n\n}\n", "file_path": "src/tests/tree_cache.rs", "rank": 1, "score": 136313.73815072342 }, { "content": "/// Defines the interface between a\n\n/// [`JellyfishMerkleTree`](crate::JellyfishMerkleTree)\n\n/// and underlying storage holding nodes.\n\npub trait TreeReader: Send + Sync {\n\n /// Gets node given a node key. Returns `None` if the node does not exist.\n\n fn get_node_option<'future, 'a: 'future, 'n: 'future>(\n\n &'a self,\n\n node_key: &'n NodeKey,\n\n ) -> BoxFuture<'future, Result<Option<Node>>>;\n\n\n\n /// Gets the rightmost leaf. Note that this assumes we are in the process of restoring the tree\n\n /// and all nodes are at the same version.\n\n #[allow(clippy::type_complexity)]\n\n fn get_rightmost_leaf<'future, 'a: 'future>(\n\n &'a self,\n\n ) -> BoxFuture<'future, Result<Option<(NodeKey, LeafNode)>>>;\n\n}\n\n\n\n/// Internal helper: Gets node given a node key. 
Returns error if the node does not exist.\n\npub async fn get_node_async<R: TreeReader>(reader: &R, node_key: &NodeKey) -> Result<Node> {\n\n reader\n\n .get_node_option(node_key)\n\n .await?\n\n .ok_or_else(|| format_err!(\"Missing node at {:?}.\", node_key))\n\n}\n", "file_path": "src/reader.rs", "rank": 2, "score": 128036.22424887976 }, { "content": "fn hash_leaf(key: KeyHash, value_hash: ValueHash) -> [u8; 32] {\n\n SparseMerkleLeafNode::new(key, value_hash).hash()\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 3, "score": 121769.57178609373 }, { "content": "// Generate a pair of leaf node key and account key with a passed-in 63-nibble node key and the last\n\n// nibble to be appended.\n\nfn gen_leaf_keys(version: Version, nibble_path: &NibblePath, nibble: Nibble) -> (NodeKey, KeyHash) {\n\n assert_eq!(nibble_path.num_nibbles(), 63);\n\n let mut np = nibble_path.clone();\n\n np.push(nibble);\n\n let account_key = KeyHash(np.bytes().try_into().unwrap());\n\n (NodeKey::new(version, np), account_key)\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 4, "score": 119915.06299559216 }, { "content": "pub fn arb_existent_kvs_and_nonexistent_keys(\n\n num_kvs: usize,\n\n num_non_existing_keys: usize,\n\n) -> impl Strategy<Value = (HashMap<KeyHash, OwnedValue>, Vec<KeyHash>)> {\n\n hash_map(any::<KeyHash>(), any::<OwnedValue>(), 1..num_kvs).prop_flat_map(move |kvs| {\n\n let kvs_clone = kvs.clone();\n\n (\n\n Just(kvs),\n\n vec(\n\n any::<KeyHash>().prop_filter(\n\n \"Make sure these keys do not exist in the tree.\",\n\n move |key| !kvs_clone.contains_key(key),\n\n ),\n\n num_non_existing_keys,\n\n ),\n\n )\n\n })\n\n}\n\n\n\npub async fn test_get_with_proof(\n\n (existent_kvs, nonexistent_keys): (HashMap<KeyHash, OwnedValue>, Vec<KeyHash>),\n\n) {\n\n let (db, version) = init_mock_db(&existent_kvs).await;\n\n let tree = JellyfishMerkleTree::new(&db);\n\n\n\n test_existent_keys_impl(&tree, version, &existent_kvs).await;\n\n test_nonexistent_keys_impl(&tree, version, &nonexistent_keys).await;\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 5, "score": 111722.69077869739 }, { "content": "/// `FrozenTreeCache` is used as a field of `TreeCache` storing all the nodes and values that\n\n/// are generated by earlier transactions so they have to be immutable. 
The motivation of\n\n/// `FrozenTreeCache` is to let `TreeCache` freeze intermediate results from each transaction to\n\n/// help commit more than one transaction in a row atomically.\n\nstruct FrozenTreeCache {\n\n /// Immutable node_cache.\n\n node_cache: NodeBatch,\n\n\n\n /// Immutable stale_node_index_cache.\n\n stale_node_index_cache: StaleNodeIndexBatch,\n\n\n\n /// the stats vector including the number of new nodes, new leaves, stale nodes and stale leaves.\n\n node_stats: Vec<NodeStats>,\n\n\n\n /// Frozen root hashes after each earlier transaction.\n\n root_hashes: Vec<RootHash>,\n\n}\n\n\n\nimpl FrozenTreeCache {\n\n fn new() -> Self {\n\n Self {\n\n node_cache: BTreeMap::new(),\n\n stale_node_index_cache: BTreeSet::new(),\n\n node_stats: Vec::new(),\n", "file_path": "src/tree_cache.rs", "rank": 6, "score": 91017.94989064043 }, { "content": "pub fn arb_tree_with_index(\n\n tree_size: usize,\n\n) -> impl Strategy<Value = (BTreeMap<KeyHash, OwnedValue>, usize)> {\n\n btree_map(any::<KeyHash>(), any::<OwnedValue>(), 1..tree_size).prop_flat_map(|btree| {\n\n let len = btree.len();\n\n (Just(btree), 0..len)\n\n })\n\n}\n\n\n\npub async fn test_get_range_proof((btree, n): (BTreeMap<KeyHash, OwnedValue>, usize)) {\n\n let (db, version) = init_mock_db(&btree.clone().into_iter().collect()).await;\n\n let tree = JellyfishMerkleTree::new(&db);\n\n\n\n let nth_key = btree.keys().nth(n).unwrap();\n\n\n\n let proof = tree.get_range_proof(*nth_key, version).await.unwrap();\n\n verify_range_proof(\n\n tree.get_root_hash(version).await.unwrap(),\n\n btree.into_iter().take(n + 1).collect(),\n\n proof,\n", "file_path": "src/tests/helper.rs", "rank": 7, "score": 90530.79015690157 }, { "content": "/// Defines the interface used to write a batch of updates from a\n\n/// [`JellyfishMerkleTree`](crate::JellyfishMerkleTree)\n\n/// to the underlying storage holding nodes.\n\npub trait TreeWriter: Send + Sync {\n\n /// Writes a node batch into storage.\n\n fn write_node_batch<'future, 'a: 'future, 'n: 'future>(\n\n &'a mut self,\n\n node_batch: &'n NodeBatch,\n\n ) -> BoxFuture<'future, Result<()>>;\n\n}\n\n\n\n/// Node batch that will be written into db atomically with other batches.\n\npub type NodeBatch = BTreeMap<NodeKey, Node>;\n\n/// [`StaleNodeIndex`](struct.StaleNodeIndex.html) batch that will be written into db atomically\n\n/// with other batches.\n\npub type StaleNodeIndexBatch = BTreeSet<StaleNodeIndex>;\n\n\n\n#[derive(Clone, Debug, Default, Eq, PartialEq)]\n\npub struct NodeStats {\n\n pub new_nodes: usize,\n\n pub new_leaves: usize,\n\n pub stale_nodes: usize,\n\n pub stale_leaves: usize,\n", "file_path": "src/writer.rs", "rank": 8, "score": 89754.57351693374 }, { "content": "fn update_nibble(original_key: &KeyHash, n: usize, nibble: u8) -> KeyHash {\n\n assert!(nibble < 16);\n\n let mut key = original_key.0;\n\n key[n / 2] = if n % 2 == 0 {\n\n key[n / 2] & 0x0f | nibble << 4\n\n } else {\n\n key[n / 2] & 0xf0 | nibble\n\n };\n\n KeyHash(key)\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_insert_to_empty_tree() {\n\n let db = MockTreeStore::default();\n\n let tree = JellyfishMerkleTree::new(&db);\n\n\n\n // Tree is initially empty. Root is a null node. 
We'll insert a key-value pair which creates a\n\n // leaf node.\n\n let key = b\"testkey\";\n\n let value = vec![1u8, 2u8, 3u8, 4u8];\n", "file_path": "src/tests/jellyfish_merkle.rs", "rank": 9, "score": 85914.90459483003 }, { "content": "pub fn arb_kv_pair_with_distinct_last_nibble(\n\n) -> impl Strategy<Value = ((KeyHash, OwnedValue), (KeyHash, OwnedValue))> {\n\n (\n\n any::<KeyHash>().prop_filter(\"Can't be 0xffffff...\", |key| *key != KeyHash([0xff; 32])),\n\n vec(any::<OwnedValue>(), 2),\n\n )\n\n .prop_map(|(key1, accounts)| {\n\n let key2 = plus_one(key1);\n\n ((key1, accounts[0].clone()), (key2, accounts[1].clone()))\n\n })\n\n}\n\n\n\npub async fn test_get_with_proof_with_distinct_last_nibble(\n\n (kv1, kv2): ((KeyHash, OwnedValue), (KeyHash, OwnedValue)),\n\n) {\n\n let mut kvs = HashMap::new();\n\n kvs.insert(kv1.0, kv1.1);\n\n kvs.insert(kv2.0, kv2.1);\n\n\n\n let (db, version) = init_mock_db(&kvs).await;\n\n let tree = JellyfishMerkleTree::new(&db);\n\n\n\n test_existent_keys_impl(&tree, version, &kvs).await;\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 10, "score": 84044.98884141081 }, { "content": "pub fn ics23_spec() -> ics23::ProofSpec {\n\n ics23::ProofSpec {\n\n leaf_spec: Some(ics23::LeafOp {\n\n hash: ics23::HashOp::Sha256.into(),\n\n prehash_key: ics23::HashOp::Sha256.into(),\n\n prehash_value: ics23::HashOp::Sha256.into(),\n\n length: ics23::LengthOp::NoPrefix.into(),\n\n prefix: b\"JMT::LeafNode\".to_vec(),\n\n }),\n\n inner_spec: Some(ics23::InnerSpec {\n\n // This is the only field we're sure about\n\n hash: ics23::HashOp::Sha256.into(),\n\n // These fields are apparently used for neighbor tests in range proofs,\n\n // and could be wrong:\n\n child_order: vec![0, 1], //where exactly does this need to be true?\n\n min_prefix_length: 16, //what is this?\n\n max_prefix_length: 48, //and this?\n\n child_size: 32,\n\n empty_child: vec![], //check JMT repo to determine if special value used here\n\n }),\n", "file_path": "src/ics23_impl.rs", "rank": 11, "score": 80147.66093207162 }, { "content": "// Generate a random node key with 63 nibbles.\n\nfn random_63nibbles_node_key() -> NodeKey {\n\n let mut bytes: [u8; 32] = OsRng.gen();\n\n *bytes.last_mut().unwrap() &= 0xf0;\n\n NodeKey::new(0 /* version */, NibblePath::new_odd(bytes.to_vec()))\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 12, "score": 74632.84826568725 }, { "content": "/// Returns the key immediately before `key` in `btree`.\n\nfn prev_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>\n\nwhere\n\n K: Clone + Ord,\n\n{\n\n btree\n\n .range((Bound::Unbounded, Bound::Excluded(key)))\n\n .next_back()\n\n .map(|(k, _v)| k.clone())\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 13, "score": 67296.92578774039 }, { "content": "fn next_key<K, V>(btree: &BTreeMap<K, V>, key: &K) -> Option<K>\n\nwhere\n\n K: Clone + Ord,\n\n{\n\n btree\n\n .range((Bound::Excluded(key), Bound::Unbounded))\n\n .next()\n\n .map(|(k, _v)| k.clone())\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 14, "score": 67292.64245352519 }, { "content": "/// Advance both iterators if their next nibbles are the same until either reaches the end or\n\n/// the find a mismatch. 
Return the number of matched nibbles.\n\npub fn skip_common_prefix<I1, I2>(x: &mut I1, y: &mut I2) -> usize\n\nwhere\n\n I1: Iterator + Peekable,\n\n I2: Iterator + Peekable,\n\n <I1 as Iterator>::Item: std::cmp::PartialEq<<I2 as Iterator>::Item>,\n\n{\n\n let mut count = 0;\n\n loop {\n\n let x_peek = x.peek();\n\n let y_peek = y.peek();\n\n if x_peek.is_none()\n\n || y_peek.is_none()\n\n || x_peek.expect(\"cannot be none\") != y_peek.expect(\"cannot be none\")\n\n {\n\n break;\n\n }\n\n count += 1;\n\n x.next();\n\n y.next();\n\n }\n\n count\n\n}\n", "file_path": "src/types/nibble/nibble_path.rs", "rank": 15, "score": 58095.10552090459 }, { "content": "fn arb_sparse_merkle_sibling() -> impl Strategy<Value = [u8; 32]> {\n\n prop_oneof![\n\n arb_non_placeholder_sparse_merkle_sibling(),\n\n Just(SPARSE_MERKLE_PLACEHOLDER_HASH),\n\n ]\n\n}\n\n\n\nimpl Arbitrary for SparseMerkleProof {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {\n\n (\n\n any::<Option<SparseMerkleLeafNode>>(),\n\n (0..=256usize).prop_flat_map(|len| {\n\n if len == 0 {\n\n Just(vec![]).boxed()\n\n } else {\n\n (\n\n arb_non_placeholder_sparse_merkle_sibling(),\n", "file_path": "src/types/proof/proptest_proof.rs", "rank": 16, "score": 54849.30601458446 }, { "content": "#[derive(Clone, Debug)]\n\nstruct InternalInfo {\n\n /// The node key of this internal node.\n\n node_key: NodeKey,\n\n\n\n /// The existing children. Every time a child appears, the corresponding position will be set\n\n /// to `Some`.\n\n children: [Option<ChildInfo>; 16],\n\n}\n\n\n\nimpl InternalInfo {\n\n /// Creates an empty internal node with no children.\n\n fn new_empty(node_key: NodeKey) -> Self {\n\n Self {\n\n node_key,\n\n children: Default::default(),\n\n }\n\n }\n\n\n\n fn set_child(&mut self, index: usize, child_info: ChildInfo) {\n\n precondition!(index < 16);\n", "file_path": "src/restore.rs", "rank": 17, "score": 54791.64621946222 }, { "content": "#[derive(Debug)]\n\nstruct NodeVisitInfo {\n\n /// The key to this node.\n\n node_key: NodeKey,\n\n\n\n /// The node itself.\n\n node: InternalNode,\n\n\n\n /// The bitmap indicating which children exist. It is generated by running\n\n /// `self.node.generate_bitmaps().0` and cached here.\n\n children_bitmap: u16,\n\n\n\n /// This integer always has exactly one 1-bit. The position of the 1-bit (from LSB) indicates\n\n /// the next child to visit in the iteration process. All the ones on the left have already\n\n /// been visited. All the chilren on the right (including this one) have not been visited yet.\n\n next_child_to_visit: u16,\n\n}\n\n\n\nimpl NodeVisitInfo {\n\n /// Constructs a new `NodeVisitInfo` with given node key and node. 
`next_child_to_visit` will\n\n /// be set to the leftmost child.\n", "file_path": "src/iterator.rs", "rank": 18, "score": 53235.52533924555 }, { "content": "fn arb_non_placeholder_sparse_merkle_sibling() -> impl Strategy<Value = [u8; 32]> {\n\n any::<[u8; 32]>().prop_filter(\"Filter out placeholder sibling.\", |x| {\n\n *x != SPARSE_MERKLE_PLACEHOLDER_HASH\n\n })\n\n}\n\n\n", "file_path": "src/types/proof/proptest_proof.rs", "rank": 19, "score": 51981.077689793216 }, { "content": "struct NaiveInternalNode {\n\n root: Rc<BinaryTreeNode>,\n\n}\n\n\n\nimpl NaiveInternalNode {\n\n fn from_clever_node(node: &InternalNode) -> Self {\n\n Self {\n\n root: Rc::new(Self::node_for_subtree(0, 16, node.children())),\n\n }\n\n }\n\n\n\n fn node_for_subtree(begin: u8, width: u8, children: &Children) -> BinaryTreeNode {\n\n if width == 1 {\n\n return children\n\n .get(&begin.into())\n\n .map_or(BinaryTreeNode::Null, |child| {\n\n BinaryTreeNode::new_child(begin, child)\n\n });\n\n }\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 20, "score": 50534.346717707325 }, { "content": "/// An internal node in a binary tree corresponding to a `InternalNode` being tested.\n\n///\n\n/// To describe its position in the binary tree, we use a range of level 0 (children level)\n\n/// positions expressed by (`begin`, `width`)\n\n///\n\n/// For example, in the below graph, node A has (begin:0, width:4), while node B has\n\n/// (begin:2, width: 2):\n\n/// ...\n\n/// /\n\n/// [A] ...\n\n/// / \\\n\n/// * [B] ...\n\n/// / \\ / \\\n\n/// 0 1 2 3 ... 15\n\nstruct BinaryTreeInternalNode {\n\n begin: u8,\n\n width: u8,\n\n left: Rc<BinaryTreeNode>,\n\n right: Rc<BinaryTreeNode>,\n\n hash: [u8; 32],\n\n}\n\n\n\nimpl BinaryTreeInternalNode {\n\n fn in_left_subtree(&self, n: u8) -> bool {\n\n assert!(n >= self.begin);\n\n assert!(n < self.begin + self.width);\n\n\n\n n < self.begin + self.width / 2\n\n }\n\n}\n\n\n\n/// A child node, corresponding to one that is in the corresponding `InternalNode` being tested.\n\n///\n\n/// `index` is its key in `InternalNode::children`.\n\n/// N.B. 
when `is_leaf` is true, in the binary tree represented by a `NaiveInternalNode`, the child\n\n/// node will be brought up to the root of the highest subtree that has only that leaf.\n", "file_path": "src/tests/node_type.rs", "rank": 21, "score": 49357.705364455425 }, { "content": "#[derive(Clone, Copy)]\n\nstruct BinaryTreeChildNode {\n\n version: Version,\n\n index: u8,\n\n hash: [u8; 32],\n\n is_leaf: bool,\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 22, "score": 49354.62000392258 }, { "content": "/// The interface used with [`JellyfishMerkleRestore`], taken from the Diem `storage-interface` crate.\n\npub trait StateSnapshotReceiver {\n\n fn add_chunk<'future, 'a: 'future>(\n\n &'a mut self,\n\n chunk: Vec<(KeyHash, OwnedValue)>,\n\n proof: SparseMerkleRangeProof,\n\n ) -> BoxFuture<'future, Result<()>>;\n\n\n\n fn finish(self) -> BoxFuture<'static, Result<()>>;\n\n\n\n fn finish_box(self: Box<Self>) -> BoxFuture<'static, Result<()>>;\n\n}\n\n\n\nimpl StateSnapshotReceiver for JellyfishMerkleRestore {\n\n fn add_chunk<'future, 'a: 'future>(\n\n &'a mut self,\n\n chunk: Vec<(KeyHash, OwnedValue)>,\n\n proof: SparseMerkleRangeProof,\n\n ) -> BoxFuture<'future, Result<()>> {\n\n self.add_chunk_impl(chunk, proof).boxed()\n\n }\n\n\n\n fn finish(self) -> BoxFuture<'static, Result<()>> {\n\n self.finish_impl().boxed()\n\n }\n\n\n\n fn finish_box(self: Box<Self>) -> BoxFuture<'static, Result<()>> {\n\n self.finish_impl().boxed()\n\n }\n\n}\n", "file_path": "src/restore.rs", "rank": 23, "score": 48985.97798208194 }, { "content": "struct EscapedByteSlice<'a>(&'a [u8]);\n\n\n\nimpl<'a> std::fmt::Debug for EscapedByteSlice<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"b\\\"\")?;\n\n for &b in self.0 {\n\n // https://doc.rust-lang.org/reference/tokens.html#byte-escapes\n\n if b == b'\\n' {\n\n write!(f, \"\\\\n\")?;\n\n } else if b == b'\\r' {\n\n write!(f, \"\\\\r\")?;\n\n } else if b == b'\\t' {\n\n write!(f, \"\\\\t\")?;\n\n } else if b == b'\\\\' || b == b'\"' {\n\n write!(f, \"\\\\{}\", b as char)?;\n\n } else if b == b'\\0' {\n\n write!(f, \"\\\\0\")?;\n\n // ASCII printable\n\n } else if b >= 0x20 && b < 0x7f {\n\n write!(f, \"{}\", b as char)?;\n\n } else {\n\n write!(f, \"\\\\x{:02x}\", b)?;\n\n }\n\n }\n\n write!(f, \"\\\"\")?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 24, "score": 46123.94359335159 }, { "content": "/// Checks if we can construct the expected root hash using the entries in the btree and the proof.\n\nfn verify_range_proof(\n\n expected_root_hash: RootHash,\n\n btree: BTreeMap<KeyHash, OwnedValue>,\n\n proof: SparseMerkleRangeProof,\n\n) {\n\n // For example, given the following sparse Merkle tree:\n\n //\n\n // root\n\n // / \\\n\n // / \\\n\n // / \\\n\n // o o\n\n // / \\ / \\\n\n // a o o h\n\n // / \\ / \\\n\n // o d e X\n\n // / \\ / \\\n\n // b c f g\n\n //\n\n // we transform the keys as follows:\n", "file_path": "src/tests/helper.rs", "rank": 25, "score": 45826.809026759496 }, { "content": "pub trait Peekable: Iterator {\n\n /// Returns the `next()` value without advancing the iterator.\n\n fn peek(&self) -> Option<Self::Item>;\n\n}\n\n\n\n/// BitIterator iterates a nibble path by bit.\n\npub struct BitIterator<'a> {\n\n nibble_path: &'a NibblePath,\n\n pos: std::ops::Range<usize>,\n\n}\n\n\n\nimpl<'a> Peekable for BitIterator<'a> {\n\n /// Returns the `next()` value without advancing the iterator.\n\n fn peek(&self) -> Option<Self::Item> {\n\n if self.pos.start < self.pos.end {\n\n 
Some(self.nibble_path.get_bit(self.pos.start))\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": "src/types/nibble/nibble_path.rs", "rank": 26, "score": 44696.96976882307 }, { "content": "#[test]\n\nfn test_leaf_hash() {\n\n {\n\n let address = KeyHash(OsRng.gen());\n\n let blob = vec![0x02];\n\n let value_hash: ValueHash = blob.as_slice().into();\n\n let hash = hash_leaf(address, value_hash);\n\n let leaf_node = Node::new_leaf(address, blob);\n\n assert_eq!(leaf_node.hash(), hash);\n\n }\n\n}\n\n\n\nproptest! {\n\n #[test]\n\n fn two_leaves_test1(index1 in (0..8u8).prop_map(Nibble::from), index2 in (8..16u8).prop_map(Nibble::from)) {\n\n let internal_node_key = random_63nibbles_node_key();\n\n let mut children = Children::default();\n\n\n\n let leaf1_node_key = gen_leaf_keys(0 /* version */, internal_node_key.nibble_path(), index1).0;\n\n let leaf2_node_key = gen_leaf_keys(1 /* version */, internal_node_key.nibble_path(), index2).0;\n\n let hash1 = OsRng.gen();\n", "file_path": "src/tests/node_type.rs", "rank": 27, "score": 44549.014821985504 }, { "content": "#[test]\n\n#[allow(clippy::bool_assert_comparison)]\n\nfn test_get_bit() {\n\n let bytes = vec![0x01, 0x02];\n\n let nibble_path = NibblePath::new(bytes);\n\n assert_eq!(nibble_path.get_bit(0), false);\n\n assert_eq!(nibble_path.get_bit(1), false);\n\n assert_eq!(nibble_path.get_bit(2), false);\n\n assert_eq!(nibble_path.get_bit(7), true);\n\n assert_eq!(nibble_path.get_bit(8), false);\n\n assert_eq!(nibble_path.get_bit(14), true);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 28, "score": 44549.014821985504 }, { "content": "#[test]\n\nfn test_encode_decode() {\n\n let internal_node_key = random_63nibbles_node_key();\n\n\n\n let leaf1_keys = gen_leaf_keys(0, internal_node_key.nibble_path(), Nibble::from(1));\n\n let leaf1_node = Node::new_leaf(leaf1_keys.1, vec![0x00]);\n\n let leaf2_keys = gen_leaf_keys(0, internal_node_key.nibble_path(), Nibble::from(2));\n\n let leaf2_node = Node::new_leaf(leaf2_keys.1, vec![0x01]);\n\n\n\n let mut children = Children::default();\n\n children.insert(\n\n Nibble::from(1),\n\n Child::new(leaf1_node.hash(), 0 /* version */, NodeType::Leaf),\n\n );\n\n children.insert(\n\n Nibble::from(2),\n\n Child::new(leaf2_node.hash(), 0 /* version */, NodeType::Leaf),\n\n );\n\n\n\n let account_key = KeyHash(OsRng.gen());\n\n let nodes = vec![\n", "file_path": "src/tests/node_type.rs", "rank": 29, "score": 44549.014821985504 }, { "content": "#[test]\n\nfn test_internal_validity() {\n\n let result = panic::catch_unwind(|| {\n\n let children = Children::default();\n\n InternalNode::new(children)\n\n });\n\n assert!(result.is_err());\n\n\n\n let result = panic::catch_unwind(|| {\n\n let mut children = Children::default();\n\n children.insert(\n\n Nibble::from(1),\n\n Child::new(OsRng.gen(), 0 /* version */, NodeType::Leaf),\n\n );\n\n InternalNode::new(children);\n\n });\n\n assert!(result.is_err());\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 30, "score": 44549.014821985504 }, { "content": "#[test]\n\nfn test_get_nibble() {\n\n let bytes = vec![0x12, 0x34];\n\n let nibble_path = NibblePath::new(bytes);\n\n assert_eq!(nibble_path.get_nibble(0), Nibble::from(0x01));\n\n assert_eq!(nibble_path.get_nibble(1), Nibble::from(0x02));\n\n assert_eq!(nibble_path.get_nibble(2), Nibble::from(0x03));\n\n assert_eq!(nibble_path.get_nibble(3), Nibble::from(0x04));\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 31, "score": 44549.014821985504 }, { "content": "#[test]\n\nfn 
test_bit_iter() {\n\n let bytes = vec![0xc3, 0xa0];\n\n let nibble_path = NibblePath::new_odd(bytes);\n\n let mut iter = nibble_path.bits();\n\n // c: 0b1100\n\n assert_eq!(iter.next(), Some(true));\n\n assert_eq!(iter.next(), Some(true));\n\n assert_eq!(iter.next(), Some(false));\n\n assert_eq!(iter.next(), Some(false));\n\n // 3: 0b0011\n\n assert_eq!(iter.next(), Some(false));\n\n assert_eq!(iter.next(), Some(false));\n\n assert_eq!(iter.next(), Some(true));\n\n assert_eq!(iter.next(), Some(true));\n\n // a: 0b1010\n\n assert_eq!(iter.next_back(), Some(false));\n\n assert_eq!(iter.next_back(), Some(true));\n\n assert_eq!(iter.next_back(), Some(false));\n\n assert_eq!(iter.next_back(), Some(true));\n\n\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 32, "score": 44549.014821985504 }, { "content": "#[test]\n\nfn test_nibble_iterator() {\n\n let bytes = vec![0x12, 0x30];\n\n let nibble_path = NibblePath::new_odd(bytes);\n\n let mut iter = nibble_path.nibbles();\n\n assert_eq!(iter.next().unwrap(), Nibble::from(0x01));\n\n assert_eq!(iter.next().unwrap(), Nibble::from(0x02));\n\n assert_eq!(iter.next().unwrap(), Nibble::from(0x03));\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 33, "score": 44549.014821985504 }, { "content": "/// Computes the root hash of a sparse Merkle tree. `kvs` consists of the entire set of key-value\n\n/// pairs stored in the tree.\n\nfn compute_root_hash(kvs: Vec<(Vec<bool>, [u8; 32])>) -> RootHash {\n\n let mut kv_ref = vec![];\n\n for (key, value) in &kvs {\n\n kv_ref.push((&key[..], *value));\n\n }\n\n RootHash(compute_root_hash_impl(kv_ref))\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 34, "score": 44384.221928185434 }, { "content": "#[test]\n\n#[should_panic(expected = \"Should have odd number of nibbles.\")]\n\nfn test_empty_nibble_path() {\n\n NibblePath::new_odd(vec![]);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 35, "score": 43382.553861390516 }, { "content": "#[test]\n\nfn test_skip_common_prefix() {\n\n {\n\n let nibble_path1 = NibblePath::new(vec![0x12, 0x34, 0x56]);\n\n let nibble_path2 = NibblePath::new(vec![0x12, 0x34, 0x56]);\n\n let mut iter1 = nibble_path1.nibbles();\n\n let mut iter2 = nibble_path2.nibbles();\n\n assert_eq!(skip_common_prefix(&mut iter1, &mut iter2), 6);\n\n assert!(iter1.is_finished());\n\n assert!(iter2.is_finished());\n\n }\n\n {\n\n let nibble_path1 = NibblePath::new(vec![0x12, 0x35]);\n\n let nibble_path2 = NibblePath::new(vec![0x12, 0x34, 0x56]);\n\n let mut iter1 = nibble_path1.nibbles();\n\n let mut iter2 = nibble_path2.nibbles();\n\n assert_eq!(skip_common_prefix(&mut iter1, &mut iter2), 3);\n\n assert_eq!(\n\n iter1.visited_nibbles().get_nibble_path(),\n\n iter2.visited_nibbles().get_nibble_path()\n\n );\n", "file_path": "src/tests/nibble_path.rs", "rank": 36, "score": 43382.553861390516 }, { "content": "#[test]\n\nfn test_visited_nibble_iter() {\n\n let bytes = vec![0x12, 0x34, 0x56];\n\n let nibble_path = NibblePath::new(bytes);\n\n let mut iter = nibble_path.nibbles();\n\n assert_eq!(iter.next().unwrap(), 0x01.into());\n\n assert_eq!(iter.next().unwrap(), 0x02.into());\n\n assert_eq!(iter.next().unwrap(), 0x03.into());\n\n let mut visited_nibble_iter = iter.visited_nibbles();\n\n assert_eq!(visited_nibble_iter.next().unwrap(), 0x01.into());\n\n assert_eq!(visited_nibble_iter.next().unwrap(), 0x02.into());\n\n assert_eq!(visited_nibble_iter.next().unwrap(), 0x03.into());\n\n}\n\n\n", 
"file_path": "src/tests/nibble_path.rs", "rank": 37, "score": 43382.553861390516 }, { "content": "#[test]\n\nfn test_internal_hash_and_proof() {\n\n // non-leaf case 1\n\n {\n\n let internal_node_key = random_63nibbles_node_key();\n\n let mut children = Children::default();\n\n\n\n let index1 = Nibble::from(4);\n\n let index2 = Nibble::from(15);\n\n let hash1 = OsRng.gen();\n\n let hash2 = OsRng.gen();\n\n let child1_node_key = gen_leaf_keys(\n\n 0, /* version */\n\n internal_node_key.nibble_path(),\n\n index1,\n\n )\n\n .0;\n\n let child2_node_key = gen_leaf_keys(\n\n 1, /* version */\n\n internal_node_key.nibble_path(),\n\n index2,\n", "file_path": "src/tests/node_type.rs", "rank": 38, "score": 43382.553861390516 }, { "content": "#[test]\n\nfn test_nibble_path_fmt() {\n\n let nibble_path = NibblePath::new(vec![0x12, 0x34, 0x56]);\n\n assert_eq!(format!(\"{:?}\", nibble_path), \"123456\");\n\n\n\n let nibble_path = NibblePath::new(vec![0x12, 0x34, 0x50]);\n\n assert_eq!(format!(\"{:?}\", nibble_path), \"123450\");\n\n\n\n let nibble_path = NibblePath::new_odd(vec![0x12, 0x34, 0x50]);\n\n assert_eq!(format!(\"{:?}\", nibble_path), \"12345\");\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 39, "score": 43382.553861390516 }, { "content": "#[test]\n\n#[should_panic(expected = \"Last nibble must be 0.\")]\n\nfn test_create_nibble_path_failure() {\n\n let bytes = vec![0x12, 0x34, 0x56];\n\n let _nibble_path = NibblePath::new_odd(bytes);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 40, "score": 42310.491190025714 }, { "content": "#[test]\n\nfn test_create_nibble_path_success() {\n\n let nibble_path = NibblePath::new(vec![0x12, 0x34, 0x56]);\n\n assert_eq!(nibble_path.num_nibbles(), 6);\n\n\n\n let nibble_path = NibblePath::new(vec![0x12, 0x34, 0x50]);\n\n assert_eq!(nibble_path.num_nibbles(), 6);\n\n\n\n let nibble_path = NibblePath::new_odd(vec![0x12, 0x34, 0x50]);\n\n assert_eq!(nibble_path.num_nibbles(), 5);\n\n\n\n let nibble_path = NibblePath::new(vec![]);\n\n assert_eq!(nibble_path.num_nibbles(), 0);\n\n}\n\n\n", "file_path": "src/tests/nibble_path.rs", "rank": 41, "score": 42310.491190025714 }, { "content": "pub trait Bytes32Ext: Index<usize> + Sized {\n\n /// Returns the `index`-th nibble.\n\n fn get_nibble(&self, index: usize) -> crate::types::nibble::Nibble;\n\n /// Returns the length of common prefix of `self` and `other` in bits.\n\n fn common_prefix_bits_len(&self, other: &[u8; 32]) -> usize;\n\n /// Returns a `HashValueBitIterator` over all the bits that represent this hash value.\n\n fn iter_bits(&self) -> HashValueBitIterator<'_>;\n\n /// Returns the `index`-th nibble in the bytes.\n\n fn nibble(&self, index: usize) -> u8;\n\n /// Returns the length of common prefix of `self` and `other` in nibbles.\n\n fn common_prefix_nibbles_len(&self, other: &[u8; 32]) -> usize {\n\n self.common_prefix_bits_len(other) / 4\n\n }\n\n /// Constructs a `HashValue` from an iterator of bits.\n\n fn from_bit_iter(iter: impl ExactSizeIterator<Item = bool>) -> Option<Self>;\n\n}\n\n\n\nimpl Bytes32Ext for [u8; 32] {\n\n fn get_nibble(&self, index: usize) -> crate::types::nibble::Nibble {\n\n crate::types::nibble::Nibble::from(if index % 2 == 0 {\n", "file_path": "src/bytes32ext.rs", "rank": 42, "score": 41909.569394826474 }, { "content": "use anyhow::{format_err, Result};\n\nuse futures::future::BoxFuture;\n\n\n\nuse crate::node_type::{LeafNode, Node, NodeKey};\n\n\n\n/// Defines the interface between a\n\n/// [`JellyfishMerkleTree`](crate::JellyfishMerkleTree)\n\n/// 
and underlying storage holding nodes.\n", "file_path": "src/reader.rs", "rank": 43, "score": 35957.033764471744 }, { "content": " stale_node_index_cache: HashSet<NodeKey>,\n\n\n\n /// # of leaves in the `stale_node_index_cache`,\n\n num_stale_leaves: usize,\n\n\n\n /// The immutable part of this cache, which will be committed to the underlying storage.\n\n frozen_cache: FrozenTreeCache,\n\n\n\n /// The underlying persistent storage.\n\n reader: &'a R,\n\n}\n\n\n\nimpl<'a, R> TreeCache<'a, R>\n\nwhere\n\n R: 'a + TreeReader,\n\n{\n\n /// Constructs a new `TreeCache` instance.\n\n pub async fn new(reader: &'a R, next_version: Version) -> Result<TreeCache<'a, R>> {\n\n let mut node_cache = HashMap::new();\n\n let root_node_key = if next_version == 0 {\n", "file_path": "src/tree_cache.rs", "rank": 51, "score": 33798.79493024573 }, { "content": " };\n\n Ok(Self {\n\n node_cache,\n\n stale_node_index_cache: HashSet::new(),\n\n frozen_cache: FrozenTreeCache::new(),\n\n root_node_key,\n\n next_version,\n\n reader,\n\n num_stale_leaves: 0,\n\n num_new_leaves: 0,\n\n })\n\n }\n\n\n\n /// Gets a node with given node key. If it doesn't exist in node cache, read from `reader`.\n\n pub async fn get_node(&self, node_key: &NodeKey) -> Result<Node> {\n\n Ok(if let Some(node) = self.node_cache.get(node_key) {\n\n node.clone()\n\n } else if let Some(node) = self.frozen_cache.node_cache.get(node_key) {\n\n node.clone()\n\n } else {\n", "file_path": "src/tree_cache.rs", "rank": 52, "score": 33797.80948252679 }, { "content": " root_hashes: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\n/// `TreeCache` is a in-memory cache for per-transaction updates of sparse Merkle nodes and values.\n\npub struct TreeCache<'a, R> {\n\n /// `NodeKey` of the current root node in cache.\n\n root_node_key: NodeKey,\n\n\n\n /// The version of the transaction to which the upcoming `put`s will be related.\n\n next_version: Version,\n\n\n\n /// Intermediate nodes keyed by node hash\n\n node_cache: HashMap<NodeKey, Node>,\n\n\n\n /// # of leaves in the `node_cache`,\n\n num_new_leaves: usize,\n\n\n\n /// Partial stale log. `NodeKey` to identify the stale record.\n", "file_path": "src/tree_cache.rs", "rank": 53, "score": 33793.62830457237 }, { "content": "//! - Delete a node.\n\n//! When we apply these operations on a multi-version tree:\n\n//! 1) Put a new node.\n\n//! 2) When we remove a node, if the node is in the previous on-disk version, we don't need to do\n\n//! anything. Otherwise we delete it from the tree cache.\n\n//! Updating node could be operated as deletion of the node followed by insertion of the updated\n\n//! node.\n\n\n\nuse std::collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet};\n\n\n\nuse anyhow::{bail, Result};\n\n\n\nuse crate::{\n\n metrics::DIEM_JELLYFISH_STORAGE_READS,\n\n node_type::{Node, NodeKey},\n\n storage::{\n\n NodeBatch, NodeStats, StaleNodeIndex, StaleNodeIndexBatch, TreeReader, TreeUpdateBatch,\n\n },\n\n types::{Version, PRE_GENESIS_VERSION},\n\n RootHash,\n\n};\n\n\n\n/// `FrozenTreeCache` is used as a field of `TreeCache` storing all the nodes and values that\n\n/// are generated by earlier transactions so they have to be immutable. 
The motivation of\n\n/// `FrozenTreeCache` is to let `TreeCache` freeze intermediate results from each transaction to\n\n/// help commit more than one transaction in a row atomically.\n", "file_path": "src/tree_cache.rs", "rank": 54, "score": 33793.55668183459 }, { "content": " DIEM_JELLYFISH_STORAGE_READS.inc();\n\n self.reader.get_node_option(node_key).await?.unwrap()\n\n })\n\n }\n\n\n\n /// Gets the current root node key.\n\n pub fn get_root_node_key(&self) -> &NodeKey {\n\n &self.root_node_key\n\n }\n\n\n\n /// Set roots `node_key`.\n\n pub fn set_root_node_key(&mut self, root_node_key: NodeKey) {\n\n self.root_node_key = root_node_key;\n\n }\n\n\n\n /// Puts the node with given hash as key into node_cache.\n\n pub fn put_node(&mut self, node_key: NodeKey, new_node: Node) -> Result<()> {\n\n match self.node_cache.entry(node_key) {\n\n Entry::Vacant(o) => {\n\n if new_node.is_leaf() {\n", "file_path": "src/tree_cache.rs", "rank": 55, "score": 33790.004302972535 }, { "content": "}\n\n\n\nimpl<'a, R> From<TreeCache<'a, R>> for (Vec<RootHash>, TreeUpdateBatch)\n\nwhere\n\n R: 'a + TreeReader,\n\n{\n\n fn from(tree_cache: TreeCache<'a, R>) -> Self {\n\n (\n\n tree_cache.frozen_cache.root_hashes,\n\n TreeUpdateBatch {\n\n node_batch: tree_cache.frozen_cache.node_cache,\n\n stale_node_index_batch: tree_cache.frozen_cache.stale_node_index_cache,\n\n node_stats: tree_cache.frozen_cache.node_stats,\n\n },\n\n )\n\n }\n\n}\n", "file_path": "src/tree_cache.rs", "rank": 56, "score": 33789.14293691721 }, { "content": " self.frozen_cache.node_cache.extend(self.node_cache.drain());\n\n let stale_since_version = self.next_version;\n\n self.frozen_cache\n\n .stale_node_index_cache\n\n .extend(\n\n self.stale_node_index_cache\n\n .drain()\n\n .map(|node_key| StaleNodeIndex {\n\n stale_since_version,\n\n node_key,\n\n }),\n\n );\n\n\n\n // Clean up\n\n self.num_stale_leaves = 0;\n\n self.num_new_leaves = 0;\n\n\n\n // Prepare for the next version after freezing\n\n self.next_version += 1;\n\n }\n", "file_path": "src/tree_cache.rs", "rank": 57, "score": 33786.89824902712 }, { "content": " self.num_new_leaves -= 1;\n\n }\n\n }\n\n\n\n /// Freezes all the contents in cache to be immutable and clear `node_cache`.\n\n pub async fn freeze(&mut self) {\n\n let root_node_key = self.get_root_node_key();\n\n let root_hash = self\n\n .get_node(root_node_key)\n\n .await\n\n .unwrap_or_else(|_| unreachable!(\"Root node with key {:?} must exist\", root_node_key))\n\n .hash();\n\n self.frozen_cache.root_hashes.push(RootHash(root_hash));\n\n let node_stats = NodeStats {\n\n new_nodes: self.node_cache.len(),\n\n new_leaves: self.num_new_leaves,\n\n stale_nodes: self.stale_node_index_cache.len(),\n\n stale_leaves: self.num_stale_leaves,\n\n };\n\n self.frozen_cache.node_stats.push(node_stats);\n", "file_path": "src/tree_cache.rs", "rank": 58, "score": 33786.74423016439 }, { "content": " self.num_new_leaves += 1\n\n }\n\n o.insert(new_node);\n\n }\n\n Entry::Occupied(o) => bail!(\"Node with key {:?} already exists in NodeBatch\", o.key()),\n\n };\n\n Ok(())\n\n }\n\n\n\n /// Deletes a node with given hash.\n\n pub fn delete_node(&mut self, old_node_key: &NodeKey, is_leaf: bool) {\n\n // If node cache doesn't have this node, it means the node is in the previous version of\n\n // the tree on the disk.\n\n if self.node_cache.remove(old_node_key).is_none() {\n\n let is_new_entry = self.stale_node_index_cache.insert(old_node_key.clone());\n\n assert!(is_new_entry, \"Node gets stale twice unexpectedly.\");\n\n if is_leaf {\n\n 
self.num_stale_leaves += 1;\n\n }\n\n } else if is_leaf {\n", "file_path": "src/tree_cache.rs", "rank": 59, "score": 33786.439865756955 }, { "content": " let pre_genesis_root_key = NodeKey::new_empty_path(PRE_GENESIS_VERSION);\n\n let pre_genesis_root = reader.get_node_option(&pre_genesis_root_key).await?;\n\n\n\n match pre_genesis_root {\n\n Some(_) => {\n\n // This is to support the extreme case where things really went wild,\n\n // and we need to ditch the transaction history and apply a new\n\n // genesis on top of an existing state db.\n\n pre_genesis_root_key\n\n }\n\n None => {\n\n // Hack: We need to start from an empty tree, so we insert\n\n // a null node beforehand deliberately to deal with this corner case.\n\n let genesis_root_key = NodeKey::new_empty_path(0);\n\n node_cache.insert(genesis_root_key.clone(), Node::new_null());\n\n genesis_root_key\n\n }\n\n }\n\n } else {\n\n NodeKey::new_empty_path(next_version - 1)\n", "file_path": "src/tree_cache.rs", "rank": 60, "score": 33785.26404284956 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! A transaction can have multiple operations on state. For example, it might update values\n\n//! for a few existing keys. Imagine that we have the following tree.\n\n//!\n\n//! ```text\n\n//! root0\n\n//! / \\\n\n//! / \\\n\n//! key1 => value11 key2 => value21\n\n//! ```\n\n//!\n\n//! The next transaction updates `key1`'s value to `value12` and `key2`'s value to `value22`.\n\n//! Let's assume we update key2 first. Then the tree becomes:\n\n//!\n\n//! ```text\n\n//! (on disk) (in memory)\n\n//! root0 root1'\n\n//! / \\ / \\\n", "file_path": "src/tree_cache.rs", "rank": 61, "score": 33783.44427481527 }, { "content": "//! ```text\n\n//! (on disk) (in memory)\n\n//! root0 root1''\n\n//! / \\ / \\\n\n//! / \\ / \\\n\n//! / \\ / \\\n\n//! / \\ / \\\n\n//! / \\ / \\\n\n//! key1 => value11 key2 => value21 key1 => value12 key2 => value22\n\n//! (on disk) (on disk) (in memory) (in memory)\n\n//! ```\n\n//!\n\n//! This means that we need to be able to tell whether to create a new version of a node or to\n\n//! update an existing node by deleting it and creating a new node directly. `TreeCache` provides\n\n//! APIs to cache intermediate nodes and values in memory and simplify the actual tree\n\n//! implementation.\n\n//!\n\n//! If we are dealing with a single-version tree, any complex tree operation can be seen as a\n\n//! collection of the following operations:\n\n//! - Put a new node.\n", "file_path": "src/tree_cache.rs", "rank": 62, "score": 33783.26754669364 }, { "content": "//! / ___ \\ _____________/ \\\n\n//! / _/ \\ \\\n\n//! / _/ \\ \\\n\n//! / / \\ \\\n\n//! key1 => value11 key2 => value21 key2 => value22\n\n//! (on disk) (on disk) (in memory)\n\n//! ```\n\n//!\n\n//! Note that\n\n//! 1) we created a new version of the tree with `root1'` and the new `key2` node generated;\n\n//! 2) both `root1'` and the new `key2` node are still held in memory within a batch of nodes\n\n//! that will be written into db atomically.\n\n//!\n\n//! Next, we need to update `key1`'s value. This time we are dealing with the tree starting from\n\n//! the new root. Part of the tree is in memory and the rest of it is in database. We'll update the\n\n//! left child and the new root. We should\n\n//! 1) create a new version for `key1` child.\n\n//! 2) update `root1'` directly instead of making another version.\n\n//! 
The resulting tree should look like:\n\n//!\n", "file_path": "src/tree_cache.rs", "rank": 63, "score": 33780.10330761305 }, { "content": "fn hash_internal(left: [u8; 32], right: [u8; 32]) -> [u8; 32] {\n\n SparseMerkleInternalNode::new(left, right).hash()\n\n}\n\n\n", "file_path": "src/tests/node_type.rs", "rank": 64, "score": 32401.24642622741 }, { "content": "fn compute_root_hash_impl(kvs: Vec<(&[bool], [u8; 32])>) -> [u8; 32] {\n\n assert!(!kvs.is_empty());\n\n\n\n // If there is only one entry, it is the root.\n\n if kvs.len() == 1 {\n\n return kvs[0].1;\n\n }\n\n\n\n // Otherwise the tree has more than one leaves, which means we can find which ones are in the\n\n // left subtree and which ones are in the right subtree. So we find the first key that starts\n\n // with a 1-bit.\n\n let left_hash;\n\n let right_hash;\n\n match kvs.iter().position(|(key, _value)| key[0]) {\n\n Some(0) => {\n\n // Every key starts with a 1-bit, i.e., they are all in the right subtree.\n\n left_hash = SPARSE_MERKLE_PLACEHOLDER_HASH;\n\n right_hash = compute_root_hash_impl(reduce(&kvs));\n\n }\n\n Some(index) => {\n", "file_path": "src/tests/helper.rs", "rank": 65, "score": 32112.080526055666 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse rand::{rngs::OsRng, Rng};\n\n\n\nuse crate::{\n\n mock::MockTreeStore,\n\n node_type::{Node, NodeKey},\n\n tree_cache::TreeCache,\n\n types::{nibble::nibble_path::NibblePath, Version, PRE_GENESIS_VERSION},\n\n KeyHash,\n\n};\n\n\n", "file_path": "src/tests/tree_cache.rs", "rank": 66, "score": 32078.65538945547 }, { "content": "\n\n#[tokio::test]\n\nasync fn test_root_node() {\n\n let next_version = 0;\n\n let db = MockTreeStore::default();\n\n let mut cache = TreeCache::new(&db, next_version).await.unwrap();\n\n assert_eq!(*cache.get_root_node_key(), NodeKey::new_empty_path(0));\n\n\n\n let (node, node_key) = random_leaf_with_key(next_version);\n\n db.put_node(node_key.clone(), node).await.unwrap();\n\n cache.set_root_node_key(node_key.clone());\n\n\n\n assert_eq!(*cache.get_root_node_key(), node_key);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_pre_genesis() {\n\n let next_version = 0;\n\n let db = MockTreeStore::default();\n\n let pre_genesis_root_key = NodeKey::new_empty_path(PRE_GENESIS_VERSION);\n", "file_path": "src/tests/tree_cache.rs", "rank": 67, "score": 32076.423889660575 }, { "content": " let (pre_genesis_only_node, _) = random_leaf_with_key(PRE_GENESIS_VERSION);\n\n db.put_node(pre_genesis_root_key.clone(), pre_genesis_only_node)\n\n .await\n\n .unwrap();\n\n\n\n let cache = TreeCache::new(&db, next_version).await.unwrap();\n\n assert_eq!(*cache.get_root_node_key(), pre_genesis_root_key);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_freeze_with_delete() {\n\n let next_version = 0;\n\n let db = MockTreeStore::default();\n\n let mut cache = TreeCache::new(&db, next_version).await.unwrap();\n\n\n\n assert_eq!(*cache.get_root_node_key(), NodeKey::new_empty_path(0));\n\n\n\n let (node1, node1_key) = random_leaf_with_key(next_version);\n\n cache.put_node(node1_key.clone(), node1.clone()).unwrap();\n\n let (node2, node2_key) = random_leaf_with_key(next_version);\n", "file_path": "src/tests/tree_cache.rs", "rank": 68, "score": 32075.742009499212 }, { "content": " cache.put_node(node2_key.clone(), node2.clone()).unwrap();\n\n assert_eq!(cache.get_node(&node1_key).await.unwrap(), node1);\n\n assert_eq!(cache.get_node(&node2_key).await.unwrap(), node2);\n\n cache.freeze().await;\n\n 
assert_eq!(cache.get_node(&node1_key).await.unwrap(), node1);\n\n assert_eq!(cache.get_node(&node2_key).await.unwrap(), node2);\n\n\n\n cache.delete_node(&node1_key, true /* is_leaf */);\n\n cache.freeze().await;\n\n let (_, update_batch) = cache.into();\n\n assert_eq!(update_batch.node_batch.len(), 3);\n\n assert_eq!(update_batch.stale_node_index_batch.len(), 1);\n\n}\n", "file_path": "src/tests/tree_cache.rs", "rank": 69, "score": 32070.016525561336 }, { "content": "/// Reduces the problem by removing the first bit of every key.\n\nfn reduce<'a>(kvs: &'a [(&[bool], [u8; 32])]) -> Vec<(&'a [bool], [u8; 32])> {\n\n kvs.iter().map(|(key, value)| (&key[1..], *value)).collect()\n\n}\n\n\n", "file_path": "src/tests/helper.rs", "rank": 70, "score": 31978.82939507071 }, { "content": "# Penumbra's Jellyfish Merkle Tree\n\n\n\nThis repository is a fork of the [Diem Jellyfish Merkle Tree](https://github.com/diem/diem/tree/main/storage/jellyfish-merkle)\n\ncrate, modified to inline dependencies and trim the parts not needed for Penumbra's uses.\n", "file_path": "README.md", "rank": 71, "score": 21277.444807220905 }, { "content": "use anyhow::{anyhow, Result};\n\n\n\nuse crate::{storage::TreeReader, JellyfishMerkleTree, Version};\n\n\n\nimpl<'a, R> JellyfishMerkleTree<'a, R>\n\nwhere\n\n R: 'a + TreeReader + Sync,\n\n{\n\n /// Returns the value and an [`ics23::ExistenceProof`].\n\n pub async fn get_with_ics23_proof(\n\n &self,\n\n key: Vec<u8>,\n\n version: Version,\n\n ) -> Result<ics23::ExistenceProof> {\n\n let key_hash = key.as_slice().into();\n\n let (value, proof) = self.get_with_proof(key_hash, version).await?;\n\n let value = value.ok_or_else(|| {\n\n anyhow!(\n\n \"Requested proof of inclusion for non-existent key {:?}\",\n\n key\n", "file_path": "src/ics23_impl.rs", "rank": 72, "score": 31.864502597823495 }, { "content": " },\n\n KeyHash, MissingRootError, OwnedValue, RootHash,\n\n};\n\n\n\n/// The Jellyfish Merkle tree data structure. See [`crate`] for description.\n\npub struct JellyfishMerkleTree<'a, R> {\n\n reader: &'a R,\n\n leaf_count_migration: bool,\n\n}\n\n\n\nimpl<'a, R> JellyfishMerkleTree<'a, R>\n\nwhere\n\n R: 'a + TreeReader + Sync,\n\n{\n\n /// Creates a `JellyfishMerkleTree` backed by the given [`TreeReader`](trait.TreeReader.html).\n\n pub fn new(reader: &'a R) -> Self {\n\n Self {\n\n reader,\n\n leaf_count_migration: true,\n\n }\n", "file_path": "src/tree.rs", "rank": 73, "score": 25.49231084712353 }, { "content": " /// `self.parent_stack` is empty. But in case of a tree with a single leaf, we need this\n\n /// additional bit.\n\n done: bool,\n\n\n\n /// The future, if any, of a node lookup needed for the next call to `poll_next`.\n\n future: Option<BoxFuture<'static, (NodeKey, Result<Node>)>>,\n\n}\n\n\n\nimpl<R> JellyfishMerkleStream<R>\n\nwhere\n\n R: TreeReader + Send + Sync,\n\n{\n\n /// Constructs a new iterator. 
This puts the internal state in the correct position, so the\n\n /// following `next` call will yield the smallest key that is greater or equal to\n\n /// `starting_key`.\n\n pub async fn new(reader: Arc<R>, version: Version, starting_key: KeyHash) -> Result<Self> {\n\n let mut parent_stack = vec![];\n\n let mut done = false;\n\n\n\n let mut current_node_key = NodeKey::new_empty_path(version);\n", "file_path": "src/iterator.rs", "rank": 74, "score": 22.07939941048869 }, { "content": " .await?;\n\n\n\n tree_cache.set_root_node_key(new_root_node_key);\n\n Ok(())\n\n }\n\n\n\n /// Helper function for recursive insertion into the subtree that starts from the current\n\n /// [`NodeKey`](node_type/struct.NodeKey.html). Returns the newly inserted node.\n\n /// It is safe to use recursion here because the max depth is limited by the key length which\n\n /// for this tree is the length of the hash of account addresses.\n\n #[async_recursion::async_recursion]\n\n async fn insert_at<'future, 'cache: 'future>(\n\n &'future self,\n\n node_key: NodeKey,\n\n version: Version,\n\n nibble_iter: &mut NibbleIterator<'future>,\n\n value: OwnedValue,\n\n tree_cache: &mut TreeCache<'cache, R>,\n\n ) -> Result<(NodeKey, Node)> {\n\n let node = tree_cache.get_node(&node_key).await?;\n", "file_path": "src/tree.rs", "rank": 75, "score": 21.886112508665633 }, { "content": " let mut tree_cache = TreeCache::new(self.reader, first_version).await?;\n\n for (idx, value_set) in value_sets.into_iter().enumerate() {\n\n assert!(\n\n !value_set.is_empty(),\n\n \"Transactions that output empty write set should not be included.\",\n\n );\n\n let version = first_version + idx as u64;\n\n for (key, value) in value_set {\n\n self.put(key, value, version, &mut tree_cache).await?;\n\n }\n\n // Freezes the current cache to make all contents in the current cache immutable.\n\n tree_cache.freeze().await;\n\n }\n\n\n\n Ok(tree_cache.into())\n\n }\n\n\n\n async fn put(\n\n &self,\n\n key: KeyHash,\n", "file_path": "src/tree.rs", "rank": 76, "score": 21.78778788951723 }, { "content": "mod overlay;\n\nmod reader;\n\nmod tree;\n\nmod tree_cache;\n\nmod types;\n\nmod writer;\n\n\n\npub mod mock;\n\npub mod restore;\n\n\n\nuse bytes32ext::Bytes32Ext;\n\nuse types::nibble::ROOT_NIBBLE_HEIGHT;\n\n\n\n#[cfg(feature = \"ics23\")]\n\npub use ics23_impl::ics23_spec;\n\npub use iterator::JellyfishMerkleStream;\n\npub use overlay::WriteOverlay;\n\npub use tree::JellyfishMerkleTree;\n\npub use types::proof;\n\npub use types::Version;\n", "file_path": "src/lib.rs", "rank": 77, "score": 21.034984385127007 }, { "content": " )\n\n .await?;\n\n tree_cache.set_root_node_key(new_root_node_key);\n\n\n\n // Freezes the current cache to make all contents in the current cache immutable.\n\n tree_cache.freeze().await;\n\n }\n\n\n\n Ok(tree_cache.into())\n\n }\n\n\n\n #[async_recursion::async_recursion]\n\n async fn batch_insert_at(\n\n &self,\n\n mut node_key: NodeKey,\n\n version: Version,\n\n kvs: &[(KeyHash, OwnedValue)],\n\n depth: usize,\n\n hash_cache: &Option<&HashMap<NibblePath, [u8; 32]>>,\n\n tree_cache: &mut TreeCache<R>,\n", "file_path": "src/tree.rs", "rank": 78, "score": 20.903607064346968 }, { "content": " }\n\n\n\n pub fn new_migration(reader: &'a R, leaf_count_migration: bool) -> Self {\n\n Self {\n\n reader,\n\n leaf_count_migration,\n\n }\n\n }\n\n\n\n /// Get the node hash from the cache if exists, otherwise compute it.\n\n fn get_hash(\n\n node_key: &NodeKey,\n\n node: &Node,\n\n hash_cache: &Option<&HashMap<NibblePath, [u8; 32]>>,\n\n ) -> 
[u8; 32] {\n\n if let Some(cache) = hash_cache {\n\n match cache.get(node_key.nibble_path()) {\n\n Some(hash) => *hash,\n\n None => unreachable!(\"{:?} can not be found in hash cache\", node_key),\n\n }\n", "file_path": "src/tree.rs", "rank": 79, "score": 20.2963749017721 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! This module implements `JellyfishMerkleIterator`. Initialized with a version and a key, the\n\n//! iterator generates all the key-value pairs in this version of the tree, starting from the\n\n//! smallest key that is greater or equal to the given key, by performing a depth first traversal\n\n//! on the tree.\n\n\n\nuse std::{\n\n pin::Pin,\n\n sync::Arc,\n\n task::{Context, Poll},\n\n};\n\n\n\nuse anyhow::{bail, ensure, format_err, Result};\n\nuse futures::{future::BoxFuture, ready};\n\n\n\nuse crate::{\n\n node_type::{Child, InternalNode, Node, NodeKey},\n\n storage::TreeReader,\n", "file_path": "src/iterator.rs", "rank": 80, "score": 20.098485210174108 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\n#[cfg_attr(any(test, feature = \"fuzzing\"), derive(Arbitrary))]\n\npub struct SparseMerkleLeafNode {\n\n key_hash: KeyHash,\n\n value_hash: ValueHash,\n\n}\n\n\n\nimpl SparseMerkleLeafNode {\n\n pub(crate) fn new(key_hash: KeyHash, value_hash: ValueHash) -> Self {\n\n SparseMerkleLeafNode {\n\n key_hash,\n\n value_hash,\n\n }\n\n }\n\n\n\n pub(crate) fn key_hash(&self) -> KeyHash {\n\n self.key_hash\n\n }\n\n\n\n pub(crate) fn hash(&self) -> [u8; 32] {\n\n use sha2::Digest;\n\n let mut hasher = sha2::Sha256::new();\n\n hasher.update(b\"JMT::LeafNode\");\n\n hasher.update(&self.key_hash.0);\n\n hasher.update(&self.value_hash.0);\n\n *hasher.finalize().as_ref()\n\n }\n\n}\n", "file_path": "src/types/proof.rs", "rank": 81, "score": 19.62682264747564 }, { "content": " }\n\n }\n\n }\n\n\n\n match crate::reader::get_node_async(&*reader, &current_node_key).await? 
{\n\n Node::Internal(_) => unreachable!(\"Should have reached the bottom of the tree.\"),\n\n Node::Leaf(leaf_node) => {\n\n if leaf_node.key_hash() < starting_key {\n\n Self::cleanup_stack(&mut parent_stack);\n\n if parent_stack.is_empty() {\n\n done = true;\n\n }\n\n }\n\n }\n\n Node::Null => done = true,\n\n }\n\n\n\n Ok(Self {\n\n reader,\n\n version,\n", "file_path": "src/iterator.rs", "rank": 82, "score": 19.574010569981517 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse std::{collections::BTreeMap, sync::Arc};\n\n\n\nuse anyhow::Result;\n\nuse futures::StreamExt;\n\nuse rand::{rngs::StdRng, Rng, SeedableRng};\n\n\n\nuse super::helper::plus_one;\n\nuse crate::{\n\n iterator::JellyfishMerkleStream, mock::MockTreeStore, types::Version, JellyfishMerkleTree,\n\n KeyHash, OwnedValue,\n\n};\n\n\n\n#[tokio::test]\n\nasync fn test_iterator_same_version() {\n\n for i in (1..100).step_by(11) {\n\n test_n_leaves_same_version(i).await;\n\n }\n", "file_path": "src/tests/iterator.rs", "rank": 83, "score": 19.55614369560492 }, { "content": " Child::new(\n\n Self::get_hash(&new_child_node_key, &new_child_node, hash_cache),\n\n version,\n\n new_child_node.node_type(),\n\n ),\n\n );\n\n }\n\n let new_internal_node =\n\n InternalNode::new_migration(children, self.leaf_count_migration);\n\n\n\n tree_cache.put_node(node_key.clone(), new_internal_node.clone().into())?;\n\n Ok((node_key, new_internal_node.into()))\n\n }\n\n }\n\n\n\n /// This is a convenient function that calls\n\n /// [`put_value_sets`](struct.JellyfishMerkleTree.html#method.put_value_sets) with a single\n\n /// `keyed_value_set`.\n\n pub async fn put_value_set(\n\n &self,\n", "file_path": "src/tree.rs", "rank": 84, "score": 19.548872181444462 }, { "content": " .collect();\n\n Ok(SparseMerkleRangeProof::new(siblings))\n\n }\n\n\n\n pub async fn get(&self, key: KeyHash, version: Version) -> Result<Option<OwnedValue>> {\n\n Ok(self.get_with_proof(key, version).await?.0)\n\n }\n\n\n\n async fn get_root_node(&self, version: Version) -> Result<Node> {\n\n self.get_root_node_option(version)\n\n .await?\n\n .ok_or_else(|| format_err!(\"Root node not found for version {}.\", version))\n\n }\n\n\n\n async fn get_root_node_option(&self, version: Version) -> Result<Option<Node>> {\n\n let root_node_key = NodeKey::new_empty_path(version);\n\n self.reader.get_node_option(&root_node_key).await\n\n }\n\n\n\n pub async fn get_root_hash(&self, version: Version) -> Result<RootHash> {\n", "file_path": "src/tree.rs", "rank": 85, "score": 19.479524685936056 }, { "content": " Self::new(version, NibblePath::new(vec![]))\n\n }\n\n\n\n /// Gets the version.\n\n pub fn version(&self) -> Version {\n\n self.version\n\n }\n\n\n\n /// Gets the nibble path.\n\n pub(crate) fn nibble_path(&self) -> &NibblePath {\n\n &self.nibble_path\n\n }\n\n\n\n /// Generates a child node key based on this node key.\n\n pub(crate) fn gen_child_node_key(&self, version: Version, n: Nibble) -> Self {\n\n let mut node_nibble_path = self.nibble_path().clone();\n\n node_nibble_path.push(n);\n\n Self::new(version, node_nibble_path)\n\n }\n\n\n", "file_path": "src/node_type.rs", "rank": 86, "score": 19.478656732379132 }, { "content": " fn advance(&mut self) {\n\n assert!(!self.is_rightmost(), \"Advancing past rightmost child.\");\n\n self.next_child_to_visit <<= 1;\n\n while self.next_child_to_visit & self.children_bitmap == 0 {\n\n self.next_child_to_visit <<= 1;\n\n }\n\n }\n\n}\n\n/// The `JellyfishMerkleStream` 
implementation.\n\npub struct JellyfishMerkleStream<R> {\n\n /// The storage engine from which we can read nodes using node keys.\n\n reader: Arc<R>,\n\n\n\n /// The version of the tree this iterator is running on.\n\n version: Version,\n\n\n\n /// The stack used for depth first traversal.\n\n parent_stack: Vec<NodeVisitInfo>,\n\n\n\n /// Whether the iteration has finished. Usually this can be determined by checking whether\n", "file_path": "src/iterator.rs", "rank": 87, "score": 19.467072727799913 }, { "content": " } else {\n\n node.hash()\n\n }\n\n }\n\n\n\n /// The batch version of `put_value_sets`.\n\n pub async fn batch_put_value_sets(\n\n &self,\n\n value_sets: Vec<Vec<(KeyHash, OwnedValue)>>,\n\n node_hashes: Option<Vec<&HashMap<NibblePath, [u8; 32]>>>,\n\n first_version: Version,\n\n ) -> Result<(Vec<RootHash>, TreeUpdateBatch)> {\n\n let mut tree_cache = TreeCache::new(self.reader, first_version).await?;\n\n let hash_sets: Vec<_> = match node_hashes {\n\n Some(hashes) => hashes.into_iter().map(Some).collect(),\n\n None => (0..value_sets.len()).map(|_| None).collect(),\n\n };\n\n\n\n for (idx, (value_set, hash_set)) in\n\n itertools::zip_eq(value_sets.into_iter(), hash_sets.into_iter()).enumerate()\n", "file_path": "src/tree.rs", "rank": 88, "score": 19.124078305685508 }, { "content": "#[derive(Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]\n\n#[cfg_attr(any(test, feature = \"fuzzing\"), derive(Arbitrary))]\n\npub struct NodeKey {\n\n // The version at which the node is created.\n\n version: Version,\n\n // The nibble path this node represents in the tree.\n\n nibble_path: NibblePath,\n\n}\n\n\n\nimpl NodeKey {\n\n /// Creates a new `NodeKey`.\n\n pub(crate) fn new(version: Version, nibble_path: NibblePath) -> Self {\n\n Self {\n\n version,\n\n nibble_path,\n\n }\n\n }\n\n\n\n /// A shortcut to generate a node key consisting of a version and an empty nibble path.\n\n pub(crate) fn new_empty_path(version: Version) -> Self {\n", "file_path": "src/node_type.rs", "rank": 89, "score": 18.877859654707212 }, { "content": " /// / \\ | / \\\n\n /// / x | / x'\n\n /// o<-------------+- / \\\n\n /// / \\ | C D\n\n /// A B |\n\n /// storage (disk) | cache (memory)\n\n /// ```\n\n ///\n\n /// With this design, we are able to query the global state in persistent storage and\n\n /// generate the proposed tree delta based on a specific root hash and `value_set`. For\n\n /// example, if we want to execute another transaction `T_{i+1}'`, we can use the tree `S_i` in\n\n /// storage and apply the `value_set` of transaction `T_{i+1}`. Then if the storage commits\n\n /// the returned batch, the state `S_{i+1}` is ready to be read from the tree by calling\n\n /// [`get_with_proof`](struct.JellyfishMerkleTree.html#method.get_with_proof). 
Anything inside\n\n /// the batch is not reachable from public interfaces before being committed.\n\n pub async fn put_value_sets(\n\n &self,\n\n value_sets: Vec<Vec<(KeyHash, OwnedValue)>>,\n\n first_version: Version,\n\n ) -> Result<(Vec<RootHash>, TreeUpdateBatch)> {\n", "file_path": "src/tree.rs", "rank": 90, "score": 18.554161553092868 }, { "content": " tree_cache.put_node(node_key.clone(), new_leaf_node.clone())?;\n\n Ok((node_key, new_leaf_node))\n\n }\n\n\n\n /// Returns the value (if applicable) and the corresponding merkle proof.\n\n pub async fn get_with_proof(\n\n &self,\n\n key: KeyHash,\n\n version: Version,\n\n ) -> Result<(Option<OwnedValue>, SparseMerkleProof)> {\n\n // Empty tree just returns proof with no sibling hash.\n\n let mut next_node_key = NodeKey::new_empty_path(version);\n\n let mut siblings = vec![];\n\n let nibble_path = NibblePath::new(key.0.to_vec());\n\n let mut nibble_iter = nibble_path.nibbles();\n\n\n\n // We limit the number of loops here deliberately to avoid potential cyclic graph bugs\n\n // in the tree structure.\n\n for nibble_depth in 0..=ROOT_NIBBLE_HEIGHT {\n\n let next_node = get_node_async(self.reader, &next_node_key)\n", "file_path": "src/tree.rs", "rank": 91, "score": 18.532802688431275 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse std::{collections::BTreeMap, sync::Arc};\n\n\n\nuse proptest::{collection::btree_map, prelude::*};\n\nuse tokio::{runtime::Runtime, sync::RwLock};\n\n\n\nuse crate::{\n\n mock::MockTreeStore,\n\n restore::{JellyfishMerkleRestore, StateSnapshotReceiver},\n\n storage::TreeReader,\n\n tests::helper::init_mock_db,\n\n JellyfishMerkleTree, KeyHash, OwnedValue, RootHash, Version,\n\n};\n\n\n\nproptest! {\n\n #![proptest_config(ProptestConfig::with_cases(10))]\n\n\n\n #[test]\n", "file_path": "src/tests/restore.rs", "rank": 92, "score": 18.502114788268134 }, { "content": " expected_root_hash: RootHash,\n\n\n\n /// Whether to use the new internal node format where leaf counts are written.\n\n leaf_count_migration: bool,\n\n}\n\n\n\nimpl JellyfishMerkleRestore {\n\n pub async fn new<D: 'static + TreeReader + TreeWriter>(\n\n store: Arc<RwLock<D>>,\n\n version: Version,\n\n expected_root_hash: RootHash,\n\n leaf_count_migration: bool,\n\n ) -> Result<Self> {\n\n let tree_reader = Arc::clone(&store);\n\n let (partial_nodes, previous_leaf) = if let Some((node_key, leaf_node)) =\n\n tree_reader.read().await.get_rightmost_leaf().await?\n\n {\n\n // TODO: confirm rightmost leaf is at the desired version\n\n // If the system crashed in the middle of the previous restoration attempt, we need\n\n // to recover the partial nodes to the state right before the crash.\n", "file_path": "src/restore.rs", "rank": 93, "score": 18.443841129450607 }, { "content": "use std::{\n\n collections::{BTreeMap, HashMap},\n\n convert::TryInto,\n\n};\n\n\n\nuse anyhow::{bail, ensure, format_err, Result};\n\n\n\nuse crate::{\n\n bytes32ext::Bytes32Ext,\n\n node_type::{Child, Children, InternalNode, LeafNode, Node, NodeKey, NodeType},\n\n reader::get_node_async,\n\n storage::{TreeReader, TreeUpdateBatch},\n\n tree_cache::TreeCache,\n\n types::{\n\n nibble::{\n\n nibble_path::{skip_common_prefix, NibbleIterator, NibblePath},\n\n NibbleRangeIterator, ROOT_NIBBLE_HEIGHT,\n\n },\n\n proof::{SparseMerkleProof, SparseMerkleRangeProof},\n\n Version,\n", "file_path": "src/tree.rs", "rank": 94, "score": 18.443768700823178 }, { "content": "\n\n/// Contains types used to bridge a 
[`JellyfishMerkleTree`](crate::JellyfishMerkleTree)\n\n/// to the backing storage recording the tree's internal data.\n\npub mod storage {\n\n pub use node_type::{LeafNode, Node, NodeDecodeError, NodeKey};\n\n pub use reader::TreeReader;\n\n pub use writer::{\n\n NodeBatch, NodeStats, StaleNodeIndex, StaleNodeIndexBatch, TreeUpdateBatch, TreeWriter,\n\n };\n\n\n\n use super::*;\n\n}\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nmod tests;\n\n\n\n/// An error that occurs when the state root for a requested version is missing (e.g., because it was pruned).\n\n#[derive(Error, Debug)]\n\n#[error(\"Missing state root node at version {version}, probably pruned.\")]\n\npub struct MissingRootError {\n", "file_path": "src/lib.rs", "rank": 95, "score": 18.394999183861156 }, { "content": " }\n\n Poll::Pending => {\n\n self.future = Some(future); // Put the future back into `self.future`\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<R> futures::Stream for JellyfishMerkleStream<R>\n\nwhere\n\n R: TreeReader + Send + Sync + 'static,\n\n{\n\n type Item = Result<(KeyHash, OwnedValue)>;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n if self.done {\n\n return Poll::Ready(None);\n\n }\n\n\n", "file_path": "src/iterator.rs", "rank": 96, "score": 18.300790573548824 }, { "content": " /// Generates parent node key at the same version based on this node key.\n\n pub(crate) fn gen_parent_node_key(&self) -> Self {\n\n let mut node_nibble_path = self.nibble_path().clone();\n\n assert!(\n\n node_nibble_path.pop().is_some(),\n\n \"Current node key is root.\",\n\n );\n\n Self::new(self.version, node_nibble_path)\n\n }\n\n\n\n /// Sets the version to the given version.\n\n pub(crate) fn set_version(&mut self, version: Version) {\n\n self.version = version;\n\n }\n\n\n\n /// Serializes to bytes for physical storage enforcing the same order as that in memory.\n\n pub fn encode(&self) -> Result<Vec<u8>> {\n\n let mut out = vec![];\n\n out.write_u64::<BigEndian>(self.version())?;\n\n out.write_u8(self.nibble_path().num_nibbles() as u8)?;\n", "file_path": "src/node_type.rs", "rank": 97, "score": 18.126696752709602 }, { "content": " }\n\n\n\n /// Creates the [`Internal`](Node::Internal) variant.\n\n #[cfg(any(test, feature = \"fuzzing\"))]\n\n pub(crate) fn new_internal(children: Children) -> Self {\n\n Node::Internal(InternalNode::new(children))\n\n }\n\n\n\n /// Creates the [`Leaf`](Node::Leaf) variant.\n\n pub(crate) fn new_leaf(key_hash: KeyHash, value: Vec<u8>) -> Self {\n\n Node::Leaf(LeafNode::new(key_hash, value))\n\n }\n\n\n\n /// Returns `true` if the node is a leaf node.\n\n pub(crate) fn is_leaf(&self) -> bool {\n\n matches!(self, Node::Leaf(_))\n\n }\n\n\n\n /// Returns `NodeType`\n\n pub(crate) fn node_type(&self) -> NodeType {\n", "file_path": "src/node_type.rs", "rank": 98, "score": 18.03482036665063 }, { "content": "// Copyright (c) The Diem Core Contributors\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n//! This module implements the functionality to restore a\n\n//! [`JellyfishMerkleTree`](crate::JellyfishMerkleTree) from small chunks of\n\n//! 
key/value pairs.\n\n\n\nuse std::sync::Arc;\n\n\n\nuse anyhow::{bail, ensure, Result};\n\nuse futures::future::{BoxFuture, FutureExt};\n\nuse mirai_annotations::*;\n\nuse tokio::sync::RwLock;\n\n\n\nuse crate::{\n\n node_type::{\n\n get_child_and_sibling_half_start, Child, Children, InternalNode, LeafNode, Node, NodeKey,\n\n NodeType,\n\n },\n\n storage::{NodeBatch, TreeReader, TreeWriter},\n", "file_path": "src/restore.rs", "rank": 99, "score": 18.032498053578994 } ]
Rust
engine/src/types.rs
ivankabestwill/wirefilter
294292f53db18a5a3261118e93fad30bd9498b09
use lex::{expect, skip_space, Lex, LexResult, LexWith};
use rhs_types::{Bytes, IpRange, UninhabitedBool};
use serde::{Deserialize, Serialize};
use std::{
    cmp::Ordering,
    fmt::{self, Debug, Formatter},
    net::IpAddr,
    ops::RangeInclusive,
};
use strict_partial_ord::StrictPartialOrd;

fn lex_rhs_values<'i, T: Lex<'i>>(input: &'i str) -> LexResult<'i, Vec<T>> {
    let mut input = expect(input, "{")?;
    let mut res = Vec::new();
    loop {
        input = skip_space(input);

        if let Ok(rest) = expect(input, "}") {
            input = rest;
            return Ok((res, input));
        } else {
            let (item, rest) = T::lex(input)?;
            res.push(item);
            input = rest;
        }
    }
}

macro_rules! declare_types {
    ($(# $attrs:tt)* enum $name:ident $(<$lt:tt>)* { $($(# $vattrs:tt)* $variant:ident ( $ty:ty ) , )* }) => {
        $(# $attrs)*
        #[repr(u8)]
        pub enum $name $(<$lt>)* {
            $($(# $vattrs)* $variant($ty),)*
        }

        impl $(<$lt>)* GetType for $name $(<$lt>)* {
            fn get_type(&self) -> Type {
                match self {
                    $($name::$variant(_) => Type::$variant,)*
                }
            }
        }

        impl $(<$lt>)* Debug for $name $(<$lt>)* {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                match self {
                    $($name::$variant(inner) => Debug::fmt(inner, f),)*
                }
            }
        }
    };

    ($($(# $attrs:tt)* $name:ident ( $lhs_ty:ty | $rhs_ty:ty | $multi_rhs_ty:ty ) , )*) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
        #[repr(u8)]
        pub enum Type {
            $($(# $attrs)* $name,)*
        }

        pub trait GetType {
            fn get_type(&self) -> Type;
        }

        impl GetType for Type {
            fn get_type(&self) -> Type {
                *self
            }
        }

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Deserialize)]
            #[serde(untagged)]
            enum LhsValue<'a> {
                $($(# $attrs)* $name($lhs_ty),)*
            }
        }

        $(impl<'a> From<$lhs_ty> for LhsValue<'a> {
            fn from(value: $lhs_ty) -> Self {
                LhsValue::$name(value)
            }
        })*

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Serialize)]
            #[serde(untagged)]
            enum RhsValue {
                $($(# $attrs)* $name($rhs_ty),)*
            }
        }

        impl<'i> LexWith<'i, Type> for RhsValue {
            fn lex_with(input: &str, ty: Type) -> LexResult<'_, Self> {
                Ok(match ty {
                    $(Type::$name => {
                        let (value, input) = <$rhs_ty>::lex(input)?;
                        (RhsValue::$name(value), input)
                    })*
                })
            }
        }

        impl<'a> PartialOrd<RhsValue> for LhsValue<'a> {
            fn partial_cmp(&self, other: &RhsValue) -> Option<Ordering> {
                match (self, other) {
                    $((LhsValue::$name(lhs), RhsValue::$name(rhs)) => {
                        lhs.strict_partial_cmp(rhs)
                    },)*
                    _ => None,
                }
            }
        }

        impl<'a> StrictPartialOrd<RhsValue> for LhsValue<'a> {}

        impl<'a> PartialEq<RhsValue> for LhsValue<'a> {
            fn eq(&self, other: &RhsValue) -> bool {
                self.strict_partial_cmp(other) == Some(Ordering::Equal)
            }
        }

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Serialize)]
            #[serde(untagged)]
            enum RhsValues {
                $($(# $attrs)* $name(Vec<$multi_rhs_ty>),)*
            }
        }

        impl<'i> LexWith<'i, Type> for RhsValues {
            fn lex_with(input: &str, ty: Type) -> LexResult<'_, Self> {
                Ok(match ty {
                    $(Type::$name => {
                        let (value, input) = lex_rhs_values(input)?;
                        (RhsValues::$name(value), input)
                    })*
                })
            }
        }
    };
}

impl<'a> From<&'a str> for LhsValue<'a> {
    fn from(s: &'a str) -> Self {
        s.as_bytes().into()
    }
}

declare_types!(
    Ip(IpAddr | IpAddr | IpRange),
    Bytes(&'a [u8] | Bytes | Bytes),
    Int(i32 | i32 | RangeInclusive<i32>),
    Bool(bool | UninhabitedBool | UninhabitedBool),
);

#[test]
fn test_lhs_value_deserialize() {
    use std::str::FromStr;

    let ipv4: LhsValue<'_> = serde_json::from_str("\"127.0.0.1\"").unwrap();
    assert_eq!(ipv4, LhsValue::Ip(IpAddr::from_str("127.0.0.1").unwrap()));

    let ipv6: LhsValue<'_> = serde_json::from_str("\"::1\"").unwrap();
    assert_eq!(ipv6, LhsValue::Ip(IpAddr::from_str("::1").unwrap()));

    let bytes: LhsValue<'_> = serde_json::from_str("\"a JSON string with unicode ❤\"").unwrap();
    assert_eq!(
        bytes,
        LhsValue::Bytes(b"a JSON string with unicode \xE2\x9D\xA4")
    );

    assert!(
        serde_json::from_str::<LhsValue<'_>>("\"a JSON string with escaped-unicode \\u2764\"")
            .is_err(),
        "LhsValue can only handle borrowed bytes"
    );

    let bytes: LhsValue<'_> = serde_json::from_str("\"1337\"").unwrap();
    assert_eq!(bytes, LhsValue::Bytes(b"1337"));

    let integer: LhsValue<'_> = serde_json::from_str("1337").unwrap();
    assert_eq!(integer, LhsValue::Int(1337));

    let b: LhsValue<'_> = serde_json::from_str("false").unwrap();
    assert_eq!(b, LhsValue::Bool(false));
}
use lex::{expect, skip_space, Lex, LexResult, LexWith};
use rhs_types::{Bytes, IpRange, UninhabitedBool};
use serde::{Deserialize, Serialize};
use std::{
    cmp::Ordering,
    fmt::{self, Debug, Formatter},
    net::IpAddr,
    ops::RangeInclusive,
};
use strict_partial_ord::StrictPartialOrd;

fn lex_rhs_values<'i, T: Lex<'i>>(input: &'i str) -> LexResult<'i, Vec<T>> {
    let mut input = expect(input, "{")?;
    let mut res = Vec::new();
    loop {
        input = skip_space(input);
    }
}

macro_rules! declare_types {
    ($(# $attrs:tt)* enum $name:ident $(<$lt:tt>)* { $($(# $vattrs:tt)* $variant:ident ( $ty:ty ) , )* }) => {
        $(# $attrs)*
        #[repr(u8)]
        pub enum $name $(<$lt>)* {
            $($(# $vattrs)* $variant($ty),)*
        }

        impl $(<$lt>)* GetType for $name $(<$lt>)* {
            fn get_type(&self) -> Type {
                match self {
                    $($name::$variant(_) => Type::$variant,)*
                }
            }
        }

        impl $(<$lt>)* Debug for $name $(<$lt>)* {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                match self {
                    $($name::$variant(inner) => Debug::fmt(inner, f),)*
                }
            }
        }
    };

    ($($(# $attrs:tt)* $name:ident ( $lhs_ty:ty | $rhs_ty:ty | $multi_rhs_ty:ty ) , )*) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
        #[repr(u8)]
        pub enum Type {
            $($(# $attrs)* $name,)*
        }

        pub trait GetType {
            fn get_type(&self) -> Type;
        }

        impl GetType for Type {
            fn get_type(&self) -> Type {
                *self
            }
        }

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Deserialize)]
            #[serde(untagged)]
            enum LhsValue<'a> {
                $($(# $attrs)* $name($lhs_ty),)*
            }
        }

        $(impl<'a> From<$lhs_ty> for LhsValue<'a> {
            fn from(value: $lhs_ty) -> Self {
                LhsValue::$name(value)
            }
        })*

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Serialize)]
            #[serde(untagged)]
            enum RhsValue {
                $($(# $attrs)* $name($rhs_ty),)*
            }
        }

        impl<'i> LexWith<'i, Type> for RhsValue {
            fn lex_with(input: &str, ty: Type) -> LexResult<'_, Self> {
                Ok(match ty {
                    $(Type::$name => {
                        let (value, input) = <$rhs_ty>::lex(input)?;
                        (RhsValue::$name(value), input)
                    })*
                })
            }
        }

        impl<'a> PartialOrd<RhsValue> for LhsValue<'a> {
            fn partial_cmp(&self, other: &RhsValue) -> Option<Ordering> {
                match (self, other) {
                    $((LhsValue::$name(lhs), RhsValue::$name(rhs)) => {
                        lhs.strict_partial_cmp(rhs)
                    },)*
                    _ => None,
                }
            }
        }

        impl<'a> StrictPartialOrd<RhsValue> for LhsValue<'a> {}

        impl<'a> PartialEq<RhsValue> for LhsValue<'a> {
            fn eq(&self, other: &RhsValue) -> bool {
                self.strict_partial_cmp(other) == Some(Ordering::Equal)
            }
        }

        declare_types! {
            #[derive(PartialEq, Eq, Clone, Serialize)]
            #[serde(untagged)]
            enum RhsValues {
                $($(# $attrs)* $name(Vec<$multi_rhs_ty>),)*
            }
        }

        impl<'i> LexWith<'i, Type> for RhsValues {
            fn lex_with(input: &str, ty: Type) -> LexResult<'_, Self> {
                Ok(match ty {
                    $(Type::$name => {
                        let (value, input) = lex_rhs_values(input)?;
                        (RhsValues::$name(value), input)
                    })*
                })
            }
        }
    };
}

impl<'a> From<&'a str> for LhsValue<'a> {
    fn from(s: &'a str) -> Self {
        s.as_bytes().into()
    }
}

declare_types!(
    Ip(IpAddr | IpAddr | IpRange),
    Bytes(&'a [u8] | Bytes | Bytes),
    Int(i32 | i32 | RangeInclusive<i32>),
    Bool(bool | UninhabitedBool | UninhabitedBool),
);

#[test]
fn test_lhs_value_deserialize() {
    use std::str::FromStr;

    let ipv4: LhsValue<'_> = serde_json::from_str("\"127.0.0.1\"").unwrap();
    assert_eq!(ipv4, LhsValue::Ip(IpAddr::from_str("127.0.0.1").unwrap()));

    let ipv6: LhsValue<'_> = serde_json::from_str("\"::1\"").unwrap();
    assert_eq!(ipv6, LhsValue::Ip(IpAddr::from_str("::1").unwrap()));

    let bytes: LhsValue<'_> = serde_json::from_str("\"a JSON string with unicode ❤\"").unwrap();
    assert_eq!(
        bytes,
        LhsValue::Bytes(b"a JSON string with unicode \xE2\x9D\xA4")
    );

    assert!(
        serde_json::from_str::<LhsValue<'_>>("\"a JSON string with escaped-unicode \\u2764\"")
            .is_err(),
        "LhsValue can only handle borrowed bytes"
    );

    let bytes: LhsValue<'_> = serde_json::from_str("\"1337\"").unwrap();
    assert_eq!(bytes, LhsValue::Bytes(b"1337"));

    let integer: LhsValue<'_> = serde_json::from_str("1337").unwrap();
    assert_eq!(integer, LhsValue::Int(1337));

    let b: LhsValue<'_> = serde_json::from_str("false").unwrap();
    assert_eq!(b, LhsValue::Bool(false));
}
        if let Ok(rest) = expect(input, "}") {
            input = rest;
            return Ok((res, input));
        } else {
            let (item, rest) = T::lex(input)?;
            res.push(item);
            input = rest;
        }
if_condition
[ { "content": "fn lex_digits(input: &str) -> LexResult<'_, &str> {\n\n // Lex any supported digits (up to radix 16) for better error locations.\n\n take_while(input, \"digit\", |c| c.is_digit(16))\n\n}\n\n\n", "file_path": "engine/src/rhs_types/int.rs", "rank": 0, "score": 184976.2941145299 }, { "content": "pub fn take(input: &str, expected: usize) -> LexResult<'_, &str> {\n\n let mut chars = input.chars();\n\n for i in 0..expected {\n\n chars.next().ok_or_else(|| {\n\n (\n\n LexErrorKind::CountMismatch {\n\n name: \"character\",\n\n actual: i,\n\n expected,\n\n },\n\n input,\n\n )\n\n })?;\n\n }\n\n let rest = chars.as_str();\n\n Ok((span(input, rest), rest))\n\n}\n\n\n", "file_path": "engine/src/lex.rs", "rank": 1, "score": 175939.4041827986 }, { "content": "fn match_addr_or_cidr(input: &str) -> LexResult<'_, &str> {\n\n take_while(input, \"IP address character\", |c| match c {\n\n '0'..='9' | 'a'..='f' | 'A'..='F' | ':' | '.' | '/' => true,\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 2, "score": 173145.4763350632 }, { "content": "pub fn skip_space(input: &str) -> &str {\n\n input.trim_start_matches(SPACE_CHARS)\n\n}\n\n\n\n/// This macro generates enum declaration + lexer implementation.\n\n///\n\n/// It works by recursively processing variants one by one, while passing\n\n/// around intermediate state (partial declaration and lexer bodies).\n\nmacro_rules! lex_enum {\n\n // Branch for handling `SomeType => VariantName`.\n\n //\n\n // Creates a newtype variant `VariantName(SomeType)`.\n\n //\n\n // On the parser side, tries to parse `SomeType` and wraps into the variant\n\n // on success.\n\n (@decl $preamble:tt $name:ident $input:ident { $($decl:tt)* } { $($expr:tt)* } {\n\n $ty:ty => $item:ident,\n\n $($rest:tt)*\n\n }) => {\n\n lex_enum!(@decl $preamble $name $input {\n", "file_path": "engine/src/lex.rs", "rank": 3, "score": 170599.38602478488 }, { "content": "pub fn span<'i>(input: &'i str, rest: &'i str) -> &'i str {\n\n &input[..input.len() - rest.len()]\n\n}\n\n\n", "file_path": "engine/src/lex.rs", "rank": 5, "score": 162954.3854636584 }, { "content": "fn hex_byte(input: &str) -> LexResult<'_, u8> {\n\n fixed_byte(input, 2, 16)\n\n}\n\n\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 6, "score": 160462.79750794952 }, { "content": "fn oct_byte(input: &str) -> LexResult<'_, u8> {\n\n fixed_byte(input, 3, 8)\n\n}\n\n\n\nlex_enum!(ByteSeparator {\n\n \":\" => Colon,\n\n \"-\" => Dash,\n\n \".\" => Dot,\n\n});\n\n\n\nimpl<'i> Lex<'i> for Bytes {\n\n fn lex(mut input: &str) -> LexResult<'_, Self> {\n\n if let Ok(input) = expect(input, \"\\\"\") {\n\n let full_input = input;\n\n let mut res = String::new();\n\n let mut iter = input.chars();\n\n loop {\n\n match iter\n\n .next()\n\n .ok_or_else(|| (LexErrorKind::MissingEndingQuote, full_input))?\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 7, "score": 160462.79750794952 }, { "content": "pub fn expect<'i>(input: &'i str, s: &'static str) -> Result<&'i str, LexError<'i>> {\n\n if input.starts_with(s) {\n\n Ok(&input[s.len()..])\n\n } else {\n\n Err((LexErrorKind::ExpectedLiteral(s), input))\n\n }\n\n}\n\n\n\n// Tabs are harder to format as part of the error message because they have\n\n// a different printable width than other characters, and so become a common\n\n// source of issues in different compilers.\n\n//\n\n// It's not impossible to work around that limitation, but let's not bother\n\n// for now until someone really needs them (tabs vs spaces all the way 
down...).\n\nconst SPACE_CHARS: &[char] = &[' ', '\\r', '\\n'];\n\n\n", "file_path": "engine/src/lex.rs", "rank": 8, "score": 159593.33380547096 }, { "content": "fn parse_number<'i>((input, rest): (&'i str, &'i str), radix: u32) -> LexResult<'_, i32> {\n\n match i32::from_str_radix(input, radix) {\n\n Ok(res) => Ok((res, rest)),\n\n Err(err) => Err((LexErrorKind::ParseInt { err, radix }, input)),\n\n }\n\n}\n\n\n\nimpl<'i> Lex<'i> for i32 {\n\n fn lex(input: &str) -> LexResult<'_, Self> {\n\n if let Ok(input) = expect(input, \"0x\") {\n\n parse_number(lex_digits(input)?, 16)\n\n } else if input.starts_with('0') {\n\n // not using `expect` because we want to include `0` too\n\n parse_number(lex_digits(input)?, 8)\n\n } else {\n\n let without_neg = match expect(input, \"-\") {\n\n Ok(input) => input,\n\n Err(_) => input,\n\n };\n\n\n", "file_path": "engine/src/rhs_types/int.rs", "rank": 9, "score": 147936.4133737964 }, { "content": "fn fixed_byte(input: &str, digits: usize, radix: u32) -> LexResult<'_, u8> {\n\n let (digits, rest) = take(input, digits)?;\n\n match u8::from_str_radix(digits, radix) {\n\n Ok(b) => Ok((b, rest)),\n\n Err(err) => Err((LexErrorKind::ParseInt { err, radix }, digits)),\n\n }\n\n}\n\n\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 10, "score": 138772.6915829843 }, { "content": "fn parse_addr(input: &str) -> Result<IpAddr, LexError<'_>> {\n\n IpAddr::from_str(input).map_err(|err| {\n\n (\n\n LexErrorKind::ParseNetwork(NetworkParseError::AddrParseError(err)),\n\n input,\n\n )\n\n })\n\n}\n\n\n\nimpl<'i> Lex<'i> for IpAddr {\n\n fn lex(input: &str) -> LexResult<'_, Self> {\n\n let (input, rest) = match_addr_or_cidr(input)?;\n\n parse_addr(input).map(|res| (res, rest))\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Serialize, Debug)]\n\n#[serde(untagged)]\n\npub enum ExplicitIpRange {\n\n V4(RangeInclusive<Ipv4Addr>),\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 11, "score": 135694.85099885243 }, { "content": "trait Expr<'s>: Sized + Eq + Debug + for<'i> LexWith<'i, &'s Scheme> + Serialize {\n\n fn uses(&self, field: Field<'s>) -> bool;\n\n fn compile(self) -> CompiledExpr<'s>;\n\n}\n\n\n\n/// A parsed filter AST.\n\n///\n\n/// It's attached to its corresponding [`Scheme`](struct@Scheme) because all\n\n/// parsed fields are represented as indices and are valid only when\n\n/// [`ExecutionContext`](::ExecutionContext) is created from the same scheme.\n\n#[derive(PartialEq, Eq, Serialize, Clone)]\n\n#[serde(transparent)]\n\npub struct FilterAst<'s> {\n\n #[serde(skip)]\n\n scheme: &'s Scheme,\n\n\n\n op: CombinedExpr<'s>,\n\n}\n\n\n\nimpl<'s> Debug for FilterAst<'s> {\n", "file_path": "engine/src/ast/mod.rs", "rank": 12, "score": 103760.22092051001 }, { "content": "fn bench_string_comparisons(c: &mut Criterion) {\n\n FieldBench {\n\n field: \"ip.geoip.country\",\n\n filters: &[\n\n r#\"ip.geoip.country == \"GB\"\"#,\n\n r#\"ip.geoip.country in { \"AT\" \"BE\" \"BG\" \"HR\" \"CY\" \"CZ\" \"DK\" \"EE\" \"FI\" \"FR\" \"DE\" \"GR\" \"HU\" \"IE\" \"IT\" \"LV\" \"LT\" \"LU\" \"MT\" \"NL\" \"PL\" \"PT\" \"RO\" \"SK\" \"SI\" \"ES\" \"SE\" \"GB\" \"GF\" \"GP\" \"MQ\" \"ME\" \"YT\" \"RE\" \"MF\" \"GI\" \"AX\" \"PM\" \"GL\" \"BL\" \"SX\" \"AW\" \"CW\" \"WF\" \"PF\" \"NC\" \"TF\" \"AI\" \"BM\" \"IO\" \"VG\" \"KY\" \"FK\" \"MS\" \"PN\" \"SH\" \"GS\" \"TC\" \"AD\" \"LI\" \"MC\" \"SM\" \"VA\" \"JE\" \"GG\" \"GI\" \"CH\" }\"#,\n\n ],\n\n values: &[\"GB\", \"T1\"],\n\n }.run(c)\n\n}\n\n\n", "file_path": "engine/benches/bench.rs", "rank": 13, "score": 
88261.08435656843 }, { "content": "fn bench_int_comparisons(c: &mut Criterion) {\n\n FieldBench {\n\n field: \"tcp.port\",\n\n filters: &[\n\n \"tcp.port == 80\",\n\n \"tcp.port >= 1024\",\n\n \"tcp.port in { 80 8080 8880 2052 2082 2086 2095 }\",\n\n ],\n\n values: &[80, 8081],\n\n }\n\n .run(c)\n\n}\n\n\n", "file_path": "engine/benches/bench.rs", "rank": 14, "score": 88261.08435656843 }, { "content": "fn bench_string_matches(c: &mut Criterion) {\n\n FieldBench {\n\n field: \"http.user_agent\",\n\n filters: &[\n\n r#\"http.user_agent ~ \"(?i)googlebot/\\d+\\.\\d+\"\"#,\n\n r#\"http.user_agent ~ \"Googlebot\"\"#,\n\n r#\"http.user_agent contains \"Googlebot\"\"#\n\n ],\n\n values: &[\n\n \"Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; +http://www.google.com/bot.html) Safari/537.36\",\n\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36\"\n\n ]\n\n }.run(c)\n\n}\n\n\n\ncriterion_group! {\n\n name = field_benchmarks;\n\n config = Criterion::default();\n\n targets =\n\n bench_ip_comparisons,\n\n bench_int_comparisons,\n\n bench_string_comparisons,\n\n bench_string_matches,\n\n}\n\n\n\ncriterion_main!(field_benchmarks);\n", "file_path": "engine/benches/bench.rs", "rank": 15, "score": 88261.08435656843 }, { "content": "fn bench_ip_comparisons(c: &mut Criterion) {\n\n FieldBench {\n\n field: \"ip.addr\",\n\n filters: &[\n\n \"ip.addr == 173.245.48.1\",\n\n \"ip.addr == 2606:4700:4700::1111\",\n\n \"ip.addr >= 173.245.48.0 && ip.addr < 173.245.49.0\",\n\n \"ip.addr >= 2606:4700:: && ip.addr < 2606:4701::\",\n\n \"ip.addr in { 103.21.244.0/22 2405:8100::/32 104.16.0.0/12 2803:f800::/32 131.0.72.0/22 173.245.48.0/20 2405:b500::/32 172.64.0.0/13 190.93.240.0/20 103.22.200.0/22 2606:4700::/32 198.41.128.0/17 197.234.240.0/22 162.158.0.0/15 108.162.192.0/18 2c0f:f248::/32 2400:cb00::/32 103.31.4.0/22 2a06:98c0::/29 141.101.64.0/18 188.114.96.0/20 }\"\n\n ],\n\n values: &[\n\n IpAddr::from([127, 0, 0, 1]),\n\n IpAddr::from([0x2001, 0x0db8, 0x85a3, 0x0000, 0x0000, 0x8a2e, 0x0370, 0x7334]),\n\n IpAddr::from([173, 245, 48, 1]),\n\n IpAddr::from([0x2606, 0x4700, 0x4700, 0x0000, 0x0000, 0x0000, 0x0000, 0x1111]),\n\n ]\n\n }.run(c)\n\n}\n\n\n", "file_path": "engine/benches/bench.rs", "rank": 16, "score": 88261.08435656843 }, { "content": "pub fn complete<T>(res: LexResult<'_, T>) -> Result<T, LexError<'_>> {\n\n let (res, input) = res?;\n\n if input.is_empty() {\n\n Ok(res)\n\n } else {\n\n Err((LexErrorKind::EOF, input))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! 
assert_ok {\n\n ($s:expr, $res:expr, $rest:expr) => {{\n\n let expr = $s.unwrap();\n\n assert_eq!(expr, ($res, $rest));\n\n expr.0\n\n }};\n\n\n\n ($s:expr, $res:expr) => {\n\n assert_ok!($s, $res, \"\")\n\n };\n", "file_path": "engine/src/lex.rs", "rank": 17, "score": 82302.68504805892 }, { "content": "pub fn take_while<'i, F: Fn(char) -> bool>(\n\n input: &'i str,\n\n name: &'static str,\n\n f: F,\n\n) -> LexResult<'i, &'i str> {\n\n let mut iter = input.chars();\n\n loop {\n\n let rest = iter.as_str();\n\n match iter.next() {\n\n Some(c) if f(c) => {}\n\n _ => {\n\n return if rest.len() != input.len() {\n\n Ok((span(input, rest), rest))\n\n } else {\n\n Err((LexErrorKind::ExpectedName(name), input))\n\n };\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "engine/src/lex.rs", "rank": 18, "score": 77085.62932928359 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\nfn into_js_error(err: impl std::error::Error) -> JsValue {\n\n js_sys::Error::new(&err.to_string()).into()\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl Scheme {\n\n #[wasm_bindgen(constructor)]\n\n pub fn try_from(fields: &JsValue) -> Result<Scheme, JsValue> {\n\n fields.into_serde().map(Scheme).map_err(into_js_error)\n\n }\n\n\n\n pub fn parse(&self, s: &str) -> Result<JsValue, JsValue> {\n\n let filter = self.0.parse(s).map_err(into_js_error)?;\n\n JsValue::from_serde(&filter).map_err(into_js_error)\n\n }\n\n}\n", "file_path": "wasm/src/lib.rs", "rank": 19, "score": 76923.48239341736 }, { "content": "fn serialize_op_rhs<T: Serialize, S: Serializer>(\n\n op: &'static str,\n\n rhs: &T,\n\n ser: S,\n\n) -> Result<S::Ok, S::Error> {\n\n use serde::ser::SerializeStruct;\n\n\n\n let mut out = ser.serialize_struct(\"FieldOp\", 2)?;\n\n out.serialize_field(\"op\", op)?;\n\n out.serialize_field(\"rhs\", rhs)?;\n\n out.end()\n\n}\n\n\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 20, "score": 75321.71912049808 }, { "content": "#[test]\n\nfn test_lex() {\n\n fn addr<A: Into<IpAddr>>(addr: A) -> IpRange {\n\n IpRange::Cidr(IpCidr::new_host(addr.into()))\n\n }\n\n\n\n fn range<A: Into<IpAddr>>(range: RangeInclusive<A>) -> IpRange {\n\n let (first, last) = range.into_inner();\n\n match (first.into(), last.into()) {\n\n (IpAddr::V4(first), IpAddr::V4(last)) => {\n\n IpRange::Explicit(ExplicitIpRange::V4(first..=last))\n\n }\n\n (IpAddr::V6(first), IpAddr::V6(last)) => {\n\n IpRange::Explicit(ExplicitIpRange::V6(first..=last))\n\n }\n\n _ => panic!(\"Invalid inputs\"),\n\n }\n\n }\n\n\n\n fn cidr<A: Into<IpAddr>>(addr: A, len: u8) -> IpRange {\n\n IpRange::Cidr(IpCidr::new(addr.into(), len).unwrap())\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 21, "score": 72841.96810310514 }, { "content": "fn serialize_is_true<S: Serializer>(ser: S) -> Result<S::Ok, S::Error> {\n\n use serde::ser::SerializeStruct;\n\n\n\n let mut out = ser.serialize_struct(\"FieldOp\", 1)?;\n\n out.serialize_field(\"op\", \"IsTrue\")?;\n\n out.end()\n\n}\n\n\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 22, "score": 62770.02439707256 }, { "content": "fn serialize_contains<S: Serializer>(rhs: &Bytes, ser: S) -> Result<S::Ok, S::Error> {\n\n serialize_op_rhs(\"Contains\", rhs, ser)\n\n}\n\n\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 23, "score": 59818.84096205355 }, { "content": "fn serialize_matches<S: Serializer>(rhs: &Regex, ser: S) -> Result<S::Ok, S::Error> {\n\n serialize_op_rhs(\"Matches\", rhs, ser)\n\n}\n\n\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 24, "score": 59818.84096205355 }, { "content": "fn 
serialize_one_of<S: Serializer>(rhs: &RhsValues, ser: S) -> Result<S::Ok, S::Error> {\n\n serialize_op_rhs(\"OneOf\", rhs, ser)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize)]\n\npub struct FieldExpr<'s> {\n\n field: Field<'s>,\n\n\n\n #[serde(flatten)]\n\n op: FieldOp,\n\n}\n\n\n\nimpl<'i, 's> LexWith<'i, &'s Scheme> for FieldExpr<'s> {\n\n fn lex_with(input: &'i str, scheme: &'s Scheme) -> LexResult<'i, Self> {\n\n let initial_input = input;\n\n\n\n let (field, input) = Field::lex_with(input, scheme)?;\n\n let field_type = field.get_type();\n\n\n\n let (op, input) = if field_type == Type::Bool {\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 25, "score": 58793.05190549428 }, { "content": "pub trait Lex<'i>: Sized {\n\n fn lex(input: &'i str) -> LexResult<'i, Self>;\n\n}\n\n\n", "file_path": "engine/src/lex.rs", "rank": 26, "score": 58413.430141427474 }, { "content": "pub trait LexWith<'i, E>: Sized {\n\n fn lex_with(input: &'i str, extra: E) -> LexResult<'i, Self>;\n\n}\n\n\n\nimpl<'i, T: Lex<'i>, E> LexWith<'i, E> for T {\n\n fn lex_with(input: &'i str, _extra: E) -> LexResult<'i, Self> {\n\n Self::lex(input)\n\n }\n\n}\n\n\n", "file_path": "engine/src/lex.rs", "rank": 27, "score": 56350.38567420839 }, { "content": "fn main() {\n\n let filter = args()\n\n .nth(1)\n\n .expect(\"Expected an input as a command-line argument\");\n\n\n\n let scheme = Scheme! {\n\n ip: Ip,\n\n str: Bytes,\n\n int: Int,\n\n bool: Bool,\n\n };\n\n\n\n match scheme.parse(&filter) {\n\n Ok(res) => println!(\"{:#?}\", res),\n\n Err(err) => println!(\"{}\", err),\n\n }\n\n}\n", "file_path": "engine/examples/cli.rs", "rank": 28, "score": 47124.06571350734 }, { "content": "#[test]\n\nfn test_field() {\n\n let scheme = &Scheme! {\n\n x: Bytes,\n\n x.y.z0: Int,\n\n is_TCP: Bool,\n\n };\n\n\n\n assert_ok!(\n\n Field::lex_with(\"x;\", scheme),\n\n scheme.get_field_index(\"x\").unwrap(),\n\n \";\"\n\n );\n\n\n\n assert_ok!(\n\n Field::lex_with(\"x.y.z0-\", scheme),\n\n scheme.get_field_index(\"x.y.z0\").unwrap(),\n\n \"-\"\n\n );\n\n\n\n assert_ok!(\n", "file_path": "engine/src/scheme.rs", "rank": 29, "score": 45993.93634581908 }, { "content": "#[test]\n\nfn test() {\n\n use std::str::FromStr;\n\n\n\n assert_ok!(i32::lex(\"0\"), 0i32, \"\");\n\n assert_ok!(i32::lex(\"0-\"), 0i32, \"-\");\n\n assert_ok!(i32::lex(\"0x1f5+\"), 501i32, \"+\");\n\n assert_ok!(i32::lex(\"0123;\"), 83i32, \";\");\n\n assert_ok!(i32::lex(\"78!\"), 78i32, \"!\");\n\n assert_ok!(i32::lex(\"0xefg\"), 239i32, \"g\");\n\n assert_ok!(i32::lex(\"-12-\"), -12i32, \"-\");\n\n assert_err!(\n\n i32::lex(\"-2147483649!\"),\n\n LexErrorKind::ParseInt {\n\n err: i32::from_str(\"-2147483649\").unwrap_err(),\n\n radix: 10\n\n },\n\n \"-2147483649\"\n\n );\n\n assert_err!(\n\n i32::lex(\"2147483648!\"),\n", "file_path": "engine/src/rhs_types/int.rs", "rank": 30, "score": 44941.658834066984 }, { "content": "#[test]\n\nfn test() {\n\n use super::field_expr::FieldExpr;\n\n use execution_context::ExecutionContext;\n\n use lex::complete;\n\n\n\n let scheme = &Scheme! 
{\n\n t: Bool,\n\n f: Bool,\n\n };\n\n\n\n let ctx = &mut ExecutionContext::new(scheme);\n\n\n\n let t_expr = CombinedExpr::Simple(SimpleExpr::Field(\n\n complete(FieldExpr::lex_with(\"t\", scheme)).unwrap(),\n\n ));\n\n\n\n let t_expr = || t_expr.clone();\n\n\n\n let f_expr = CombinedExpr::Simple(SimpleExpr::Field(\n\n complete(FieldExpr::lex_with(\"f\", scheme)).unwrap(),\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 31, "score": 44941.658834066984 }, { "content": "#[test]\n\nfn test_parse_error() {\n\n use indoc::indoc;\n\n\n\n let scheme = &Scheme! { num: Int };\n\n\n\n {\n\n let err = scheme.parse(\"xyz\").unwrap_err();\n\n assert_eq!(\n\n err,\n\n ParseError {\n\n kind: LexErrorKind::UnknownField(UnknownFieldError),\n\n input: \"xyz\",\n\n line_number: 0,\n\n span_start: 0,\n\n span_len: 3\n\n }\n\n );\n\n assert_eq!(\n\n err.to_string(),\n\n indoc!(\n", "file_path": "engine/src/scheme.rs", "rank": 32, "score": 44941.658834066984 }, { "content": "#[test]\n\nfn test() {\n\n assert_ok!(\n\n Bytes::lex(\"01:2e:f3-77.12;\"),\n\n Bytes::from(vec![0x01, 0x2E, 0xF3, 0x77, 0x12]),\n\n \";\"\n\n );\n\n\n\n assert_ok!(\n\n Bytes::lex(r#\"\"s\\\\t\\\"r\\x0A\\000t\"\"#),\n\n Bytes::from(\"s\\\\t\\\"r\\n\\0t\".to_owned())\n\n );\n\n\n\n assert_err!(\n\n Bytes::lex(\"01:4x;\"),\n\n LexErrorKind::ParseInt {\n\n err: u8::from_str_radix(\"4x\", 16).unwrap_err(),\n\n radix: 16,\n\n },\n\n \"4x\"\n\n );\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 33, "score": 44941.658834066984 }, { "content": "#[test]\n\nfn test() {\n\n use execution_context::ExecutionContext;\n\n use lex::complete;\n\n\n\n let scheme = &Scheme! { t: Bool };\n\n\n\n let ctx = &mut ExecutionContext::new(scheme);\n\n ctx.set_field_value(\"t\", true).unwrap();\n\n\n\n let t_expr = SimpleExpr::Field(complete(FieldExpr::lex_with(\"t\", scheme)).unwrap());\n\n let t_expr = || t_expr.clone();\n\n\n\n {\n\n let expr = assert_ok!(SimpleExpr::lex_with(\"t\", scheme), t_expr());\n\n\n\n assert_json!(\n\n expr,\n\n {\n\n \"field\": \"t\",\n\n \"op\": \"IsTrue\"\n", "file_path": "engine/src/ast/simple_expr.rs", "rank": 34, "score": 44941.658834066984 }, { "content": "#[test]\n\nfn test_field_type_override() {\n\n let mut scheme = Scheme! { foo: Int };\n\n\n\n assert_eq!(\n\n scheme.add_field(\"foo\".into(), Type::Bytes),\n\n Err(FieldRedefinitionError(\"foo\".into()))\n\n )\n\n}\n", "file_path": "engine/src/scheme.rs", "rank": 35, "score": 43959.456499728796 }, { "content": "#[test]\n\nfn test() {\n\n let expr = assert_ok!(\n\n Regex::lex(r#\"\"[a-z\"\\]]+\\d{1,10}\\\"\";\"#),\n\n Regex::from_str(r#\"[a-z\"\\]]+\\d{1,10}\"\"#).unwrap(),\n\n \";\"\n\n );\n\n\n\n assert_json!(expr, r#\"[a-z\"\\]]+\\d{1,10}\"\"#);\n\n\n\n assert_err!(\n\n Regex::lex(r#\"\"abcd\\\"#),\n\n LexErrorKind::MissingEndingQuote,\n\n \"abcd\\\\\"\n\n );\n\n}\n", "file_path": "engine/src/rhs_types/regex/mod.rs", "rank": 36, "score": 43959.456499728796 }, { "content": "#[test]\n\n#[should_panic(expected = \"attempt to redefine field foo\")]\n\nfn test_static_field_type_override() {\n\n Scheme! 
{ foo: Int, foo: Int };\n\n}\n\n\n", "file_path": "engine/src/scheme.rs", "rank": 38, "score": 43040.5550455182 }, { "content": "#[test]\n\nfn test_strict_partial_ord() {\n\n let ips = &[\n\n IpAddr::from([10, 0, 0, 0]),\n\n IpAddr::from([127, 0, 0, 1]),\n\n IpAddr::from([0, 0, 0, 0, 0, 0, 0, 1]),\n\n IpAddr::from([0, 0, 0, 0, 0, 0, 0, 2]),\n\n ];\n\n\n\n for lhs in ips {\n\n for rhs in ips {\n\n if lhs.is_ipv4() == rhs.is_ipv4() {\n\n assert_eq!(lhs.strict_partial_cmp(rhs), lhs.partial_cmp(rhs));\n\n } else {\n\n assert_eq!(lhs.strict_partial_cmp(rhs), None);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 39, "score": 42179.026093776105 }, { "content": "#[test]\n\nfn test_field_value_type_mismatch() {\n\n let scheme = Scheme! { foo: Int };\n\n\n\n let mut ctx = ExecutionContext::new(&scheme);\n\n\n\n assert_eq!(\n\n ctx.set_field_value(\"foo\", LhsValue::Bool(false)),\n\n Err(FieldValueTypeMismatchError {\n\n field_type: Type::Int,\n\n value_type: Type::Bool\n\n })\n\n );\n\n}\n", "file_path": "engine/src/execution_context.rs", "rank": 40, "score": 42179.026093776105 }, { "content": "use cidr::NetworkParseError;\n\nuse failure::Fail;\n\nuse rhs_types::RegexError;\n\nuse scheme::UnknownFieldError;\n\nuse std::num::ParseIntError;\n\nuse types::Type;\n\n\n\n#[derive(Debug, PartialEq, Fail)]\n\npub enum LexErrorKind {\n\n #[fail(display = \"expected {}\", _0)]\n\n ExpectedName(&'static str),\n\n\n\n #[fail(display = \"expected literal {:?}\", _0)]\n\n ExpectedLiteral(&'static str),\n\n\n\n #[fail(display = \"{} while parsing with radix {}\", err, radix)]\n\n ParseInt {\n\n #[cause]\n\n err: ParseIntError,\n\n radix: u32,\n", "file_path": "engine/src/lex.rs", "rank": 41, "score": 33332.49017032207 }, { "content": "\n\n #[fail(display = \"{}\", _0)]\n\n UnknownField(#[cause] UnknownFieldError),\n\n\n\n #[fail(display = \"cannot use this operation type {:?}\", field_type)]\n\n UnsupportedOp { field_type: Type },\n\n\n\n #[fail(display = \"incompatible range bounds\")]\n\n IncompatibleRangeBounds,\n\n\n\n #[fail(display = \"unrecognised input\")]\n\n EOF,\n\n}\n\n\n\npub type LexError<'i> = (LexErrorKind, &'i str);\n\n\n\npub type LexResult<'i, T> = Result<(T, &'i str), LexError<'i>>;\n\n\n", "file_path": "engine/src/lex.rs", "rank": 42, "score": 33331.239215580856 }, { "content": " }) => {\n\n lex_enum!(@decl $preamble $name $input {\n\n $($decl)*\n\n $item $(= $value)*,\n\n } {\n\n $($expr)*\n\n $(if let Ok($input) = $crate::lex::expect($input, $s) {\n\n return Ok(($name::$item, $input));\n\n })+\n\n } { $($rest)* });\n\n };\n\n\n\n // Internal finish point for declaration + lexer generation.\n\n //\n\n // This is invoked when no more variants are left to process.\n\n // At this point declaration and lexer body are considered complete.\n\n (@decl { $($preamble:tt)* } $name:ident $input:ident $decl:tt { $($expr:stmt)* } {}) => {\n\n #[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize)]\n\n $($preamble)*\n\n pub enum $name $decl\n", "file_path": "engine/src/lex.rs", "rank": 43, "score": 33330.88086814959 }, { "content": "\n\n impl<'i> $crate::lex::Lex<'i> for $name {\n\n fn lex($input: &'i str) -> $crate::lex::LexResult<'_, Self> {\n\n $($expr)*\n\n Err((\n\n $crate::lex::LexErrorKind::ExpectedName(stringify!($name)),\n\n $input\n\n ))\n\n }\n\n }\n\n };\n\n\n\n // The public entry point to the macro.\n\n ($(# $attrs:tt)* $name:ident $items:tt) => {\n\n lex_enum!(@decl {\n\n $(# $attrs)*\n\n } $name input {} {} $items);\n\n };\n\n}\n\n\n", "file_path": 
"engine/src/lex.rs", "rank": 44, "score": 33330.501358503076 }, { "content": " $($decl)*\n\n $item($ty),\n\n } {\n\n $($expr)*\n\n if let Ok((res, $input)) = $crate::lex::Lex::lex($input) {\n\n return Ok(($name::$item(res), $input));\n\n }\n\n } { $($rest)* });\n\n };\n\n\n\n // Branch for handling `\"some_string\" | \"other_string\" => VariantName`.\n\n // (also supports optional constant value via `... => VariantName = 42`)\n\n //\n\n // Creates a unit variant `VariantName`.\n\n //\n\n // On the parser side, tries to parse either of the given string values,\n\n // and returns the variant if any of them succeeded.\n\n (@decl $preamble:tt $name:ident $input:ident { $($decl:tt)* } { $($expr:tt)* } {\n\n $($s:tt)|+ => $item:ident $(= $value:expr)*,\n\n $($rest:tt)*\n", "file_path": "engine/src/lex.rs", "rank": 45, "score": 33327.02137458014 }, { "content": " },\n\n\n\n #[fail(display = \"{}\", _0)]\n\n ParseNetwork(#[cause] NetworkParseError),\n\n\n\n #[fail(display = \"{}\", _0)]\n\n ParseRegex(#[cause] RegexError),\n\n\n\n #[fail(display = \"expected \\\", xHH or OOO after \\\\\")]\n\n InvalidCharacterEscape,\n\n\n\n #[fail(display = \"could not find an ending quote\")]\n\n MissingEndingQuote,\n\n\n\n #[fail(display = \"expected {} {}s, but found {}\", expected, name, actual)]\n\n CountMismatch {\n\n name: &'static str,\n\n actual: usize,\n\n expected: usize,\n\n },\n", "file_path": "engine/src/lex.rs", "rank": 46, "score": 33322.48760916161 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmacro_rules! assert_err {\n\n ($s:expr, $kind:expr, $span:expr) => {\n\n assert_eq!($s, Err(($kind, $span)))\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! assert_json {\n\n ($expr:expr, $json:tt) => {\n\n assert_eq!(\n\n ::serde_json::to_value(&$expr).unwrap(),\n\n ::serde_json::json!($json)\n\n );\n\n };\n\n}\n", "file_path": "engine/src/lex.rs", "rank": 47, "score": 33320.373384820086 }, { "content": "fn unwrap_json_result<T>(filter_ast: &FilterAst<'_>, result: serde_json::Result<T>) -> T {\n\n // Filter serialisation must never fail.\n\n result.unwrap_or_else(|err| panic!(\"{} while serializing filter {:#?}\", err, filter_ast))\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn wirefilter_get_filter_hash(filter_ast: &FilterAst<'_>) -> u64 {\n\n let mut hasher = FnvHasher::default();\n\n // Serialize JSON to our Write-compatible wrapper around FnvHasher,\n\n // effectively calculating a hash for our filter in a streaming fashion\n\n // that is as stable as the JSON representation itself\n\n // (instead of relying on #[derive(Hash)] which would be tied to impl details).\n\n let result = serde_json::to_writer(HasherWrite(&mut hasher), filter_ast);\n\n unwrap_json_result(filter_ast, result);\n\n hasher.finish()\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn wirefilter_serialize_filter_to_json(\n\n filter_ast: &FilterAst<'_>,\n", "file_path": "ffi/src/lib.rs", "rank": 48, "score": 30298.17645577532 }, { "content": "use super::{ExternPtrRepr, ExternSliceRepr};\n\nuse std::str;\n\n\n\n/// This structure provides FFI-safe representation for Rust string slice\n\n/// pointers.\n\n///\n\n/// The representation is guaranteed to be the same as [`ExternSliceRepr`]\n\n/// for regular bytes ([`u8`]), but adds extra conversion checks for UTF-8.\n\n///\n\n/// Example C definition:\n\n/// ```\n\n/// struct Str {\n\n/// const uint8_t *data;\n\n/// size_t length;\n\n/// };\n\n/// ```\n\n#[repr(transparent)]\n\n#[derive(Clone, Copy)]\n\npub struct ExternStrRepr(ExternSliceRepr<u8>);\n\n\n", "file_path": 
"ffi/src/transfer_types/raw_ptr_repr/str.rs", "rank": 49, "score": 28222.91397195327 }, { "content": "impl From<*mut str> for ExternStrRepr {\n\n #[allow(clippy::not_unsafe_ptr_arg_deref)]\n\n fn from(ptr: *mut str) -> Self {\n\n let bytes: *mut [u8] = unsafe { (*ptr).as_bytes_mut() };\n\n ExternStrRepr(bytes.into())\n\n }\n\n}\n\n\n\nimpl ExternPtrRepr for str {\n\n type Repr = ExternStrRepr;\n\n\n\n unsafe fn from_extern_repr_unchecked(repr: Self::Repr) -> *mut str {\n\n let bytes = ExternPtrRepr::from_extern_repr_unchecked(repr.0);\n\n str::from_utf8_unchecked_mut(&mut *bytes)\n\n }\n\n\n\n fn from_extern_repr(repr: Self::Repr) -> *mut str {\n\n let bytes = ExternPtrRepr::from_extern_repr(repr.0);\n\n // Make sure that strings coming via FFI are UTF-8 compatible.\n\n str::from_utf8_mut(unsafe { &mut *bytes }).unwrap()\n\n }\n\n}\n", "file_path": "ffi/src/transfer_types/raw_ptr_repr/str.rs", "rank": 50, "score": 28222.81035424664 }, { "content": "impl<'s> Serialize for Field<'s> {\n\n fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {\n\n self.name().serialize(ser)\n\n }\n\n}\n\n\n\nimpl<'s> Debug for Field<'s> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.name())\n\n }\n\n}\n\n\n\nimpl<'i, 's> LexWith<'i, &'s Scheme> for Field<'s> {\n\n fn lex_with(mut input: &'i str, scheme: &'s Scheme) -> LexResult<'i, Self> {\n\n let initial_input = input;\n\n\n\n loop {\n\n input = take_while(input, \"identifier character\", |c| {\n\n c.is_ascii_alphanumeric() || c == '_'\n\n })?\n", "file_path": "engine/src/scheme.rs", "rank": 51, "score": 22.739758563089417 }, { "content": " self.as_str() == other.as_str()\n\n }\n\n}\n\n\n\nimpl Eq for Regex {}\n\n\n\nimpl Debug for Regex {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.write_str(self.as_str())\n\n }\n\n}\n\n\n\nimpl<'i> Lex<'i> for Regex {\n\n fn lex(input: &str) -> LexResult<'_, Self> {\n\n let input = expect(input, \"\\\"\")?;\n\n let mut regex_buf = String::new();\n\n let mut in_char_class = false;\n\n let (regex_str, input) = {\n\n let mut iter = input.chars();\n\n loop {\n", "file_path": "engine/src/rhs_types/regex/mod.rs", "rank": 52, "score": 22.220664372355714 }, { "content": "use cfg_if::cfg_if;\n\nuse lex::{expect, span, Lex, LexErrorKind, LexResult};\n\nuse serde::{Serialize, Serializer};\n\nuse std::{\n\n fmt::{self, Debug, Formatter},\n\n str::FromStr,\n\n};\n\n\n\ncfg_if! 
{\n\n if #[cfg(feature = \"regex\")] {\n\n mod imp_real;\n\n pub use self::imp_real::*;\n\n } else {\n\n mod imp_stub;\n\n pub use self::imp_stub::*;\n\n }\n\n}\n\n\n\nimpl PartialEq for Regex {\n\n fn eq(&self, other: &Regex) -> bool {\n", "file_path": "engine/src/rhs_types/regex/mod.rs", "rank": 53, "score": 20.290089732779133 }, { "content": "use lex::{expect, take, Lex, LexErrorKind, LexResult};\n\nuse serde::Serialize;\n\nuse std::{\n\n borrow::Borrow,\n\n fmt::{self, Debug, Formatter},\n\n hash::{Hash, Hasher},\n\n ops::Deref,\n\n str,\n\n};\n\nuse strict_partial_ord::StrictPartialOrd;\n\n\n\n#[derive(PartialEq, Eq, Clone, Serialize)]\n\n#[serde(untagged)]\n\npub enum Bytes {\n\n Str(Box<str>),\n\n Raw(Box<[u8]>),\n\n}\n\n\n\n// We need custom `Hash` consistent with `Borrow` invariants.\n\n// We can get away with `Eq` invariant though because we do want\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 54, "score": 18.861654694135982 }, { "content": "use std::fmt;\n\nuse failure::Fail;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, PartialEq, Fail)]\n\npub enum Error {}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match *self {}\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Regex(String);\n\n\n\nimpl FromStr for Regex {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Error> {\n", "file_path": "engine/src/rhs_types/regex/imp_stub.rs", "rank": 55, "score": 17.60109968604162 }, { "content": "use ast::FilterAst;\n\nuse failure::Fail;\n\nuse fnv::FnvBuildHasher;\n\nuse indexmap::map::{Entry, IndexMap};\n\nuse lex::{complete, expect, span, take_while, LexErrorKind, LexResult, LexWith};\n\nuse serde::{Deserialize, Serialize, Serializer};\n\nuse std::{\n\n cmp::{max, min},\n\n error::Error,\n\n fmt::{self, Debug, Display, Formatter},\n\n ptr,\n\n};\n\nuse types::{GetType, Type};\n\n\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub(crate) struct Field<'s> {\n\n scheme: &'s Scheme,\n\n index: usize,\n\n}\n\n\n", "file_path": "engine/src/scheme.rs", "rank": 57, "score": 16.208989798201017 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.op.fmt(f)\n\n }\n\n}\n\n\n\nimpl<'i, 's> LexWith<'i, &'s Scheme> for FilterAst<'s> {\n\n fn lex_with(input: &'i str, scheme: &'s Scheme) -> LexResult<'i, Self> {\n\n let (op, input) = CombinedExpr::lex_with(input, scheme)?;\n\n Ok((FilterAst { scheme, op }, input))\n\n }\n\n}\n\n\n\nimpl<'s> FilterAst<'s> {\n\n /// Recursively checks whether a [`FilterAst`] uses a given field name.\n\n ///\n\n /// This is useful to lazily initialise expensive fields only if necessary.\n\n pub fn uses(&self, field_name: &str) -> Result<bool, UnknownFieldError> {\n\n self.scheme\n\n .get_field_index(field_name)\n\n .map(|field| self.op.uses(field))\n\n }\n\n\n\n /// Compiles a [`FilterAst`] into a [`Filter`].\n\n pub fn compile(self) -> Filter<'s> {\n\n Filter::new(self.op.compile(), self.scheme)\n\n }\n\n}\n", "file_path": "engine/src/ast/mod.rs", "rank": 58, "score": 15.307466324799751 }, { "content": "mod combined_expr;\n\nmod field_expr;\n\nmod simple_expr;\n\n\n\nuse self::combined_expr::CombinedExpr;\n\nuse filter::{CompiledExpr, Filter};\n\nuse lex::{LexResult, LexWith};\n\nuse scheme::{Field, Scheme, UnknownFieldError};\n\nuse serde::Serialize;\n\nuse std::fmt::{self, Debug};\n\n\n", "file_path": "engine/src/ast/mod.rs", "rank": 59, "score": 15.055890668444004 }, { "content": "use cidr::{Cidr, IpCidr, Ipv4Cidr, Ipv6Cidr, NetworkParseError};\n\nuse 
lex::{take_while, Lex, LexError, LexErrorKind, LexResult};\n\nuse serde::Serialize;\n\nuse std::{\n\n cmp::Ordering,\n\n net::{IpAddr, Ipv4Addr, Ipv6Addr},\n\n ops::RangeInclusive,\n\n str::FromStr,\n\n};\n\nuse strict_partial_ord::StrictPartialOrd;\n\n\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 60, "score": 15.049710684039098 }, { "content": " _ => {\n\n return Err((\n\n LexErrorKind::InvalidCharacterEscape,\n\n &input[..c.len_utf8()],\n\n ));\n\n }\n\n });\n\n }\n\n '\"' => return Ok((res.into(), iter.as_str())),\n\n c => res.push(c),\n\n };\n\n }\n\n } else {\n\n let mut res = Vec::new();\n\n loop {\n\n let (b, rest) = hex_byte(input)?;\n\n res.push(b);\n\n input = rest;\n\n if let Ok((_, rest)) = ByteSeparator::lex(input) {\n\n input = rest;\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 61, "score": 14.519254634798783 }, { "content": "\n\nimpl<'s> CombinedExpr<'s> {\n\n fn lex_combining_op(input: &str) -> (Option<CombiningOp>, &str) {\n\n match CombiningOp::lex(skip_space(input)) {\n\n Ok((op, input)) => (Some(op), skip_space(input)),\n\n Err(_) => (None, input),\n\n }\n\n }\n\n\n\n fn lex_more_with_precedence<'i>(\n\n self,\n\n scheme: &'s Scheme,\n\n min_prec: Option<CombiningOp>,\n\n mut lookahead: (Option<CombiningOp>, &'i str),\n\n ) -> LexResult<'i, Self> {\n\n let mut lhs = self;\n\n\n\n while let Some(op) = lookahead.0 {\n\n let mut rhs = SimpleExpr::lex_with(lookahead.1, scheme)\n\n .map(|(op, input)| (CombinedExpr::Simple(op), input))?;\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 62, "score": 13.985088915080864 }, { "content": "use lex::{Lex, LexResult};\n\nuse serde::Serialize;\n\nuse std::{borrow::Borrow, cmp::Ordering};\n\nuse strict_partial_ord::StrictPartialOrd;\n\n\n\n/// [Uninhabited / empty type](https://doc.rust-lang.org/nomicon/exotic-sizes.html#empty-types)\n\n/// for `bool` with traits we need for RHS values.\n\n#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize)]\n\npub enum UninhabitedBool {}\n\n\n\nimpl Borrow<bool> for UninhabitedBool {\n\n fn borrow(&self) -> &bool {\n\n match *self {}\n\n }\n\n}\n\n\n\nimpl PartialEq<UninhabitedBool> for bool {\n\n fn eq(&self, other: &UninhabitedBool) -> bool {\n\n match *other {}\n\n }\n", "file_path": "engine/src/rhs_types/bool.rs", "rank": 63, "score": 13.022325675209263 }, { "content": "use super::{simple_expr::SimpleExpr, CompiledExpr, Expr};\n\nuse lex::{skip_space, Lex, LexResult, LexWith};\n\nuse scheme::{Field, Scheme};\n\nuse serde::Serialize;\n\n\n\nlex_enum!(#[derive(PartialOrd, Ord)] CombiningOp {\n\n \"or\" | \"||\" => Or,\n\n \"xor\" | \"^^\" => Xor,\n\n \"and\" | \"&&\" => And,\n\n});\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize)]\n\n#[serde(untagged)]\n\npub enum CombinedExpr<'s> {\n\n Simple(SimpleExpr<'s>),\n\n Combining {\n\n op: CombiningOp,\n\n items: Vec<CombinedExpr<'s>>,\n\n },\n\n}\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 64, "score": 12.290207064979448 }, { "content": "use super::{combined_expr::CombinedExpr, field_expr::FieldExpr, CompiledExpr, Expr};\n\nuse lex::{expect, skip_space, Lex, LexResult, LexWith};\n\nuse scheme::{Field, Scheme};\n\nuse serde::Serialize;\n\n\n\nlex_enum!(UnaryOp {\n\n \"not\" | \"!\" => Not,\n\n});\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize)]\n\n#[serde(untagged)]\n\npub enum SimpleExpr<'s> {\n\n Field(FieldExpr<'s>),\n\n Parenthesized(Box<CombinedExpr<'s>>),\n\n Unary {\n\n op: UnaryOp,\n\n arg: Box<SimpleExpr<'s>>,\n\n },\n\n}\n\n\n", "file_path": "engine/src/ast/simple_expr.rs", 
"rank": 65, "score": 12.10417228831787 }, { "content": "use super::{CompiledExpr, Expr};\n\nuse fnv::FnvBuildHasher;\n\nuse heap_searcher::HeapSearcher;\n\nuse indexmap::IndexSet;\n\nuse lex::{skip_space, span, Lex, LexErrorKind, LexResult, LexWith};\n\nuse memmem::Searcher;\n\nuse range_set::RangeSet;\n\nuse rhs_types::{Bytes, ExplicitIpRange, Regex};\n\nuse scheme::{Field, Scheme};\n\nuse serde::{Serialize, Serializer};\n\nuse std::{cmp::Ordering, net::IpAddr};\n\nuse strict_partial_ord::StrictPartialOrd;\n\nuse types::{GetType, LhsValue, RhsValue, RhsValues, Type};\n\n\n\nconst LESS: u8 = 0b001;\n\nconst GREATER: u8 = 0b010;\n\nconst EQUAL: u8 = 0b100;\n\n\n\nlex_enum!(#[repr(u8)] OrderingOp {\n\n \"eq\" | \"==\" => Equal = EQUAL,\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 66, "score": 11.580996182936047 }, { "content": " V6(RangeInclusive<Ipv6Addr>),\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Serialize, Debug)]\n\n#[serde(untagged)]\n\npub enum IpRange {\n\n Explicit(ExplicitIpRange),\n\n Cidr(IpCidr),\n\n}\n\n\n\nimpl<'i> Lex<'i> for IpRange {\n\n fn lex(input: &str) -> LexResult<'_, Self> {\n\n let (chunk, rest) = match_addr_or_cidr(input)?;\n\n\n\n // check for \"..\" before trying to lex an address\n\n let range = if let Some(split_pos) = chunk.find(\"..\") {\n\n let first = parse_addr(&chunk[..split_pos])?;\n\n let last = parse_addr(&chunk[split_pos + \"..\".len()..])?;\n\n\n\n IpRange::Explicit(match (first, last) {\n", "file_path": "engine/src/rhs_types/ip.rs", "rank": 67, "score": 11.51351987349307 }, { "content": " ']' if in_char_class => {\n\n in_char_class = false;\n\n regex_buf.push(']');\n\n }\n\n c => {\n\n regex_buf.push(c);\n\n }\n\n };\n\n }\n\n };\n\n match Regex::from_str(&regex_buf) {\n\n Ok(regex) => Ok((regex, input)),\n\n Err(err) => Err((LexErrorKind::ParseRegex(err), regex_str)),\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for Regex {\n\n fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {\n\n self.as_str().serialize(ser)\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "engine/src/rhs_types/regex/mod.rs", "rank": 68, "score": 11.45761371861143 }, { "content": "\n\nimpl<'i> Error for ParseError<'i> {}\n\n\n\nimpl<'i> ParseError<'i> {\n\n pub(crate) fn new(mut input: &'i str, (kind, span): (LexErrorKind, &'i str)) -> Self {\n\n let mut span_start = span.as_ptr() as usize - input.as_ptr() as usize;\n\n\n\n let (line_number, line_start) = input[..span_start]\n\n .match_indices('\\n')\n\n .map(|(pos, _)| pos + 1)\n\n .scan(0, |line_number, line_start| {\n\n *line_number += 1;\n\n Some((*line_number, line_start))\n\n })\n\n .last()\n\n .unwrap_or_default();\n\n\n\n input = &input[line_start..];\n\n\n\n span_start -= line_start;\n", "file_path": "engine/src/scheme.rs", "rank": 70, "score": 11.028459724049627 }, { "content": "use lex::{expect, span, take_while, Lex, LexErrorKind, LexResult};\n\nuse std::ops::RangeInclusive;\n\nuse strict_partial_ord::StrictPartialOrd;\n\n\n", "file_path": "engine/src/rhs_types/int.rs", "rank": 71, "score": 10.895283045597253 }, { "content": "use std::alloc::System;\n\n\n\n// Most of our usage will be via FFI as a dynamic library, so we're interested\n\n// in performance with system allocator and not jemalloc.\n\n#[global_allocator]\n\nstatic A: System = System;\n\n\n\nextern crate criterion;\n\nextern crate wirefilter;\n\n\n\nuse criterion::{\n\n criterion_group, criterion_main, Bencher, Benchmark, Criterion, ParameterizedBenchmark,\n\n};\n\nuse std::{fmt::Debug, net::IpAddr};\n\nuse 
wirefilter::{ExecutionContext, GetType, LhsValue, Scheme};\n\n\n", "file_path": "engine/benches/bench.rs", "rank": 72, "score": 10.849952569494253 }, { "content": "\n\nimpl From<Bytes> for Box<[u8]> {\n\n fn from(bytes: Bytes) -> Self {\n\n match bytes {\n\n Bytes::Str(s) => s.into_boxed_bytes(),\n\n Bytes::Raw(b) => b,\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for Bytes {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Bytes::Str(s) => s.fmt(f),\n\n Bytes::Raw(b) => {\n\n for (i, b) in b.iter().cloned().enumerate() {\n\n if i != 0 {\n\n write!(f, \":\")?;\n\n }\n\n write!(f, \"{:02X}\", b)?;\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 73, "score": 10.695678643866891 }, { "content": " let (_, rest) = lex_digits(without_neg)?;\n\n\n\n parse_number((span(input, rest), rest), 10)\n\n }\n\n }\n\n}\n\n\n\nimpl<'i> Lex<'i> for RangeInclusive<i32> {\n\n fn lex(input: &str) -> LexResult<'_, Self> {\n\n let initial_input = input;\n\n let (first, input) = i32::lex(input)?;\n\n let (last, input) = if let Ok(input) = expect(input, \"..\") {\n\n i32::lex(input)?\n\n } else {\n\n (first, input)\n\n };\n\n if last < first {\n\n return Err((\n\n LexErrorKind::IncompatibleRangeBounds,\n\n span(initial_input, input),\n\n ));\n\n }\n\n Ok((first..=last, input))\n\n }\n\n}\n\n\n\nimpl StrictPartialOrd for i32 {}\n\n\n", "file_path": "engine/src/rhs_types/int.rs", "rank": 74, "score": 10.331178518624867 }, { "content": " drop(s);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn wirefilter_parse_filter<'s, 'i>(\n\n scheme: &'s Scheme,\n\n input: ExternallyAllocatedStr<'i>,\n\n) -> ParsingResult<'s> {\n\n match scheme.parse(input.into_ref()) {\n\n Ok(filter) => ParsingResult::from(filter),\n\n Err(err) => ParsingResult::from(err),\n\n }\n\n}\n\n\n\n/// Wrapper for Hasher that allows using Write API (e.g. 
with serializer).\n\n#[derive(Default)]\n", "file_path": "ffi/src/lib.rs", "rank": 75, "score": 10.168287781577332 }, { "content": "impl<'i, 's> LexWith<'i, &'s Scheme> for SimpleExpr<'s> {\n\n fn lex_with(input: &'i str, scheme: &'s Scheme) -> LexResult<'i, Self> {\n\n Ok(if let Ok(input) = expect(input, \"(\") {\n\n let input = skip_space(input);\n\n let (op, input) = CombinedExpr::lex_with(input, scheme)?;\n\n let input = skip_space(input);\n\n let input = expect(input, \")\")?;\n\n (SimpleExpr::Parenthesized(Box::new(op)), input)\n\n } else if let Ok((op, input)) = UnaryOp::lex(input) {\n\n let input = skip_space(input);\n\n let (arg, input) = SimpleExpr::lex_with(input, scheme)?;\n\n (\n\n SimpleExpr::Unary {\n\n op,\n\n arg: Box::new(arg),\n\n },\n\n input,\n\n )\n\n } else {\n\n let (op, input) = FieldExpr::lex_with(input, scheme)?;\n", "file_path": "engine/src/ast/simple_expr.rs", "rank": 76, "score": 10.1127520911776 }, { "content": " {\n\n '\\\\' => {\n\n let input = iter.as_str();\n\n\n\n let c = iter\n\n .next()\n\n .ok_or_else(|| (LexErrorKind::MissingEndingQuote, full_input))?;\n\n\n\n res.push(match c {\n\n '\"' | '\\\\' => c,\n\n 'x' => {\n\n let (b, input) = hex_byte(iter.as_str())?;\n\n iter = input.chars();\n\n b as char\n\n }\n\n '0'..='7' => {\n\n let (b, input) = oct_byte(input)?;\n\n iter = input.chars();\n\n b as char\n\n }\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 77, "score": 10.013854440348531 }, { "content": " ExternallyAllocatedStr::from(\"num1\"),\n\n 42,\n\n );\n\n\n\n wirefilter_add_int_value_to_execution_context(\n\n &mut exec_context,\n\n ExternallyAllocatedStr::from(\"num2\"),\n\n 1337,\n\n );\n\n\n\n exec_context\n\n }\n\n\n\n fn parse_filter<'s>(scheme: &'s Scheme, input: &'static str) -> ParsingResult<'s> {\n\n wirefilter_parse_filter(scheme, ExternallyAllocatedStr::from(input))\n\n }\n\n\n\n fn match_filter(\n\n input: &'static str,\n\n scheme: &Scheme,\n", "file_path": "ffi/src/lib.rs", "rank": 78, "score": 9.814869563253833 }, { "content": " op,\n\n items: vec![lhs, rhs.0],\n\n };\n\n }\n\n }\n\n\n\n if lookahead.0 < min_prec {\n\n // pretend we haven't seen an operator if its precedence is\n\n // outside of our limits\n\n lookahead = (None, rhs.1);\n\n }\n\n }\n\n\n\n Ok((lhs, lookahead.1))\n\n }\n\n}\n\n\n\nimpl<'i, 's> LexWith<'i, &'s Scheme> for CombinedExpr<'s> {\n\n fn lex_with(input: &'i str, scheme: &'s Scheme) -> LexResult<'i, Self> {\n\n let (lhs, input) = SimpleExpr::lex_with(input, scheme)?;\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 79, "score": 9.634532941251187 }, { "content": "#[derive(Debug, PartialEq, Fail)]\n\n#[fail(display = \"unknown field\")]\n\npub struct UnknownFieldError;\n\n\n\n/// An error that occurs when previously defined field gets redefined.\n\n#[derive(Debug, PartialEq, Fail)]\n\n#[fail(display = \"attempt to redefine field {}\", _0)]\n\npub struct FieldRedefinitionError(String);\n\n\n\n/// An opaque filter parsing error associated with the original input.\n\n///\n\n/// For now, you can just print it in a debug or a human-readable fashion.\n\n#[derive(Debug, PartialEq)]\n\npub struct ParseError<'i> {\n\n kind: LexErrorKind,\n\n input: &'i str,\n\n line_number: usize,\n\n span_start: usize,\n\n span_len: usize,\n\n}\n", "file_path": "engine/src/scheme.rs", "rank": 80, "score": 9.548293588677325 }, { "content": "use std::str::FromStr;\n\n\n\npub use regex::Error;\n\n\n\n#[derive(Clone)]\n\npub struct Regex(regex::bytes::Regex);\n\n\n\nimpl FromStr for Regex {\n\n type Err = 
Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Error> {\n\n ::regex::bytes::RegexBuilder::new(s)\n\n .unicode(false)\n\n .build()\n\n .map(Regex)\n\n }\n\n}\n\n\n\nimpl Regex {\n\n pub fn is_match(&self, text: &[u8]) -> bool {\n\n self.0.is_match(text)\n\n }\n\n\n\n pub fn as_str(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "engine/src/rhs_types/regex/imp_real.rs", "rank": 81, "score": 9.432224044801089 }, { "content": " let mut span_len = span.len();\n\n\n\n if let Some(line_end) = input.find('\\n') {\n\n input = &input[..line_end];\n\n span_len = min(span_len, line_end - span_start);\n\n }\n\n\n\n ParseError {\n\n kind,\n\n input,\n\n line_number,\n\n span_start,\n\n span_len,\n\n }\n\n }\n\n}\n\n\n\nimpl<'i> Display for ParseError<'i> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n writeln!(\n", "file_path": "engine/src/scheme.rs", "rank": 82, "score": 9.397730586761272 }, { "content": " use rhs_types::IpRange;\n\n use std::net::IpAddr;\n\n\n\n lazy_static! {\n\n static ref SCHEME: Scheme = Scheme! {\n\n http.host: Bytes,\n\n ip.addr: Ip,\n\n ssl: Bool,\n\n tcp.port: Int,\n\n };\n\n }\n\n\n\n fn field(name: &'static str) -> Field<'static> {\n\n SCHEME.get_field_index(name).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_is_true() {\n\n let expr = assert_ok!(\n\n FieldExpr::lex_with(\"ssl\", &SCHEME),\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 83, "score": 9.106336360456845 }, { "content": " .1;\n\n\n\n match expect(input, \".\") {\n\n Ok(rest) => input = rest,\n\n Err(_) => break,\n\n };\n\n }\n\n\n\n let name = span(initial_input, input);\n\n\n\n let field = scheme\n\n .get_field_index(name)\n\n .map_err(|err| (LexErrorKind::UnknownField(err), name))?;\n\n\n\n Ok((field, input))\n\n }\n\n}\n\n\n\nimpl<'s> Field<'s> {\n\n pub fn name(&self) -> &'s str {\n", "file_path": "engine/src/scheme.rs", "rank": 85, "score": 8.75607097275675 }, { "content": "}\n\n\n\nimpl PartialOrd<UninhabitedBool> for bool {\n\n fn partial_cmp(&self, other: &UninhabitedBool) -> Option<Ordering> {\n\n match *other {}\n\n }\n\n}\n\n\n\nimpl StrictPartialOrd<UninhabitedBool> for bool {}\n\n\n\nimpl<'i> Lex<'i> for UninhabitedBool {\n\n fn lex(_input: &str) -> LexResult<'_, Self> {\n\n unreachable!()\n\n }\n\n}\n", "file_path": "engine/src/rhs_types/bool.rs", "rank": 86, "score": 8.733453755354713 }, { "content": "\n\n assert!(re.is_match(version.into_ref()));\n\n }\n\n\n\n #[test]\n\n fn filter_uses() {\n\n let scheme = create_scheme();\n\n\n\n {\n\n let filter = parse_filter(\n\n &scheme,\n\n r#\"num1 > 41 && num2 == 1337 && ip1 != 192.168.0.1 && str2 ~ \"yo\\d+\"\"#,\n\n )\n\n .unwrap();\n\n\n\n assert!(wirefilter_filter_uses(\n\n &filter,\n\n ExternallyAllocatedStr::from(\"num1\")\n\n ));\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 87, "score": 8.621461123201446 }, { "content": " let before_char = iter.as_str();\n\n match iter\n\n .next()\n\n .ok_or_else(|| (LexErrorKind::MissingEndingQuote, input))?\n\n {\n\n '\\\\' => {\n\n if let Some(c) = iter.next() {\n\n if in_char_class || c != '\"' {\n\n regex_buf.push('\\\\');\n\n }\n\n regex_buf.push(c);\n\n }\n\n }\n\n '\"' if !in_char_class => {\n\n break (span(input, before_char), iter.as_str());\n\n }\n\n '[' if !in_char_class => {\n\n in_char_class = true;\n\n regex_buf.push('[');\n\n }\n", "file_path": "engine/src/rhs_types/regex/mod.rs", "rank": 88, "score": 8.483060275868079 }, { "content": "}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn wirefilter_get_version() -> StaticRustAllocatedString {\n\n 
StaticRustAllocatedString::from(VERSION)\n\n}\n\n\n\n#[cfg(test)]\n\nmod ffi_test {\n\n use super::*;\n\n use regex::Regex;\n\n\n\n fn create_scheme() -> RustBox<Scheme> {\n\n let mut scheme = wirefilter_create_scheme();\n\n\n\n wirefilter_add_type_field_to_scheme(\n\n &mut scheme,\n\n ExternallyAllocatedStr::from(\"ip1\"),\n\n Type::Ip,\n\n );\n", "file_path": "ffi/src/lib.rs", "rank": 89, "score": 8.01632862130547 }, { "content": " Some(ordering) => self.matches(ordering),\n\n // only `!=` should be true for incomparable types\n\n None => self == OrderingOp::NotEqual,\n\n }\n\n }\n\n}\n\n\n\nlex_enum!(IntOp {\n\n \"&\" | \"bitwise_and\" => BitwiseAnd,\n\n});\n\n\n\nlex_enum!(BytesOp {\n\n \"contains\" => Contains,\n\n \"~\" | \"matches\" => Matches,\n\n});\n\n\n\nlex_enum!(ComparisonOp {\n\n \"in\" => In,\n\n OrderingOp => Ordering,\n\n IntOp => Int,\n\n BytesOp => Bytes,\n\n});\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize)]\n\n#[serde(untagged)]\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 90, "score": 7.936023558075213 }, { "content": "\n\n loop {\n\n lookahead = Self::lex_combining_op(rhs.1);\n\n if lookahead.0 <= Some(op) {\n\n break;\n\n }\n\n rhs = rhs\n\n .0\n\n .lex_more_with_precedence(scheme, lookahead.0, lookahead)?;\n\n }\n\n\n\n match lhs {\n\n CombinedExpr::Combining {\n\n op: lhs_op,\n\n ref mut items,\n\n } if lhs_op == op => {\n\n items.push(rhs.0);\n\n }\n\n _ => {\n\n lhs = CombinedExpr::Combining {\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 91, "score": 7.918928105872222 }, { "content": " (FieldOp::IsTrue, input)\n\n } else {\n\n let (op, input) = ComparisonOp::lex(skip_space(input))?;\n\n\n\n let input_after_op = input;\n\n\n\n let input = skip_space(input);\n\n\n\n match (field_type, op) {\n\n (_, ComparisonOp::In) => {\n\n let (rhs, input) = RhsValues::lex_with(input, field_type)?;\n\n (FieldOp::OneOf(rhs), input)\n\n }\n\n (_, ComparisonOp::Ordering(op)) => {\n\n let (rhs, input) = RhsValue::lex_with(input, field_type)?;\n\n (FieldOp::Ordering { op, rhs }, input)\n\n }\n\n (Type::Int, ComparisonOp::Int(op)) => {\n\n let (rhs, input) = i32::lex(input)?;\n\n (FieldOp::Int { op, rhs }, input)\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 92, "score": 7.870027807350461 }, { "content": "use super::ExternPtrRepr;\n\nuse libc::size_t;\n\nuse std::slice;\n\n\n\n/// This structure provides FFI-safe representation for Rust slice pointers\n\n/// by splitting them into data and length parts.\n\n///\n\n/// Example C definition for `ExternSliceRepr<u8>`:\n\n/// ```\n\n/// struct ByteSlice {\n\n/// const uint8_t *data;\n\n/// size_t length;\n\n/// };\n\n/// ```\n\n#[repr(C)]\n\npub struct ExternSliceRepr<T> {\n\n data: *mut T,\n\n length: size_t,\n\n}\n\n\n", "file_path": "ffi/src/transfer_types/raw_ptr_repr/slice.rs", "rank": 93, "score": 7.785183624151369 }, { "content": " }\n\n (Type::Bytes, ComparisonOp::Bytes(op)) => match op {\n\n BytesOp::Contains => {\n\n let (bytes, input) = Bytes::lex(input)?;\n\n (FieldOp::Contains(bytes), input)\n\n }\n\n BytesOp::Matches => {\n\n let (regex, input) = Regex::lex(input)?;\n\n (FieldOp::Matches(regex), input)\n\n }\n\n },\n\n _ => {\n\n return Err((\n\n LexErrorKind::UnsupportedOp { field_type },\n\n span(initial_input, input_after_op),\n\n ));\n\n }\n\n }\n\n };\n\n\n", "file_path": "engine/src/ast/field_expr.rs", "rank": 94, "score": 7.755722414656348 }, { "content": " let lookahead = Self::lex_combining_op(input);\n\n CombinedExpr::Simple(lhs).lex_more_with_precedence(scheme, None, 
lookahead)\n\n }\n\n}\n\n\n\nimpl<'s> Expr<'s> for CombinedExpr<'s> {\n\n fn uses(&self, field: Field<'s>) -> bool {\n\n match self {\n\n CombinedExpr::Simple(op) => op.uses(field),\n\n CombinedExpr::Combining { items, .. } => items.iter().any(|op| op.uses(field)),\n\n }\n\n }\n\n\n\n fn compile(self) -> CompiledExpr<'s> {\n\n match self {\n\n CombinedExpr::Simple(op) => op.compile(),\n\n CombinedExpr::Combining { op, items } => {\n\n let items = items\n\n .into_iter()\n\n .map(|item| item.compile())\n", "file_path": "engine/src/ast/combined_expr.rs", "rank": 95, "score": 7.7391618269210625 }, { "content": " assert!(wirefilter_filter_uses(\n\n &filter,\n\n ExternallyAllocatedStr::from(\"ip1\")\n\n ));\n\n\n\n assert!(wirefilter_filter_uses(\n\n &filter,\n\n ExternallyAllocatedStr::from(\"str2\")\n\n ));\n\n\n\n assert!(!wirefilter_filter_uses(\n\n &filter,\n\n ExternallyAllocatedStr::from(\"str1\")\n\n ));\n\n\n\n assert!(!wirefilter_filter_uses(\n\n &filter,\n\n ExternallyAllocatedStr::from(\"ip2\")\n\n ));\n\n\n\n wirefilter_free_parsed_filter(filter);\n\n }\n\n\n\n wirefilter_free_scheme(scheme);\n\n }\n\n}\n", "file_path": "ffi/src/lib.rs", "rank": 96, "score": 7.548741501634594 }, { "content": " Some((index, ..)) => Ok(Field {\n\n scheme: self,\n\n index,\n\n }),\n\n None => Err(UnknownFieldError),\n\n }\n\n }\n\n\n\n pub(crate) fn get_field_count(&self) -> usize {\n\n self.fields.len()\n\n }\n\n\n\n /// Parses a filter into an AST form.\n\n pub fn parse<'i>(&'s self, input: &'i str) -> Result<FilterAst<'s>, ParseError<'i>> {\n\n complete(FilterAst::lex_with(input.trim(), self)).map_err(|err| ParseError::new(input, err))\n\n }\n\n}\n\n\n\n/// A convenience macro for constructing a [`Scheme`](struct@Scheme) with static\n\n/// contents.\n", "file_path": "engine/src/scheme.rs", "rank": 97, "score": 7.504822190122187 }, { "content": "extern crate wirefilter;\n\n\n\nuse std::env::args;\n\nuse wirefilter::Scheme;\n\n\n", "file_path": "engine/examples/cli.rs", "rank": 98, "score": 7.423652439772343 }, { "content": " Bytes::lex(r#\"\"\\01😢\"\"#),\n\n LexErrorKind::ParseInt {\n\n err: u8::from_str_radix(\"01😢\", 8).unwrap_err(),\n\n radix: 8,\n\n },\n\n \"01😢\"\n\n );\n\n\n\n assert_err!(\n\n Bytes::lex(r#\"\"\\x3😢\"\"#),\n\n LexErrorKind::ParseInt {\n\n err: u8::from_str_radix(\"3😢\", 16).unwrap_err(),\n\n radix: 16,\n\n },\n\n \"3😢\"\n\n );\n\n\n\n assert_err!(\n\n Bytes::lex(\"12:3😢\"),\n\n LexErrorKind::ParseInt {\n\n err: u8::from_str_radix(\"3😢\", 16).unwrap_err(),\n\n radix: 16,\n\n },\n\n \"3😢\"\n\n );\n\n}\n", "file_path": "engine/src/rhs_types/bytes.rs", "rank": 99, "score": 7.406381839017225 } ]
Rust
src/main.rs
jmccrae/simple-rust-web
88ac2fc0c0d5ca91c05d8b3543a9cc00a0e43bcd
extern crate clap; extern crate iron; extern crate handlebars; extern crate params; extern crate percent_encoding; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; mod renderer; mod templates; use clap::{Arg, App}; use handlebars::Handlebars; use iron::middleware::Handler; use iron::prelude::*; use params::Params; use percent_encoding::percent_decode; use renderer::*; use serde::Serialize; use templates::*; use std::collections::HashMap; use std::str::from_utf8; struct Server { hbars : Handlebars, renderers : Vec<(&'static str, Box<Renderer>)> } impl Server { fn new() -> Server { let mut hb = Handlebars::new(); hb.register_template_string("layout", from_utf8(LAYOUT).expect("Layout is not valid utf-8")). expect("Layout is not valid template"); for &(name, template) in TEMPLATES.iter() { hb.register_template_string(name, from_utf8(template).expect( &format!("{} is not valid UTF-8", name))). expect(&format!("{} is not a valid template", name)); } Server { hbars : hb, renderers : Vec::new() } } #[allow(dead_code)] fn add_static(&mut self, path : &'static str, name : &str, content : &'static [u8]) { self.renderers.push((path, Box::new(StaticRenderer::new(name.to_string(), content)))) } #[allow(dead_code)] fn add_renderer(&mut self, path: &'static str, renderer : Box<Renderer>) { self.renderers.push((path, renderer)); } #[allow(dead_code)] fn add_translator<A>(&mut self, path: &'static str, name: &str, template : &'static str, translator : Box<Translator<A>>) where A : Serialize + 'static { self.renderers.push((path, Box::new(TranslatorRenderer(name.to_string(), template.to_string(), translator)))) } } impl Handler for Server { fn handle(&self, req : &mut Request) -> IronResult<Response> { for &(ref path, ref renderer) in self.renderers.iter() { match match_path(path, &req.url.path()) { Some(args) => { let depth = req.url.path().len(); let params = req.get_ref::<Params>().unwrap(); return renderer.render(args, params, &self.hbars, depth); }, None => {} } } return render_error(&self.hbars, from_utf8(templates::NOT_FOUND). expect("Invalid UTF-8 in 404").to_string(), iron::status::NotFound); } } fn match_path(pattern : &str, path : &Vec<&str>) -> Option<HashMap<String,String>> { let mut captures = HashMap::new(); let p : Vec<&str> = pattern.split("/").collect(); if p.len() != path.len() { None } else { for (p1,p2) in p.iter().zip(path.iter()) { if p1.starts_with(":") { captures.insert(p1[1..].to_string(), urldecode(p2)); } else if p1 != p2 { return None; } } Some(captures) } } fn urldecode(s : &str) -> String { percent_decode(s.as_bytes()).decode_utf8().unwrap().into_owned() } fn main() { let matches = App::new(APP_TITLE). version(VERSION). author(AUTHOR). about(ABOUT). arg(Arg::with_name("port") .short("p") .long("port") .value_name("PORT") .help("The port to run the server on") .takes_value(true)). 
get_matches(); let port : u16 = matches.value_of("port").and_then(|pstr| { pstr.parse::<u16>().ok() }).unwrap_or(3000); let mut server = Server::new(); init(&mut server); let iron = Iron::new(server); iron.http(("localhost",port)).unwrap(); } static APP_TITLE : &'static str = "My App"; static VERSION : &'static str = "0.1"; static AUTHOR : &'static str = "John McCrae <john@mccr.ae>"; static ABOUT : &'static str = "Simple framework for making Rust Webapps"; struct TestTranslator; impl Translator<String> for TestTranslator { fn convert(&self, v : HashMap<String, String>) -> Result<String,TranslatorError> { Ok(v["test"].clone()) } } fn init(server : &mut Server) { server.add_static("", "Index", INDEX); server.add_translator("foo", "bar", "template", Box::new(TestTranslator)); }
extern crate clap; extern crate iron; extern crate handlebars; extern crate params; extern crate percent_encoding; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; mod renderer; mod templates; use clap::{Arg, App}; use handlebars::Handlebars; use iron::middleware::Handler; use iron::prelude::*; use params::Params; use percent_encoding::percent_decode; use renderer::*; use serde::Serialize; use templates::*; use std::collections::HashMap; use std::str::from_utf8; struct Server { hbars : Handlebars, renderers : Vec<(&'static str, Box<Renderer>)> } impl Server { fn new() -> Server { let mut hb = Handlebars::new(); hb.register_template_string("layout", from_utf8(LAYOUT).expect("Layout is not valid utf-8")). expect("Layout is not valid template"); for &(name, template) in TEMPLATES.iter() { hb.register_template_string(name, from_utf8(template).expect( &format!("{} is not valid UTF-8", name))). expect(&format!("{} is not a valid template", name)); } Server { hbars : hb, renderers : Vec::new() } } #[allow(dead_code)] fn add_static(&mut self, path : &'static str, name : &str, content : &'static [u8]) { self.renderers.push((path, Box::new(StaticRenderer::new(name.to_string(), content)))) } #[allow(dead_code)] fn add_renderer(&mut self, path: &'static str, renderer : Box<Renderer>) { self.renderers.push((path, renderer)); } #[allow(dead_code)] fn add_translator<A>(&mut self, path: &'static str, name: &str, template : &'static str, translator : Box<Translator<A>>) where A : Serialize + 'static { self.renderers.push((path, Box::new(TranslatorRenderer(name.to_string(), template.to_string(), translator)))) } } impl Handler for Server { fn handle(&self, req : &mut Request) -> IronResult<Response> { for &(ref path, ref renderer) in self.renderers.iter() { match match_path(path, &req.url.path()) { Some(args) => { let depth = req.url.path().len(); let params = req.get_ref::<Params>().unwrap(); return renderer.render(args, params, &self.hbars, depth); }, None => {} } } return render_error(&self.hbars, from_utf8(templates::NOT_FOUND). expect("Invalid UTF-8 in 404").to_string(), iron::status::NotFound); } } fn match_path(pattern : &str, path : &Vec<&str>) -> Option<HashMap<String,String>> { let mut captures = HashMap::new(); let p : Vec<&str> = pattern.split("/").collect(); if p.len() != path.len() { None } else { for (p1,p2) in p.iter().zip(path.iter()) { if p1.starts_with(":") { captures.insert(p1[1..].to_string(), urldecode(p2)); } else if p1 != p2 { return None; } } Some(captures) } } fn urldecode(s : &str) -> String { percent_decode(s.as_bytes()).decode_utf8().unwrap().into_owned() } fn main() { let matches = App::new(APP_TITLE). version(VERSION). author(AUTHOR). about(ABOUT). arg(Arg::with_name("port") .short("p") .long("port") .value_name("PORT") .help("The port to run the server on") .takes_value(true)). get_matches(); let port : u16 = matches.value_of("port").and_then(|pstr| { pstr.parse::<u16>().ok() }).unwrap_or(3000);
static APP_TITLE : &'static str = "My App"; static VERSION : &'static str = "0.1"; static AUTHOR : &'static str = "John McCrae <john@mccr.ae>"; static ABOUT : &'static str = "Simple framework for making Rust Webapps"; struct TestTranslator; impl Translator<String> for TestTranslator { fn convert(&self, v : HashMap<String, String>) -> Result<String,TranslatorError> { Ok(v["test"].clone()) } } fn init(server : &mut Server) { server.add_static("", "Index", INDEX); server.add_translator("foo", "bar", "template", Box::new(TestTranslator)); }
let mut server = Server::new(); init(&mut server); let iron = Iron::new(server); iron.http(("localhost",port)).unwrap(); }
function_block-function_prefix_line
[ { "content": "/// Render using `layout.hbs`\n\npub fn render_ok(hbars : &Handlebars, title : String, body : String) -> IronResult<Response> {\n\n Ok(Response::with(\n\n (iron::status::Ok,\n\n Header(ContentType::html()),\n\n hbars.render(\"layout\", &LayoutPage {\n\n title : title,\n\n body : body\n\n }).expect(\"Could not render layout\"))))\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 3, "score": 86208.43512414781 }, { "content": "/// Render an error using `layout.hbs`\n\npub fn render_error(hbars : &Handlebars, body : String, error : iron::status::Status) -> IronResult<Response> {\n\n Ok(Response::with(\n\n (error,\n\n Header(ContentType::html()),\n\n hbars.render(\"layout\", &LayoutPage {\n\n title : ::APP_TITLE.to_string(),\n\n body : body\n\n }).expect(\"Could not render error page\"))))\n\n}\n\n\n\n\n", "file_path": "src/renderer.rs", "rank": 4, "score": 84540.1277485104 }, { "content": "#[derive(Debug,Clone,Serialize)]\n\nstruct LayoutPage {\n\n title : String,\n\n body : String\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 8, "score": 44289.78406231613 }, { "content": "/// A translator that can convert query arguments into a serializable object\n\npub trait Translator<A : Serialize> : Sync + Send {\n\n fn convert(&self, HashMap<String, String>) -> Result<A,TranslatorError>;\n\n}\n\n\n\nimpl<A : Serialize> Renderer for Box<Translator<A>> {\n\n fn render(&self, args : HashMap<String, String>, _ : &Map, _ : &Handlebars, _:usize) -> IronResult<Response> {\n\n match self.convert(args) {\n\n Ok(data) => {\n\n match serde_json::to_string(&data) {\n\n Ok(s) => {\n\n Ok(Response::with((\n\n iron::status::Ok,\n\n Header(ContentType::json()), s)))\n\n },\n\n Err(e) => {\n\n Ok(Response::with((\n\n iron::status::InternalServerError,\n\n Header(ContentType::plaintext()), e.description())))\n\n }\n\n }\n", "file_path": "src/renderer.rs", "rank": 9, "score": 38670.734461183565 }, { "content": "#[allow(dead_code)]\n\ntype TranslatorResult<A> = Result<A, TranslatorError>;\n\n\n", "file_path": "src/renderer.rs", "rank": 10, "score": 30081.172341311998 }, { "content": "pub static LAYOUT : &'static [u8] = include_bytes!(\"layout.hbs\");\n\npub static NOT_FOUND : &'static [u8] = include_bytes!(\"not_found.html\");\n\npub static INDEX : &'static [u8] = include_bytes!(\"index.html\");\n\n\n\npub static TEMPLATES : [(&'static str, &'static [u8]);0] = [];\n", "file_path": "src/templates.rs", "rank": 11, "score": 17980.061969639264 }, { "content": "/// A renderer that can create a particular page\n\npub trait Renderer : Send + Sync {\n\n fn render(&self, HashMap<String, String>, &Map, &Handlebars, usize) -> IronResult<Response>;\n\n}\n\n\n\n/// The renderer for showing a static page\n\npub struct StaticRenderer(String, String);\n\n\n\nimpl StaticRenderer {\n\n pub fn new(name: String, data : &'static [u8]) -> StaticRenderer {\n\n StaticRenderer(name, from_utf8(data).expect(\"Invalid UTF-8\").to_string())\n\n }\n\n}\n\n\n\nimpl Renderer for StaticRenderer {\n\n fn render(&self, _: HashMap<String, String>, _ : &Map, hb : &Handlebars, _ : usize) -> IronResult<Response> {\n\n let title = format!(\"{} - {}\", ::APP_TITLE, self.0);\n\n render_ok(hb, title, self.1.clone())\n\n }\n\n}\n\n\n\n/// Errors from the translation\n\n#[allow(dead_code)]\n\npub enum TranslatorError {\n\n ParameterError(String),\n\n TranslationError(String)\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 12, "score": 16748.26720214054 }, { "content": " },\n\n Err(TranslatorError::ParameterError(msg)) => {\n\n 
Ok(Response::with((\n\n iron::status::BadRequest,\n\n Header(ContentType::plaintext()), msg)))\n\n },\n\n Err(TranslatorError::TranslationError(msg)) =>{\n\n Ok(Response::with((\n\n iron::status::InternalServerError,\n\n Header(ContentType::plaintext()), msg)))\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A renderer that uses a template to produce HTML for a translated object\n\npub struct TranslatorRenderer<A : Serialize>(pub String, pub String, pub Box<Translator<A>>);\n\n\n\nimpl<A: Serialize> Renderer for TranslatorRenderer<A> {\n\n fn render(&self, args : HashMap<String, String>, _ : &Map, hb : &Handlebars, _:usize) -> IronResult<Response> {\n", "file_path": "src/renderer.rs", "rank": 13, "score": 15305.191982333588 }, { "content": "use handlebars::Handlebars;\n\nuse iron::headers::ContentType;\n\nuse iron::modifiers::Header;\n\nuse iron::prelude::*;\n\nuse iron;\n\nuse params::Map;\n\nuse serde::Serialize;\n\nuse serde_json;\n\nuse std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::result::Result;\n\nuse std::str::from_utf8;\n\n\n\n\n\n#[derive(Debug,Clone,Serialize)]\n", "file_path": "src/renderer.rs", "rank": 14, "score": 15303.839199080965 }, { "content": " let title = format!(\"{} - {}\", ::APP_TITLE, self.0);\n\n match self.2.convert(args) {\n\n Ok(body) => {\n\n render_ok(hb, title.to_string(), hb.render(&self.1, &body).\n\n expect(&format!(\"Could not use template {}\", self.1)))\n\n },\n\n Err(TranslatorError::ParameterError(msg)) => {\n\n Ok(Response::with((\n\n iron::status::BadRequest,\n\n Header(ContentType::plaintext()), msg)))\n\n },\n\n Err(TranslatorError::TranslationError(msg)) =>{\n\n Ok(Response::with((\n\n iron::status::InternalServerError,\n\n Header(ContentType::plaintext()), msg)))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 15, "score": 15303.755489036424 }, { "content": "# Simple Rust Web Framework\n\n\n\nA very simple framework for building web applications in Rust. This is intended \n\nto be used as a template to build a web application.\n", "file_path": "README.md", "rank": 20, "score": 2.5277100669264567 } ]